cnView_buildMain <- function(x, y, z=NULL, chr, CNscale=FALSE, layers=NULL, segmentColor=NULL) {
    # dummy layer to anchor the y-axis, plus rotated x-axis labels
    dummy_data <- geom_point(data=y, mapping=aes_string(x='coordinate', y=2), alpha=0)
    theme <- theme(axis.text.x=element_text(angle=30, hjust=1))

    # colour scale for copy-number values; the original low/high colour values were
    # garbled in the source, so generic placeholders ('blue'/'red') are used here
    if(CNscale == "relative") {
        shade_cn <- scale_color_gradient2("Copy Number", midpoint=0, low='blue',
                                          mid='gray65', high='red', space='Lab')
        ylabel <- ylab('Copy Number Difference')
    } else if(CNscale == "absolute") {
        shade_cn <- scale_color_gradient2("Copy Number", midpoint=2, low='blue',
                                          mid='gray65', high='red', space='Lab')
        ylabel <- ylab('Absolute Copy Number')
    } else {
        memo <- paste0("Did not recognize input to CNscale... defaulting to ",
                       "absolute scale, please specify \"relative\" ",
                       "if copy neutral calls == 0")
        warning(memo)
        # fall back to the absolute scale, as the warning promises
        shade_cn <- scale_color_gradient2("Copy Number", midpoint=2, low='blue',
                                          mid='gray65', high='red', space='Lab')
        ylabel <- ylab('Absolute Copy Number')
    }
    xlabel <- xlab('Coordinate')

    if(!is.null(layers)) {
        layers <- layers
    } else {
        layers <- geom_blank()
    }

    # if p-values are supplied, map them to point transparency
    if(any('p_value' %in% colnames(x))) {
        x$transparency <- 1 - x$p_value
        cnpoints <- geom_point(data=x, mapping=aes_string(x='coordinate', y='cn',
                                                          colour='cn', alpha='transparency'))
        transparency <- scale_alpha(guide='none')
    } else {
        cnpoints <- geom_point(data=x, mapping=aes_string(x='coordinate', y='cn', colour='cn'))
        transparency <- geom_blank()
    }

    # optional segment-mean layer
    if(!is.null(z)) {
        colour <- ifelse(is.null(segmentColor), "green", segmentColor)
        cnseg <- geom_segment(data=z, mapping=aes_string(x='start', xend='end',
                                                         y='segmean', yend='segmean'),
                              colour=colour, size=2)
    } else {
        cnseg <- geom_blank()
    }

    tmp <- data.frame(x=0, y=0)
    p1 <- ggplot(data=tmp, aes(x=0)) + cnpoints + shade_cn + ylabel + xlabel +
        theme_bw() + theme + cnseg + dummy_data + transparency + layers

    if(chr == 'all') {
        facet <- facet_wrap(~chromosome, scales='free')
        p1 <- p1 + facet
    }

    return(p1)
}
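# --- Illustrative usage sketch (not part of the original source) ----------------
# A minimal call to cnView_buildMain(), assuming ggplot2 is attached and inferring
# the expected columns from the aes_string() mappings above: x needs 'coordinate',
# 'cn' and 'chromosome' (optionally 'p_value'); y is only used for the dummy layer;
# z (optional) needs 'start', 'end' and 'segmean'.
library(ggplot2)
cn_points <- data.frame(chromosome = "chr1",
                        coordinate = seq(1e6, 5e6, length.out = 50),
                        cn = rnorm(50, mean = 2, sd = 0.4))
cn_segs <- data.frame(start = 1e6, end = 5e6, segmean = 2)
p <- cnView_buildMain(x = cn_points, y = cn_points, z = cn_segs,
                      chr = "chr1", CNscale = "absolute")
# print(p)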
context("secs-hms") test_that(".secs function works", { expect_equal(.secs("15"), 15) expect_equal(.secs("15s"), 15) expect_equal(.secs("1m"), 60) expect_equal(.secs("3m15s"), 195) expect_equal(.secs("1h3m15s"), 3795) }) test_that(".hms function works", { expect_identical(.hms(3810), "1h3m30s") expect_identical(.hms("3810"), "1h3m30s") expect_identical(.hms("3m15s"), "0h3m15s") expect_identical(.hms("3m"), "0h3m0s") })
library(tfruns) FLAGS <- flags( flag_numeric('learning_rate', 2e-5, 'Initial learning rate.'), flag_numeric('max_steps', 1e-6, 'Number of steps to run trainer.') )
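# --- Illustrative usage sketch (not from the original source) -------------------
# Flags declared with flags() come back as a named list, so the training script can
# read them directly; a run can then override them from the outside.
cat("Learning rate for this run:", FLAGS$learning_rate, "\n")
# Assuming the script is saved as "train.R", flag values can be overridden per run:
# tfruns::training_run("train.R", flags = list(learning_rate = 1e-4))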
checkTestOutput <- function(rtIn, rtSave=paste(rtIn, ".save", sep=""), debug=TRUE) {
    rtOut <- gsub("\\.Rt$", ".Rout", rtIn, perl=TRUE)
    rtFail <- gsub("\\.Rt$", ".Rt.fail", rtIn, perl=TRUE)
    rtLog <- paste(rtIn, ".log", sep="")
    if (!file.exists(rtIn)) {
        msg <- paste("checkTestOutput: cannot find original test file '", rtIn, "' in '", getwd(), "'\n", sep="")
        cat(file=rtFail, msg)
        cat(file=stdout(), msg)
        return(NULL)
    }
    if (!file.exists(rtSave)) {
        msg <- paste("checkTestOutput: cannot find saved-test-object file '", rtSave, "' in '", getwd(), "'\n", sep="")
        cat(file=rtFail, msg)
        cat(file=stdout(), msg)
        return(NULL)
    }
    if (!file.exists(rtOut)) {
        msg <- paste("checkTestOutput: cannot find actual test output file '", rtOut, "' in '", getwd(), "'\n", sep="")
        cat(file=rtFail, msg)
        cat(file=stdout(), msg)
        return(NULL)
    }
    if (debug)
        cat(" * Loading saved transcript object from file \"", rtSave, "\" ...\n", sep="", file=stdout())
    testObjName <- load(file=rtSave, envir=as.environment(-1))
    if (testObjName[1] != "tests")
        tests <- get(testObjName[1])
    cat(" * Parsing actual test output from file \"", rtOut, "\" ...\n", sep="", file=stdout())
    ## NOTE: the original value of ignoreUpToRegExpr was lost when this code was
    ## extracted; "> " is a placeholder for the prompt-matching pattern used upstream.
    resList <- parseTranscriptFile(rtOut, ignoreUpToRegExpr="> ")
    res <- compareTranscriptAndOutput(sub(".Rout", ".Rt", rtOut), tests, resList, verbose=TRUE)
    res.summary <- summary(res)
    print(res.summary)
    sink(rtLog)
    print(res)
    print(res.summary)
    sink()
    testResultsFile <- "test-summary.txt"
    if (file.exists(testResultsFile)) {
        testResults <- scan("test-summary.txt", quiet=T,
                            what=list(name="", ntests=0, NULL, NULL, nerr=0, NULL,
                                      nwarn=0, NULL, NULL, nmess=0, NULL), fill=T)
        if (length(testResults$name)>0 && testResults$name[length(testResults$name)]=="total:")
            testResults <- lapply(testResults, "[", -length(testResults$name))
    } else {
        testResults <- list(name=character(0), ntests=numeric(0), c1=NULL, c2=NULL,
                            nerr=numeric(0), c3=NULL, nwarn=numeric(0), c4=NULL,
                            c5=NULL, nmess=numeric(0), c6=NULL)
    }
    i <- match(paste(rtIn, ":", sep=""), testResults$name, nomatch=length(testResults$name)+1)
    testResults$name[i] <- paste(rtIn, ":", sep="")
    testResults$ntests[i] <- res.summary$n
    testResults$nerr[i] <- res.summary$counts["error"]
    testResults$nwarn[i] <- res.summary$counts["warning"]
    testResults$nmess[i] <- res.summary$counts["info"]
    i <- length(testResults$name)+1
    testResults$name[i] <- "total:"
    testResults$ntests[i] <- sum(testResults$ntests, na.rm=T)
    testResults[[3]] <- "tests"
    testResults[[4]] <- "with"
    testResults$nerr[i] <- sum(testResults$nerr, na.rm=T)
    testResults[[6]] <- "errors,"
    testResults$nwarn[i] <- sum(testResults$nwarn, na.rm=T)
    testResults[[8]] <- "warnings"
    testResults[[9]] <- "and"
    testResults$nmess[i] <- sum(testResults$nmess, na.rm=T)
    testResults[[11]] <- "messages"
    sink(testResultsFile)
    cat(do.call("paste", lapply(testResults, format)), sep="\n")
    sink()
    testResultsFile <- "test-summary.fail"
    if (sum(testResults$nerr, na.rm=T) > 0) {
        sink(testResultsFile)
        cat(do.call("paste", lapply(testResults, format)), sep="\n")
        sink()
    }
    return(res)
}
createStudyPopulation <- function(plpData, population = NULL, outcomeId, binary = T, includeAllOutcomes = T, firstExposureOnly = FALSE, washoutPeriod = 0, removeSubjectsWithPriorOutcome = TRUE, priorOutcomeLookback = 99999, requireTimeAtRisk = F, minTimeAtRisk=365, riskWindowStart = 0, startAnchor = "cohort start", riskWindowEnd = 365, endAnchor = "cohort start", verbosity = "INFO", restrictTarToCohortEnd = F, addExposureDaysToStart, addExposureDaysToEnd, ...) {
  if(!missing(addExposureDaysToStart)){
    if(is.null(startAnchor)){
      warning('addExposureDaysToStart is deprecated - please use startAnchor instead')
      startAnchor <- ifelse(addExposureDaysToStart, 'cohort end','cohort start')
    } else {
      warning('addExposureDaysToStart specified so being used')
      warning('addExposureDaysToStart is deprecated - please use startAnchor instead')
      startAnchor <- ifelse(addExposureDaysToStart, 'cohort end','cohort start')
    }
  }
  if(!missing(addExposureDaysToEnd)){
    if(is.null(endAnchor)){
      warning('addExposureDaysToEnd is deprecated - please use endAnchor instead')
      endAnchor <- ifelse(addExposureDaysToEnd, 'cohort end','cohort start')
    } else {
      warning('addExposureDaysToEnd specified so being used')
      warning('addExposureDaysToEnd is deprecated - please use endAnchor instead')
      endAnchor <- ifelse(addExposureDaysToEnd, 'cohort end','cohort start')
    }
  }
  if(missing(verbosity)){
    verbosity <- "INFO"
  } else{
    if(!verbosity%in%c("DEBUG","TRACE","INFO","WARN","FATAL","ERROR")){
      stop('Incorrect verbosity string')
    }
  }
  if(length(ParallelLogger::getLoggers())==0){
    logger <- ParallelLogger::createLogger(name = "SIMPLE", threshold = verbosity, appenders = list(ParallelLogger::createConsoleAppender(layout = ParallelLogger::layoutTimestamp)))
    ParallelLogger::registerLogger(logger)
  }
  if(!class(plpData)%in%c('plpData')){
    ParallelLogger::logError('Check plpData format')
    stop('Wrong plpData input')
  }
  ParallelLogger::logDebug(paste0('outcomeId: ', outcomeId))
  checkNotNull(outcomeId)
  ParallelLogger::logDebug(paste0('binary: ', binary))
  checkBoolean(binary)
  ParallelLogger::logDebug(paste0('includeAllOutcomes: ', includeAllOutcomes))
  checkBoolean(includeAllOutcomes)
  ParallelLogger::logDebug(paste0('firstExposureOnly: ', firstExposureOnly))
  checkBoolean(firstExposureOnly)
  ParallelLogger::logDebug(paste0('washoutPeriod: ', washoutPeriod))
  checkHigherEqual(washoutPeriod,0)
  ParallelLogger::logDebug(paste0('removeSubjectsWithPriorOutcome: ', removeSubjectsWithPriorOutcome))
  checkBoolean(removeSubjectsWithPriorOutcome)
  if (removeSubjectsWithPriorOutcome){
    ParallelLogger::logDebug(paste0('priorOutcomeLookback: ', priorOutcomeLookback))
    checkHigher(priorOutcomeLookback,0)
  }
  ParallelLogger::logDebug(paste0('requireTimeAtRisk: ', requireTimeAtRisk))
  checkBoolean(requireTimeAtRisk)
  ParallelLogger::logDebug(paste0('minTimeAtRisk: ', minTimeAtRisk))
  checkHigherEqual(minTimeAtRisk,0)
  ParallelLogger::logDebug(paste0('restrictTarToCohortEnd: ', restrictTarToCohortEnd))
  checkBoolean(restrictTarToCohortEnd)
  ParallelLogger::logDebug(paste0('riskWindowStart: ', riskWindowStart))
  checkHigherEqual(riskWindowStart,0)
  ParallelLogger::logDebug(paste0('startAnchor: ', startAnchor))
  if(!startAnchor%in%c('cohort start', 'cohort end')){
    stop('Incorrect startAnchor')
  }
  ParallelLogger::logDebug(paste0('riskWindowEnd: ', riskWindowEnd))
  checkHigherEqual(riskWindowEnd,0)
  ParallelLogger::logDebug(paste0('endAnchor: ', endAnchor))
  if(!endAnchor%in%c('cohort start', 'cohort end')){
    stop('Incorrect endAnchor')
  }
  if(requireTimeAtRisk){ if(startAnchor==endAnchor){
if(minTimeAtRisk>(riskWindowEnd-riskWindowStart)){ warning('issue: minTimeAtRisk is greater than max possible time-at-risk') } } } if (is.null(population)) { population <- plpData$cohorts } metaData <- attr(population, "metaData") metaData$outcomeId <- outcomeId metaData$binary <- binary metaData$includeAllOutcomes <- includeAllOutcomes metaData$firstExposureOnly = firstExposureOnly metaData$washoutPeriod = washoutPeriod metaData$removeSubjectsWithPriorOutcome = removeSubjectsWithPriorOutcome metaData$priorOutcomeLookback = priorOutcomeLookback metaData$requireTimeAtRisk = requireTimeAtRisk metaData$minTimeAtRisk=minTimeAtRisk metaData$riskWindowStart = riskWindowStart metaData$startAnchor = startAnchor metaData$riskWindowEnd = riskWindowEnd metaData$endAnchor = endAnchor if(is.null(metaData$attrition)){ metaData$attrition <- attr(plpData$cohorts, 'metaData')$attrition } if(!is.null(metaData$attrition)){ metaData$attrition <- data.frame(outcomeId = metaData$attrition$outcomeId, description = metaData$attrition$description, targetCount = metaData$attrition$targetCount, uniquePeople = metaData$attrition$uniquePeople, outcomes = metaData$attrition$outcomes) if(sum(metaData$attrition$outcomeId==outcomeId)>0){ metaData$attrition <- metaData$attrition[metaData$attrition$outcomeId==outcomeId,] } else{ metaData$attrition <- NULL } } oId <- outcomeId population <- population %>% dplyr::mutate(startAnchor = startAnchor, startDay = riskWindowStart, endAnchor = endAnchor, endDay = riskWindowEnd) %>% dplyr::mutate(tarStart = ifelse(.data$startAnchor == 'cohort start', .data$startDay, .data$startDay+ .data$daysToCohortEnd), tarEnd = ifelse(.data$endAnchor == 'cohort start', .data$endDay, .data$endDay+ .data$daysToCohortEnd)) %>% dplyr::mutate(tarEnd = ifelse(.data$tarEnd>.data$daysToObsEnd, .data$daysToObsEnd,.data$tarEnd )) if(max(population$daysToCohortEnd)>0 & restrictTarToCohortEnd){ ParallelLogger::logInfo('Restricting tarEnd to end of target cohort') population <- population %>% dplyr::mutate(tarEnd = ifelse(.data$tarEnd>.data$daysToCohortEnd, .data$daysToCohortEnd,.data$tarEnd )) } outcomeTAR <- population %>% dplyr::inner_join(plpData$outcomes, by ='rowId') %>% dplyr::filter(.data$outcomeId == get('oId')) %>% dplyr::select(.data$rowId, .data$daysToEvent, .data$tarStart, .data$tarEnd) %>% dplyr::filter(.data$daysToEvent >= .data$tarStart & .data$daysToEvent <= .data$tarEnd) if(nrow(as.data.frame(outcomeTAR))>0){ outcomeTAR <- outcomeTAR %>% dplyr::group_by(.data$rowId) %>% dplyr::summarise(first = min(.data$daysToEvent), ocount = length(unique(.data$daysToEvent))) %>% dplyr::select(.data$rowId, .data$first, .data$ocount) } else { outcomeTAR <- outcomeTAR %>% dplyr::mutate(first = 0, ocount = 0) %>% dplyr::select(.data$rowId, .data$first, .data$ocount) } population <- population %>% dplyr::left_join(outcomeTAR, by = 'rowId') attrRow <- population %>% dplyr::group_by() %>% dplyr::summarise(outcomeId = get('oId'), description = 'Initial plpData cohort or population', targetCount = length(.data$rowId), uniquePeople = length(unique(.data$subjectId)), outcomes = sum(!is.na(.data$first))) metaData$attrition <- rbind(metaData$attrition, attrRow) if (firstExposureOnly) { ParallelLogger::logTrace(paste("Restricting to first exposure")) population <- population %>% dplyr::arrange(.data$subjectId,.data$cohortStartDate) %>% dplyr::group_by(.data$subjectId) %>% dplyr::filter(dplyr::row_number(.data$subjectId)==1) attrRow <- population %>% dplyr::group_by() %>% dplyr::summarise(outcomeId = get('oId'), 
description = 'First Exposure', targetCount = length(.data$rowId), uniquePeople = length(unique(.data$subjectId)), outcomes = sum(!is.na(.data$first))) metaData$attrition <- rbind(metaData$attrition, attrRow) } if(washoutPeriod) { ParallelLogger::logTrace(paste("Requiring", washoutPeriod, "days of observation prior index date")) msg <- paste("At least", washoutPeriod, "days of observation prior") population <- population %>% dplyr::mutate(washoutPeriod = washoutPeriod) %>% dplyr::filter(.data$daysFromObsStart >= .data$washoutPeriod) attrRow <- population %>% dplyr::group_by() %>% dplyr::summarise(outcomeId = get('oId'), description = msg, targetCount = length(.data$rowId), uniquePeople = length(unique(.data$subjectId)), outcomes = sum(!is.na(.data$first))) metaData$attrition <- rbind(metaData$attrition, attrRow) } if(removeSubjectsWithPriorOutcome) { ParallelLogger::logTrace("Removing subjects with prior outcomes (if any)") outcomeBefore <- population %>% dplyr::inner_join(plpData$outcomes, by ='rowId') %>% dplyr::filter(outcomeId == get('oId')) %>% dplyr::select(.data$rowId, .data$daysToEvent, .data$tarStart) %>% dplyr::filter(.data$daysToEvent < .data$tarStart & .data$daysToEvent > -get('priorOutcomeLookback') ) if(nrow(as.data.frame(outcomeBefore))>0){ outcomeBefore %>% dplyr::group_by(.data$rowId) %>% dplyr::summarise(first = min(.data$daysToEvent)) %>% dplyr::select(.data$rowId) } population <- population %>% dplyr::filter(!.data$rowId %in% outcomeBefore$rowId ) attrRow <- population %>% dplyr::group_by() %>% dplyr::summarise(outcomeId = get('oId'), description = "No prior outcome", targetCount = length(.data$rowId), uniquePeople = length(unique(.data$subjectId)), outcomes = sum(!is.na(.data$first))) metaData$attrition <- rbind(metaData$attrition, attrRow) } if (requireTimeAtRisk) { if(includeAllOutcomes){ ParallelLogger::logTrace("Removing non outcome subjects with insufficient time at risk (if any)") population <- population %>% dplyr::filter(!is.na(.data$first) | .data$tarEnd >= .data$tarStart + minTimeAtRisk ) attrRow <- population %>% dplyr::group_by() %>% dplyr::summarise(outcomeId = get('oId'), description = "Removing non-outcome subjects with insufficient time at risk (if any)", targetCount = length(.data$rowId), uniquePeople = length(unique(.data$subjectId)), outcomes = sum(!is.na(.data$first))) metaData$attrition <- rbind(metaData$attrition, attrRow) } else { ParallelLogger::logTrace("Removing subjects with insufficient time at risk (if any)") population <- population %>% dplyr::filter( .data$tarEnd >= .data$tarStart + minTimeAtRisk ) attrRow <- population %>% dplyr::group_by() %>% dplyr::summarise(outcomeId = get('oId'), description = "Removing subjects with insufficient time at risk (if any)", targetCount = length(.data$rowId), uniquePeople = length(unique(.data$subjectId)), outcomes = sum(!is.na(.data$first))) metaData$attrition <- rbind(metaData$attrition, attrRow) } } else { ParallelLogger::logTrace("Removing subjects with no time at risk (if any)") population <- population %>% dplyr::filter( .data$tarEnd >= .data$tarStart ) attrRow <- population %>% dplyr::group_by() %>% dplyr::summarise(outcomeId = get('oId'), description = "Removing subjects with no time at risk (if any))", targetCount = length(.data$rowId), uniquePeople = length(unique(.data$subjectId)), outcomes = sum(!is.na(.data$first))) metaData$attrition <- rbind(metaData$attrition, attrRow) } if(binary){ ParallelLogger::logInfo("Outcome is 0 or 1") population <- population %>% dplyr::mutate(outcomeCount = 
ifelse(is.na(.data$ocount),0,1)) } else{ ParallelLogger::logTrace("Outcome is count") population <- population %>% dplyr::mutate(outcomeCount = ifelse(is.na(.data$ocount),0,.data$ocount)) } population <- population %>% dplyr::mutate(timeAtRisk = .data$tarEnd - .data$tarStart + 1 , survivalTime = ifelse(.data$outcomeCount == 0, .data$tarEnd -.data$tarStart + 1, .data$first - .data$tarStart + 1), daysToEvent = .data$first) %>% dplyr::select(.data$rowId, .data$subjectId, .data$cohortId, .data$cohortStartDate, .data$daysFromObsStart, .data$daysToCohortEnd, .data$daysToObsEnd, .data$ageYear, .data$gender, .data$outcomeCount, .data$timeAtRisk, .data$daysToEvent, .data$survivalTime) if(sum(!is.na(population$daysToEvent))==0){ ParallelLogger::logWarn('No outcomes left...') return(NULL) } population <- as.data.frame(population) attr(population, "metaData") <- metaData return(population) } getCounts <- function(population,description = "") { persons <- length(unique(population$subjectId)) targets <- nrow(population) counts <- data.frame(description = description, targetCount= targets, uniquePeople = persons) return(counts) } getCounts2 <- function(cohort,outcomes, description = "") { persons <- length(unique(cohort$subjectId)) targets <- nrow(cohort) outcomes <- stats::aggregate(cbind(count = outcomeId) ~ outcomeId, data = outcomes, FUN = function(x){NROW(x)}) counts <- data.frame(outcomeId = outcomes$outcomeId, description = description, targetCount= targets, uniquePeople = persons, outcomes = outcomes$count) return(counts) }
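# --- Illustrative usage sketch (not from the original source) -------------------
# A typical call, assuming `plpData` was created elsewhere by the package's data
# extraction step and outcome cohort 2 is of interest; argument names are taken
# from the signature above, the values are arbitrary.
# population <- createStudyPopulation(plpData = plpData,
#                                     outcomeId = 2,
#                                     firstExposureOnly = TRUE,
#                                     washoutPeriod = 365,
#                                     removeSubjectsWithPriorOutcome = TRUE,
#                                     requireTimeAtRisk = TRUE, minTimeAtRisk = 364,
#                                     riskWindowStart = 1, startAnchor = "cohort start",
#                                     riskWindowEnd = 365, endAnchor = "cohort start")
# attr(population, "metaData")$attrition   # inclusion/exclusion counts recorded above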
get_carrier <- function(x, strict = FALSE, safe = FALSE, locale = getOption("dialr.locale")) { if (!is.phone(x)) stop("`x` must be a vector of class `phone`.", call. = FALSE) carrier_mapper <- .get_phoneNumberToCarrierMapper() locale <- .jstr_to_locale(locale) if (safe) { region <- get_region(x) out <- NA out[!is.na(region)] <- phone_apply(x[!is.na(region)], function(pn) { .jcall(carrier_mapper, "S", "getSafeDisplayName", pn, locale) }, character(1)) } else { out <- phone_apply(x, function(pn) { .jcall(carrier_mapper, "S", "getNameForValidNumber", pn, locale) }, character(1)) } if (strict) out[!is_valid(x)] <- NA_character_ out }
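# --- Illustrative usage sketch (not from the original source) -------------------
# get_carrier() expects a `phone` vector; assuming the package's phone() constructor
# (dialr-style) is available and a Java backend is installed, a call might look like:
# x <- phone(c("0412 345 678", "+44 7911 123456"), "AU")
# get_carrier(x)                 # carrier names for numbers with a resolvable region
# get_carrier(x, strict = TRUE)  # NA for numbers that fail is_valid()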
barMiss <- function(x, delimiter = NULL, pos = 1, selection = c("any","all"), col = c("skyblue","red","skyblue4","red4","orange","orange4"), border = NULL, main = NULL, sub = NULL, xlab = NULL, ylab = NULL, axes = TRUE, labels = axes, only.miss = TRUE, miss.labels = axes, interactive = TRUE, ...) { check_data(x) x <- as.data.frame(x) imputed <- FALSE if(is.null(dim(x))) { if(is.continuous(x)) { histMiss(x, delimiter=delimiter, pos=pos, selection=selection, col=col, border=border, main=main, sub=sub, xlab=xlab, ylab=ylab, axes=axes, only.miss=only.miss, miss.labels=miss.labels, interactive=interactive, ...) return(invisible(1)) } n <- length(x) p <- 1 if(n == 0) stop("'x' must have positive length") } else { if(!(inherits(x, c("data.frame","matrix")))) { stop("'x' must be a data.frame or matrix") } if(is.continuous(x[, pos])) { histMiss(x, delimiter=delimiter, pos=pos, selection=selection, col=col, border=border, main=main, sub=sub, xlab=xlab, ylab=ylab, axes=axes, only.miss=only.miss, miss.labels=miss.labels, interactive=interactive, ...) return(invisible(1)) } if(!is.null(delimiter)) { tmp <- grep(delimiter, colnames(x)) if(length(tmp) > 0) { imp_var <- x[, tmp, drop=FALSE] x <- x[, -tmp, drop=FALSE] if(ncol(x) == 0) stop("Only the missing-index is given") if(is.matrix(imp_var) && range(imp_var) == c(0,1)) imp_var <- apply(imp_var,2,as.logical) if(is.null(dim(imp_var))) { if(!is.logical(imp_var)) stop("The missing-index of imputed Variables must be of the type logical") } else { if(!any(as.logical(lapply(imp_var,is.logical)))) stop("The missing-index of imputed Variables must be of the type logical") } imputed <- TRUE } else { warning("'delimiter' is given, but no missing-index-Variable is found", call. = FALSE) } } n <- nrow(x) p <- ncol(x) if(n == 0) stop("'x' has no rows") else if(p == 0) stop("'x' has no columns") if(is.null(colnames(x))) colnames(x) <- defaultNames(p) } if(p == 1) { only.miss <- TRUE interactive <- FALSE } else { if((!is.numeric(pos)) || (length(pos) != 1) || (p < pos)) { stop("'pos' must be an integer specifying one column of 'x' and must be lesser than the number of colums of 'x'") } selection <- match.arg(selection) } if(length(col) == 0) col <- c("skyblue","red","skyblue4","red4","orange","orange4") else if(length(col) == 1) col <- c(rep.int(c("transparent", col), 2),rep.int(col,2)) else if(length(col) == 3 || length(col) == 5) col <- rep.int(col[1:2], 3) else if(length(col) != 6) col <- rep(col, length.out=6) localAxis <- function(..., names.arg, axisnames, cex.names, names.miss) { axis(...) } localTitle <- function(..., names.arg, axisnames, cex.names, names.miss) { title(...) } dots <- list(...) 
nmdots <- names(dots) has.axisnames <- "axisnames" %in% nmdots if(missing(labels)) { if(has.axisnames) { if(dots$axisnames) { if("names.arg" %in% nmdots) labels <- dots$names.arg else labels <- TRUE } else labels <- FALSE } else if("names.arg" %in% nmdots) labels <- dots$names.arg } if(missing(miss.labels)) { if(has.axisnames) { if(dots$axisnames) { if("names.miss" %in% nmdots) miss.labels <- dots$names.miss else miss.labels <- TRUE } else miss.labels <- FALSE } else if("names.miss" %in% nmdots) miss.labels <- dots$names.miss } createPlot <- function(main=NULL, sub=NULL, xlab=NULL, ylab=NULL, labels=axes) { if(is.null(dim(x))) xpos <- as.factor(x) else if(p == 1) { xpos <- as.factor(x[,1]) if(is.null(xlab)) xlab <- colnames(x) } else { xpos <- as.factor(x[, pos]) xh <- x[, -pos, drop=FALSE] if(is.null(xlab)) xlab <- colnames(x)[pos] } if(p == 2 && is.null(ylab)) { if(!imputed) ylab <- paste("missing/observed in", colnames(x)[-pos]) else ylab <- paste("imputed/observed in", colnames(x)[-pos]) } x.axis <- TRUE if(is.logical(labels)) { if(!is.na(labels) && labels) labels <- NULL else x.axis <- FALSE } miss.axis <- TRUE if(is.logical(miss.labels)) { if(!is.na(miss.labels) && miss.labels) miss.labels <- NULL else miss.axis <- FALSE } impp <- FALSE if(!imputed) { misspos <- isNA(xpos) } else { tmp <- isImp(x, pos = pos, delimiter = delimiter, imp_var = imp_var, selection = selection) misspos <- tmp[["misspos"]] impp <- tmp[["impp"]] missh <- tmp[["missh"]] } missposf <- factor(ifelse(misspos, 1, 0), levels=0:1) if(p == 1) ct <- table(missposf)[2] else { if(!imputed) missh <- isNA(xh, selection) misshf <- factor(ifelse(missh, 1, 0), levels=1:0) ct <- table(misshf, missposf) ct[2,] <- ct[1,] + ct[2,] if(only.miss) ct <- ct[,2] } allNA <- all(misspos) if(allNA) { n <- 5 counts <- 0 } else { n <- length(levels(xpos)) counts <- summary(xpos[!misspos]) } br <- c(0.2, n*1.2) h <- br[2] - br[1] if(only.miss) { xlim <- c(br[1], br[2]+1+0.08*h) ylim <- c(0, max(summary(xpos))) } else { xlim <- c(br[1], br[2]+0.155*h) ylim <- c(0, max(counts)) } if(allNA) { b <- NULL labels <- character() plot(xlim, ylim, type="n", ann=FALSE, axes=FALSE, yaxs="i") if(only.miss && axes) localAxis(side=2, ...) } else { b <- barplot(counts, col=col[1], border=border, main="", sub="", xlab="", ylab="", xlim=xlim, ylim=ylim, axes=FALSE, axisnames=FALSE) if(p > 1 && any(missh)) { if(imputed) color <- col[5] else color <- col[2] indices <- which(is.na(x[,2]) & missh ==TRUE) missh2 <- missh missh2[-indices] <- FALSE countsmiss <- table(xpos[missh], useNA="no") countsmiss2 <- table(xpos[missh2], useNA="no") b <- barplot(countsmiss, col=color, border=border, add=TRUE, axes=FALSE, axisnames=FALSE) if(length(indices) > 0 & imputed) { b <- barplot(countsmiss2, col=col[2], border=border, add=TRUE, axes=FALSE, axisnames=FALSE) } } else if(p == 1 && impp == TRUE && any(misspos)) { countsmiss <- table(xpos[missh], useNA="no") b <- barplot(countsmiss, col=col[5], border=border, add=TRUE, axes=FALSE, axisnames=FALSE) } if(x.axis) { if(is.null(labels)) labels <- levels(xpos) else labels <- rep(labels, length.out=length(levels(xpos))) } if(axes) localAxis(side=2, ...) } localTitle(main, sub, xlab, ylab, ...) 
abline(v=br[2]+0.04*h, col="lightgrey") if(only.miss) { xleft <- br[2] + 0.08*h xright <- xlim[2] if(p == 1) { rect(xleft, 0, xright, ct, col=col[3], border=border, xpd=TRUE) } else { if(!imputed) color <- col[4:3] else color <- col[c(6,3)] rect(rep(xleft, 2), c(0, ct[1]), rep(xright, 2), ct, col=color, border=border, xpd=TRUE) } if(miss.axis) { miss.at <- xleft+(xright-xleft)/2 if(is.null(miss.labels)) { if(!imputed) miss.labels <- "missing" else miss.labels <- "imputed" } else miss.labels <- rep(miss.labels, length.out=1) } } else { usr <- par("usr") par(usr=c(usr[1:2], 0, max(ct[2,]))) on.exit(par(usr=usr)) zero <- br[2]+0.08*h xleft <- zero + c(0,0,1.5,1.5)*0.03*h ybottom <- c(0,ct[1,1],0,ct[1,2]) xright <- zero + c(1,1,2.5,2.5)*0.03*h ytop <- ct if(!imputed) color <- col[c(2,1,4,3)] else color <- col[c(5,1,6,3)] rect(xleft, ybottom, xright, ytop, col=color, border=border, xpd=TRUE) if(length(indices) > 0 & imputed) { sum_miss <- length(indices) xleft1 <- xleft[1] ybottom1 <- ybottom[1] xright1 <- xright[1] ytop1 <- sum_miss color1 <- col[2] rect(xleft1,ybottom1,xright1,ytop1,col=color1,border=border,xpd=TRUE) } if(miss.axis) { miss.at <- zero + c(0.5,2)*0.03*h if(is.null(miss.labels)) { if(!imputed) miss.labels <- c("observed","missing") else miss.labels <- c("observed","imputed") } else miss.labels <- rep(miss.labels, length.out=2) } if(axes) localAxis(side=4, ...) } if(x.axis || miss.axis) { x.axes <- TRUE dots$side <- 1 dots$at <- c(if(x.axis) b, if(miss.axis) miss.at) dots$labels <- c(if(x.axis) labels, if(miss.axis) miss.labels) if(is.null(dots$line)) dots$line <- par("mgp")[3] dots$lty <- 0 if(is.null(dots$las)) dots$las <- 3 if(dots$las %in% 2:3) { space.vert <- (par("oma")[1]+par("mar")[1]- dots$line-par("mgp")[2])*par("csi") ok <- prettyLabels(dots$labels, dots$at, space.vert, dots$cex.axis) if(any(ok)) { dots$at <- dots$at[ok] dots$labels <- dots$labels[ok] } else x.axes <- FALSE } if(x.axes) do.call(localAxis, dots) } return(b) } b <- createPlot(main, sub, xlab, ylab, labels) interactiveDevices <- c("X11cairo","quartz","windows") dev <- names(dev.cur()) if(interactive && any(!is.na(charmatch(interactiveDevices, dev)))) { cat(paste("\nClick in in the left margin to switch to the previous", "variable or in the right margin to switch to the next", "variable.\n")) cat(paste("To regain use of the VIM GUI and the R console,", "click anywhere else in the graphics window.\n\n")) usr <- par("usr") pt <- locatorVIM() while(!is.null(pt) && (pt$x < usr[1] || pt$x > usr[2])) { if(pt$x < usr[1]) pos <- if(pos == 1) p else (pos - 1) %% p else pos <- if(pos == p-1) p else (pos + 1) %% p b <- if(is.continuous(x[, pos])) { histMiss(if(imputed) cbind(x,imp_var) else x, delimiter = delimiter, pos=pos, selection=selection, col=col, border=border, axes=axes, only.miss=only.miss, miss.labels=miss.labels, interactive=FALSE, ...) } else createPlot(labels=if(is.logical(labels)) labels else axes) usr <- par("usr") pt <- locatorVIM() } } invisible(b) }
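# --- Illustrative usage sketch (not from the original source) -------------------
# barMiss() relies on several package-internal helpers (check_data, isNA, isImp,
# prettyLabels, locatorVIM, ...), so it only runs inside a VIM-style package. With
# those available, the documented pattern is a categorical variable of interest
# plus a second variable whose missings are highlighted:
# data(sleep, package = "VIM")
# x <- sleep[, c("Exp", "Sleep")]          # Exp is categorical, Sleep contains NAs
# barMiss(x, pos = 1, interactive = FALSE)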
fd2list <- function(fdobj) { coef <- fdobj$coefs coefsize <- dim(coef) nrep <- coefsize[2] if (length(coefsize) > 2) stop("FDOBJ is not univariate.") fdlist <- vector("list",0) for (i in 1:nrep) fdlist[[i]] <- fdobj[i] return(fdlist) }
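# --- Illustrative usage sketch (not from the original source) -------------------
# fd2list() splits a multi-replicate functional data object into a list of single
# curves; a minimal sketch using the fda package:
library(fda)
basis <- create.bspline.basis(rangeval = c(0, 1), nbasis = 5)
fdobj <- fd(matrix(rnorm(5 * 3), nrow = 5, ncol = 3), basis)  # 3 replicate curves
curves <- fd2list(fdobj)
length(curves)  # 3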
pafdR_build.exercise <- function(lan = 'en', exercise.folder = 'pafdR-exercises', pdf.folder = 'pdf out'){ my.pdftex.flag <- pafdR_check.pdflatex() if (!my.pdftex.flag) { stop('cant find pdflatex.exe! Check your latex installation and also if the command is available at userpath') } if (class(lan)!= 'character') { stop('Class of en should be character') } possible.lan <- c('en', 'pt-br') if ( !(lan %in% possible.lan)) { stop(paste('Input lan (language) should be one of:', paste(possible.lan,collapse = ', '))) } if (class(exercise.folder) != 'character') { stop('Class of exercise.folder should be character') } my.f <- list.files(path = exercise.folder,pattern = '*.Rnw', full.names = T) if (length(my.f)==0) { cat('Cant find any .Rnw files in ', exercise.folder, '. Will download it with pafdR_download.exercises()') pafdR_download.exercises(exercise.folder = exercise.folder) } my.f <- list.files(path = exercise.folder, pattern = '*.Rnw', full.names = T) if (lan == 'pt-br'){ my.template <- system.file('ext/myexam_pt-br.tex', package = 'pafdR') my.fig.input <- system.file('ext/CAPADigital_DadosFinanceirosR.jpg', package = 'pafdR') my.name <- paste0('pafdR-Exercicios-',lan,'-',Sys.Date()) } if (lan == 'en') { my.template <- system.file('ext/myexam_en.tex', package = 'pafdR') my.fig.input <- system.file('ext/CAPADigital_FinancialDataR.jpg', package = 'pafdR') my.name <- paste0('pafdR-Exercises-',lan,'-',Sys.Date()) } n.q <- length(my.f) n.ver <- 1 lan <<- lan cat('\npafdR - Building Exercises') my.exam <- exams::exams2pdf(file = my.f, n=n.ver, name=my.name, inputs = my.fig.input, template = my.template, language = lan, institution = '', title = 'R Exercises', course = '', duplex = T, encoding = 'UTF-8', dir = pdf.folder, date = Sys.Date(), intro = '', verbose = FALSE) file.rename(from = paste0(pdf.folder,'/',my.name,'1','.pdf'), to = paste0(pdf.folder,'/',my.name, '.pdf')) df.answer.key <- data.frame() exam.now <- my.exam[[1]] n.q <- length(exam.now) for (i.q in seq(n.q)){ sol.now <- letters[which(exam.now[[i.q]]$metainfo$solution)] temp <- data.frame(Question = i.q, Solution = sol.now) df.answer.key <- rbind(df.answer.key, temp) } sol.file <- paste0(pdf.folder,'/Solutions_', my.name,'.txt') utils::write.csv(x = df.answer.key, file = sol.file, row.names = FALSE) cat('\n\nDONE!\n') cat('Pdf file available at: ',paste0(pdf.folder,'/',my.name)) cat('\nSolutions available at: ',sol.file) return(TRUE) }
base64_encode <- function(x) { if(is.null(x)) return("") return(cpp_base64_encode(x)) }
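# --- Illustrative usage sketch (not from the original source) -------------------
# base64_encode() is a thin wrapper around a compiled helper (cpp_base64_encode),
# so it only works inside the package providing that routine. Expected behaviour:
# base64_encode(NULL)      # ""
# base64_encode("hello")   # "aGVsbG8=" (standard Base64)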
setGeneric("graphdf", function(x, ...) { standardGeneric("graphdf") }) setMethod( "graphdf", signature = "list", definition = function(x, ...) { results <- vector("list", length(x)) for (i in 1:length(x)) { thisGraph <- x[[i]] if (is_igraph(thisGraph)) { results[[i]] <- list() results[[i]]$v <- data.frame(sapply(names(vertex_attr(thisGraph)), function(z) { vertex_attr(thisGraph, z) }), stringsAsFactors = FALSE) results[[i]]$e <- data.frame(as_edgelist(thisGraph), sapply(names(edge_attr(thisGraph)), function(z) { edge_attr(thisGraph, z) }), stringsAsFactors = FALSE) edgeDfNames <- names(results[[i]]$e) names(results[[i]]$e) <- c("e1", "e2", edgeDfNames[3:length(edgeDfNames)]) results[[i]]$e <- as.data.frame(sapply(results[[i]]$e, as.character), stringsAsFactors = FALSE) results[[i]]$v <- as.data.frame(sapply(results[[i]]$v, as.character), stringsAsFactors = FALSE) results[[i]]$e <- as.data.frame(lapply(results[[i]]$e, function(z) { type.convert(z, as.is = TRUE) }), stringsAsFactors = FALSE) results[[i]]$v <- as.data.frame(lapply(results[[i]]$v, function(z) { type.convert(z, as.is = TRUE) }), stringsAsFactors = FALSE) } else { results[[i]]$v <- NA results[[i]]$e <- NA } } return(results) }) setMethod( "graphdf", signature = "goc", definition = function(x, ...) { theseGraphs <- lapply(x@th, function(z) z$goc) graphdf(theseGraphs) }) setMethod( "graphdf", signature = "grain", definition = function(x, ...) { theseGraphs <- list(x@th) graphdf(theseGraphs) }) setMethod( "graphdf", signature = "mpg", definition = function(x, ...) { theseGraphs <- vector("list", 1) theseGraphs[[1]] <- x@mpg graphdf(theseGraphs) }) setMethod( "graphdf", signature = "igraph", definition = function(x, ...) { theseGraphs <- vector("list", 1) theseGraphs[[1]] <- x graphdf(theseGraphs) })
sugm.select <- function(est, criterion = "stars", stars.subsample.ratio = NULL, stars.thresh = 0.1, rep.num = 20, fold = 5, loss="likelihood", verbose = TRUE) { if(est$method!="clime" && est$method!="tiger") { cat("\"method\" must be either \"clime\" or \"tiger\" \n") return(NULL) } gcinfo(FALSE) if(est$cov.input){ cat("Model selection is not available when using the covariance matrix as input.") class(est) = "select" return(est) } if(!est$cov.input) { if(is.null(criterion)) criterion = "stars" n = nrow(est$data) d = ncol(est$data) nlambda = length(est$lambda) if(criterion == "cv"){ if(verbose) { cat("Conducting cross validation (cv) selection....\n") flush.console() } out = sugm.cv(est, loss=loss, fold = fold) est$opt.lambda = out$lambda.opt est$opt.index = out$opt.idx rm(out) gc() if(verbose){ cat("done\n") flush.console() } if(verbose) { cat("Computing the optimal graph....\n") flush.console() } if(est$method == "clime") out = sugm(est$data, lambda = est$opt.lambda, method = "clime", sym = est$sym, verbose = FALSE, standardize=est$standardize) if(est$method == "tiger") out = sugm(est$data, lambda = est$opt.lambda, method = "tiger", sym = est$sym, verbose = FALSE, standardize=est$standardize) est$refit = est$path[[est$opt.index]] est$opt.sparsity=sum(est$refit)/d/(d-1) est$opt.icov = est$icov[[est$opt.index]] if(verbose){ cat("done\n") flush.console() } } if(criterion == "stars"){ if(is.null(stars.subsample.ratio)) { if(n>144) stars.subsample.ratio = 10*sqrt(n)/n if(n<=144) stars.subsample.ratio = 0.8 } est$merge = list() for(i in 1:nlambda) est$merge[[i]] = Matrix(0,d,d) for(i in 1:rep.num) { if(verbose) { mes <- paste(c("Conducting Subsampling....in progress:", floor(100*i/rep.num), "%"), collapse="") cat(mes, "\r") flush.console() } ind.sample = sample(c(1:n), floor(n*stars.subsample.ratio), replace=FALSE) if(est$method == "clime") tmp = sugm(est$data[ind.sample,], lambda = est$lambda, method = "clime", sym = est$sym, verbose = FALSE, standardize=est$standardize)$path if(est$method == "tiger") tmp = sugm(est$data[ind.sample,], lambda = est$lambda, method = "tiger", sym = est$sym, verbose = FALSE, standardize=est$standardize)$path for(i in 1:nlambda) est$merge[[i]] = est$merge[[i]] + tmp[[i]] rm(ind.sample,tmp) gc() } if(verbose){ cat("\n") mes = "Conducting Subsampling....done. " cat(mes, "\r") cat("\n") flush.console() } est$variability = rep(0,nlambda) for(i in 1:nlambda){ est$merge[[i]] = est$merge[[i]]/rep.num est$variability[i] = 4*sum(est$merge[[i]]*(1-est$merge[[i]]))/(d*(d-1)) } est$opt.index = max(which.max(est$variability >= stars.thresh)[1]-1,1) est$refit = est$path[[est$opt.index]] est$opt.lambda = est$lambda[est$opt.index] est$opt.sparsity = est$sparsity[est$opt.index] est$opt.icov = est$icov[[est$opt.index]] } est$criterion = criterion class(est) = "select" return(est) } } print.select = function(x, ...) 
{
  if(x$cov.input){
    cat("Model selection is not available when using the covariance matrix as input.\n")
    return(NULL)
  }
  if(!x$cov.input) {
    if(x$method == "clime") cat("Method: CLIME\n") else cat("Method: SLasso\n")
    cat("selection criterion:",x$criterion,"\n")
    cat("Graph dimension:",ncol(x$data),"\n")
    cat("sparsity level:", x$opt.sparsity,"\n")
    cat("optimal parameter:", x$opt.lambda,"\n")
  }
  if(x$criterion == "cv") cat("cross validation loss used:",x$loss)
}
plot.select = function(x, ...){
  if(x$cov.input){
    cat("Model selection is not available when using the covariance matrix as input.\n")
    return(NULL)
  }
  if(!x$cov.input) {
    par(mfrow=c(1,2), pty = "s", omi=c(0.3,0.3,0.3,0.3), mai = c(0.3,0.3,0.3,0.3))
    g = graph.adjacency(as.matrix(x$refit), mode="undirected", diag=FALSE)
    layout.grid = layout.fruchterman.reingold(g)
    plot(g, layout=layout.grid, edge.color='gray50',vertex.color="red", vertex.size=3, vertex.label=NA)
    plot(x$lambda, x$sparsity, log = "x", xlab = "Regularization Parameter", ylab = "Sparsity Level", type = "l",xlim = rev(range(x$lambda)), main = "Solution path sparsity levels")
    lines(x$opt.lambda,x$opt.sparsity,type = "p")
  }
}
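# --- Illustrative usage sketch (not from the original source) -------------------
# sugm.select() post-processes a solution path from the flare package's sugm();
# assuming flare is available, StARS-based selection might look like:
# library(flare)
# x <- matrix(rnorm(200 * 20), 200, 20)
# fit <- sugm(x, method = "tiger", nlambda = 10)
# sel <- sugm.select(fit, criterion = "stars", stars.thresh = 0.1, rep.num = 10)
# sel$opt.lambda; sel$opt.sparsity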
prune_sg<-function(g, level=1, verbose=FALSE) { if(!is(g,"sg")) stop("g not sg object.") if(is.null(level)) return(g) if(level<=0)return(g) g <- sg2sym(g) edges <- prune_c(g$edges, level, verbose) as.sg(edges,type=g$type, pars=g$parameters, note=c(g$note, paste("pruned with level=", as.integer(level),sep=""))) }
sample_CPT<-function(CPT,states) { probs<-1:ncol(CPT[[2]]) for(i in 1:length(states)) probs<-probs[-(which(!CPT[[2]][i,]==states[i]))] probs<-cumsum(CPT[[1]][,probs]) rando<-runif(1) if(length(probs)==0) {warning("State combination ",states," doesn't exist in CPT.") return(NA)} state<-names(probs)[min(which(probs>rando))] return(state) }
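# --- Illustrative worked example (not from the original source) -----------------
# sample_CPT() indexes CPT as a two-element list: CPT[[1]] is a matrix of conditional
# probabilities with the child states as row names and one column per parent-state
# combination; CPT[[2]] stores, row by parent, the state labels of each combination.
cpt <- list(
  matrix(c(0.7, 0.3,   # P(child | parent = "yes")
           0.2, 0.8),  # P(child | parent = "no")
         nrow = 2, dimnames = list(c("low", "high"), NULL)),
  matrix(c("yes", "no"), nrow = 1)
)
sample_CPT(cpt, states = "yes")  # returns "low" with probability 0.7, "high" with 0.3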
william <- function(numattable1, crds1, board1, round1, currentbet, mychips1, pot1, roundbets, blinds1, chips1, ind1, dealer1, tablesleft){ a1 = 0 if(mychips1 < 3*blinds1) a1 = mychips1 if((crds1[1,1] == 14) && (crds1[2,1] == 14)) a1 = mychips1 if((crds1[1,1] == 10) && (crds1[2,1] == 9)){ u1 = runif(1) if(u1 < .4) a1 = mychips1 if(u1 > .4) a1 = 0 } if(currentbet == blinds1) a1 = mychips1 if((crds1[1,1] == crds1[2,1]) && (crds1[1,1] > 11.5) && (numattable1<10)) a1 = mychips1 a1 }
hl2_test <- function(x, y, alternative = c("two.sided", "greater", "less"), delta = ifelse(var.test, 1, 0), method = c("asymptotic", "permutation", "randomization"), scale = c("S1", "S2"), n.rep = 10000, na.rm = FALSE, var.test = FALSE, wobble = FALSE, wobble.seed = NULL) { check_test_input(x = x, y = y, alternative = alternative, delta = delta, method = method, scale = scale, n.rep = n.rep, na.rm = na.rm, var.test = var.test, wobble = wobble, wobble.seed = wobble.seed, test.name = "hl2_test") dname <- paste(deparse(substitute(x)), "and", deparse(substitute(y))) alternative <- match.arg(alternative) scale <- match.arg(scale) prep <- preprocess_data(x = x, y = y, delta = delta, na.rm = na.rm, wobble = wobble, wobble.seed = wobble.seed, var.test = var.test) if (!all(is.na(prep))) { x <- prep$x y <- prep$y delta <- prep$delta } else { return(NA) } if (scale == "S1") { type <- "HL21" } else if (scale == "S2") { type <- "HL22" } method <- select_method(x = x, y = y, method = method, test.name = "hl2_test", n.rep = n.rep) if (method %in% c("permutation", "randomization")) { n.rep <- min(choose(length(x) + length(y), length(x)), n.rep) test.results <- compute_results_finite(x = x, y = y, alternative = alternative, delta = delta, method = method, type = type, n.rep = n.rep) } else if (method == "asymptotic") { test.results <- compute_results_asymptotic(x = x, y = y, alternative = alternative, delta = delta, type = type) } statistic <- test.results$statistic estimates <- test.results$estimates p.value <- test.results$p.value if (var.test) { names(estimates) <- c("HL2 of log(x^2) and log(y^2)") names(delta) <- "ratio of squared scale parameters" delta <- exp(delta) } else { names(estimates) <- c("HL2 of x and y") names(delta) <- "location shift" } names(statistic) <- ifelse(var.test, "S", "D") if (method == "randomization") { method <- paste0("Randomization test based on HL2-estimator ", "(", n.rep, " random permutations)") } else if (method == "permutation") { method <- "Exact permutation test based on HL2-estimator" } else method <- "Asymptotic test based on HL2-estimator" res <- list(statistic = statistic, parameter = NULL, p.value = p.value, estimate = estimates, null.value = delta, alternative = alternative, method = method, data.name = dname) class(res) <- "htest" return(res) }
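# --- Illustrative usage sketch (not from the original source) -------------------
# hl2_test() follows the stats::t.test() conventions and returns an "htest" object;
# assuming its helper functions (check_test_input, preprocess_data, select_method,
# compute_results_*) are available, a two-sample location test might look like:
# set.seed(1)
# x <- rnorm(30); y <- rnorm(30, mean = 1)
# hl2_test(x, y, alternative = "two.sided", method = "asymptotic")
# hl2_test(x, y, method = "permutation", n.rep = 1000)   # permutation variant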
.onLoad <- function(libname, pkgname) { options("DataPackageR_interact" = interactive()) options("DataPackageR_packagebuilding" = FALSE) }
create.dmdf=function(x,parameter,time.varying=NULL,fields=NULL) { last= -parameter$num begin.num=parameter$begin+1 chp = process.ch(x$data$ch) firstseen=chp$first nocc=x$nocc time.intervals=x$time.intervals if(is.null(x$data$begin.time)) begin.time=x$begin.time else begin.time=x$data$begin.time df=create.base.dmdf(x,parameter) if(is.null(df))return(NULL) if(length(begin.time)==1 & is.vector(time.intervals)) { timedf=data.frame(occ=1:nocc,time=begin.time+c(0,cumsum(time.intervals))) if(parameter$interval) if(!all(time.intervals==1)) timedf$time.interval=c(time.intervals,NA) df=merge(df,timedf,by="occ") timedf=data.frame(id=x$data$id,cohort=timedf$time[firstseen]) df=merge(df,timedf,by="id") }else { if(length(begin.time)==1) begin.time=rep(begin.time,nrow(x$data)) if(is.vector(time.intervals)) time.intervals=matrix(time.intervals,nrow=nrow(x$data),ncol=length(time.intervals),byrow=TRUE) cum.time.intervals=t(apply(time.intervals,1,cumsum)) times=begin.time+cbind(rep(0,nrow(x$data)),cum.time.intervals) cohort=rep(times[cbind(1:nrow(x$data),firstseen)],each=ncol(times)) if(!parameter$interval | all(time.intervals==1)) timedf=data.frame(occid=apply(expand.grid(occ=1:nocc,id=factor(1:nrow(x$data))),1,paste,collapse=""), cohort=cohort,time=as.vector(t(times))) else timedf=data.frame(occid=apply(expand.grid(occ=1:nocc,id=factor(1:nrow(x$data))),1,paste,collapse=""), cohort=cohort,time=as.vector(t(times)),time.interval=as.vector(t(cbind(time.intervals,rep(NA,nrow(x$data)))))) df$occid=paste(df$occ,df$id,sep="") df=merge(df,timedf,by="occid") df$idocc=NULL } if(any(time.intervals==0)) { firstocc=data.frame(id=factor(1:nrow(x$data)),firstocc=firstseen) df=merge(df,firstocc,by="id") } df=df[order(df$seq),] df$age=df$time-df$cohort if(!is.null(x$data$initial.age)) df$age=df$age+x$data$initial.age[df$id] times=df$time[df$id==1] tcv=NULL if(!is.null(time.varying)) { for (i in 1:length(time.varying)) { vnames=paste(time.varying[i],times,sep="") if( !all(vnames %in% names(x$data))) stop("Missing time varying variable ",paste(vnames[!vnames%in%names(x$data)],collapse=",")) if(i==1) tcv=data.frame(as.vector(t(as.matrix(x$data[, vnames])))) else tcv=cbind(tcv,as.vector(t(as.matrix(x$data[, vnames])))) x$data=x$data[,!names(x$data)%in%vnames] } names(tcv)=time.varying } if(!is.null(tcv)) df=cbind(df,tcv) if(is.null(fields)) fields=names(x$data)[!names(x$data)%in%c("ch","initial.age")] else fields=c(fields,"id") df=merge(df,x$data[,fields],by="id") df=df[order(df$seq),] df$seq=NULL df$Time=df$time-min(df$time) df$Cohort=df$cohort-min(df$cohort) df$age[df$age<0]=0 df$time=factor(df$time) df$Age=df$age df$age=factor(df$age) df$cohort=factor(df$cohort) if("group"%in%names(df)) levels(df$group)=apply(x$group.covariates,1,paste,collapse="") if(!is.null(x$strata_data)&!is.null(df$stratum)) df=cbind(df,x$strata_data) return(df) } create.base.dmdf=function(x,parameter) { last= -parameter$num begin.num=parameter$begin+1 nocc=x$nocc + parameter$num occasions=begin.num:(parameter$begin+nocc) sl=factor(x$strata.labels) nstrata=length(sl) if(nchar(x$model)>=4 & substr(x$model,1,4)=="MVMS") { if(!is.null(parameter$obs) & parameter$obs) dfl=mvms_design_data(x$strata.list$df.states,x$strata.list$df,transition=parameter$tostrata) else dfl=mvms_design_data(x$strata.list$df.states,transition=parameter$tostrata) df=expand.grid(occ=occasions,id=factor(1:nrow(x$data))) dfl=dfl[rep(1:nrow(dfl),each=nrow(df)),,drop=FALSE] df=cbind(df,dfl) if(parameter$tostrata) df=df[order(df$id,df$occ,df$stratum,df$tostratum),] else 
df=df[order(df$id,df$occ,df$stratum),] df$seq=1:nrow(df) rownames(df)=df$seq } else { if(!is.null(x$strata.list)) { st.index=which("states"==names(x$strata.list)) oth.index=which("states"!=names(x$strata.list)) oth.name=names(x$strata.list)[oth.index] oth=factor(rep(x$strata.list[[oth.index]],each=length(x$strata.list$states))) states=factor(rep(x$strata.list$states,times=length(x$strata.list[[oth.index]]))) if(parameter$whichlevel!=0) { if(parameter$whichlevel==1) sl=factor(x$strata.list$states) else sl=factor(x$strata.list[[oth.index]]) nstrata=length(sl) x$strata.list=NULL } } if(!parameter$bystratum) { df=expand.grid(occ=occasions,id=factor(1:nrow(x$data))) df$seq=1:nrow(df) } else { if(parameter$tostrata) { df=expand.grid(tostratum=sl[1:nstrata],stratum=sl[1:nstrata],occ=occasions,id=factor(1:nrow(x$data))) df=df[,c("stratum","tostratum","occ","id")] df$seq=1:nrow(df) } else { df=expand.grid(stratum=sl[1:nstrata],occ=occasions,id=factor(1:nrow(x$data))) df$seq=1:nrow(df) } if(!is.null(x$strata.list)) { state.df=data.frame(stratum=sl[1:nstrata],state=states,oth=oth) df=merge(df,state.df,by="stratum") if(parameter$tostrata) { state.df=data.frame(tostratum=sl[1:nstrata],tostate=states,tooth=oth) df=merge(df,state.df,by="tostratum") } if(parameter$whichlevel!=0) { if(parameter$whichlevel==1){ names(df)[names(df)=="stratum"]="state" names(df)[names(df)=="tostratum"]="tostate" } if(parameter$whichlevel==2) { names(df)[names(df)=="stratum"]=oth.name names(df)[names(df)=="tostratum"]=paste("to",oth.name,sep="") } }else { names(df)[names(df)=="oth"]=oth.name names(df)[names(df)=="tooth"]=paste("to",oth.name,sep="") } }else { if(parameter$whichlevel!=0) { if(parameter$whichlevel==1)names(df)[names(df)=="stratum"]="state" if(parameter$whichlevel==2) names(df)[names(df)=="stratum"]=oth.name if(parameter$whichlevel==1)names(df)[names(df)=="tostratum"]="tostate" if(parameter$whichlevel==2) names(df)[names(df)=="tostratum"]=paste("to",oth.name,sep="") } } } } return(df) }
setMethod(f = "terms", signature(x = "fREG"), definition = function(x, ...) { ans <- stats::terms(slot(x, "fit"), ...) ans })
pa2conr <- function(x) { fix=as.logical(x) x=replace(x,list=which(x==1),0) un=setdiff(unique(x),0) y=matrix(0,0,length(x)) for(i in un) { z=which(x==i) for(j in 2:length(z)) { k=rep(0,length(x)) k[z[1]]=1 k[z[j]]=-1 y=rbind(y,k) } } pa = list(free=fix,conr=y) return(pa) }
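# --- Illustrative worked example (not from the original source) -----------------
# pa2conr() turns a pattern vector into a free-parameter indicator plus equality
# constraints: nonzero entries are marked free, and entries sharing the same value
# greater than 1 are additionally constrained equal via rows of +1/-1 contrasts.
pa2conr(c(0, 1, 2, 2))
# $free : FALSE TRUE TRUE TRUE
# $conr : one row, c(0, 0, 1, -1)  -- forces parameter 3 == parameter 4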
if (interactive()) savehistory(); library("aroma.affymetrix"); log <- Verbose(threshold=-10, timestamp=TRUE); chipType <- "Mapping50K_Xba240"; footer <- list( createdOn = format(Sys.time(), "%Y%m%d %H:%M:%S", usetz=TRUE), createdBy = list( fullname = "Henrik Bengtsson", email = sprintf("%s@%s", "henrik.bengtsson", "aroma-project.org") ), srcFiles = list() ); cdf <- AffymetrixCdfFile$byChipType(chipType); print(cdf); ptb <- AffymetrixProbeTabFile$byChipType(chipType); print(ptb); acs <- AromaCellSequenceFile$allocateFromCdf(cdf, tags="*,HB20080803"); print(acs); importFrom(acs, ptb, verbose=log); inferMmFromPm(acs, cdf=cdf, verbose=log);
pptxListInput=function(id){ ns=NS(id) tagList( checkboxInput("showpreprocessing","show preprocessing"), conditionalPanel(condition="input.showpreprocessing==true", textAreaInput(ns("preprocessing"),"preprocessing",value="",width='100%',height = '100%') ), uiOutput(ns("pptListUI")) ) }
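# --- Illustrative usage sketch (not from the original source) -------------------
# pptxListInput() is the UI half of a Shiny module; it would sit in a UI definition
# and be paired with the module's server function (name assumed here).
# library(shiny)
# ui <- fluidPage(pptxListInput("doc"))
# server <- function(input, output, session) {
#   # callModule(pptxList, "doc")   # hypothetical server counterpart
# }
# shinyApp(ui, server)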
context("Testing qsub_lapply") skip_on_cran() if (Sys.getenv("PRISM_HOST") != "") { host <- Sys.getenv("PRISM_HOST") remote_tmp_path <- Sys.getenv("PRISM_REMOTEPATH") qsub_config <- create_qsub_config( remote = host, local_tmp_path = tempfile(), remote_tmp_path = remote_tmp_path ) } else if (file.exists(config_file_location())) { qsub_config <- get_default_qsub_config() } else { qsub_config <- NULL } if (!is.null(qsub_config)) { test_that("qsub_lapply works", { out <- qsub_lapply(2:4, function(i) i + 1, qsub_config = qsub_config) expect_equal(out, list(3,4,5)) }) test_that("qsub_lapply works with characters X", { out <- qsub_lapply(letters, function(char) paste0("__", char, "__"), qsub_config = qsub_config) expect_equal(out, as.list(paste0("__", letters, "__"))) }) test_that("batch_tasks functionality", { out <- qsub_lapply( X = seq_len(100000), FUN = function(i) i + 1, qsub_config = override_qsub_config(qsub_config, batch_tasks = 10000) ) expect_equal(out, as.list(seq_len(100000) + 1)) }) test_that("wait functionality works", { handle <- qsub_lapply(2:4, function(i) i + 1, qsub_config = override_qsub_config(qsub_config, wait = FALSE)) out <- qsub_retrieve(handle) expect_equal(out, list(3,4,5)) }) test_that("postfun functionality works", { handle <- qsub_lapply(2:4, function(i) i + 1, qsub_config = override_qsub_config(qsub_config, wait = FALSE)) out <- qsub_retrieve(handle, post_fun = function(index, out) out * 2) expect_equal(out, list(6,8,10)) }) test_that("postfun in combination with batch tasks works", { handle <- qsub_lapply( X = seq_len(100000), FUN = function(i) i + 1, qsub_config = override_qsub_config(qsub_config, wait = FALSE, batch_tasks = 10000) ) out <- qsub_retrieve(handle, post_fun = function(index, out) out * 2) expect_equal(out, as.list((seq_len(100000) + 1)*2)) }) test_that("environment objects are passed correctly", { y <- 10 out <- qsub_lapply( X = seq_len(4)+5, FUN = function(i) i + y, qsub_config = override_qsub_config(qsub_config), qsub_environment = c("y") ) expect_equal(out, as.list(seq_len(4) + 15)) }) }
plot_evpi <- function(EVPIresults, decision_vars, input_table = NULL, new_names = NULL, unit = NULL, x_axis_name = "Expected Value of Perfect Information", y_axis_name = NULL, bar_color = "cadetblue", base_size = 11, ...){ assertthat::assert_that("EVPI_outputs" %in% class(EVPIresults), msg = "EVPIresults is not class 'EVPI_outputs', please provide a valid object. This does not appear to have been generated with the 'multi_EVPI' function.") if (!is.null(input_table)) assertthat::assert_that(any(class(input_table) %in% c("tbl_df", "tbl", "data.frame")), msg = "The input_table is not a data.frame or tibble (tbl, tbl_df) class, please provide a valid object.") full_evpi_data <- NULL for (i in 1:length(EVPIresults)) { data <- EVPIresults[[i]] data["output_variable"] <- names(EVPIresults)[i] if (is.null(full_evpi_data)) full_evpi_data <- data else full_evpi_data <- dplyr::bind_rows(full_evpi_data, data) } rownames(full_evpi_data) <- NULL if (!(is.null(input_table))) combined_table <- dplyr::left_join(full_evpi_data, input_table, by = c( "variable" = "variable")) else combined_table <- full_evpi_data assertthat::assert_that(any(decision_vars %in% combined_table$output_variable), msg = "The names provided for decision_vars do not match the names in the EVPIresults. Make sure that they are in the EVPIresults and are spelled correctly.") filtered_table <- dplyr::filter(combined_table, EVPI > 0) data <- dplyr::filter(filtered_table, output_variable %in% decision_vars) if(nrow(data) == 0) { warning("There are no variables with a positive EVPI. You probably do not need a plot for that.", call. = FALSE) return(invisible(NULL)) } if (is.null(new_names)) decision_labels <- decision_vars else decision_labels <- new_names data$output_variable <- factor(data$output_variable, levels = decision_vars, labels = decision_labels) if (!is.null(input_table)) y_axis <- "label" else y_axis <- "variable" if (is.null(unit)) unit <- "" ggplot2::ggplot(data, ggplot2::aes(x = EVPI, y = stats::reorder(!!ggplot2::ensym(y_axis), EVPI))) + ggplot2::geom_col(fill = bar_color) + ggplot2::scale_x_continuous(expand = ggplot2::expansion(mult = c(0, 0.01)), labels = scales::dollar_format(prefix = unit)) + ggplot2::labs(y = y_axis_name, x = x_axis_name) + ggplot2::facet_wrap( ~ output_variable, scales = "free") + ggplot2::theme_bw(base_size = base_size) + ggplot2::theme(strip.background = ggplot2::element_blank()) + ggplot2::theme(...) }
test_that("tangram", { scene <- system.file("examples/tangram/www/scene.yaml", package = "leaflet.extras2") m <- leaflet() %>% addTangram(scene = scene, group = "tangram") %>% setView(11, 49.4, 14) expect_is(m, "leaflet") deps <- findDependencies(m) expect_equal(deps[[length(deps)]]$name, "lfx-tangram") expect_equal(m$x$calls[[length(m$x$calls)]]$method, "addTangram") }) test_that("tangram-error", { expect_error( leaflet() %>% addTangram() ) expect_error( leaflet() %>% addTangram(scene = NULL) ) expect_error( leaflet() %>% addTangram(scene = "scene") ) })
dbIsReadOnly_DBIConnector <- function(dbObj, ...) {
  # the slot access was mangled into "[email protected]" by e-mail obfuscation in the source;
  # .drv is the driver slot of DBI's DBIConnector class
  dbIsReadOnly(dbObj@.drv, ...)
}
setMethod("dbIsReadOnly", signature("DBIConnector"), dbIsReadOnly_DBIConnector)
Echidna_simulate_repertoire <- function(initial.size.of.repertoire, species, cell.type, cd4.proportion, duration.of.evolution, complete.duration, vdj.productive, vdj.model, vdj.insertion.mean, vdj.insertion.stdv, vdj.branch.prob, clonal.selection, cell.division.prob, sequence.selection.prob, special.v.gene, class.switch.prob, class.switch.selection.dependent, class.switch.independent, SHM.method, SHM.nuc.prob, SHM.isotype.dependent, SHM.phenotype.dependent, max.cell.number, max.clonotype.number, death.rate, igraph.on, transcriptome.on, transcriptome.switch.independent, transcriptome.switch.prob, transcriptome.switch.isotype.dependent, transcriptome.switch.SHM.dependent, transcriptome.switch.selection.dependent, transcriptome.states, transcriptome.noise, seq.name ){ load(url("https://polybox.ethz.ch/index.php/s/zETU3ruyfTjj8T8/download")) class_switch_prob_hum <- class_switch_prob_hum class_switch_prob_mus <- class_switch_prob_mus hum_b_h <- hum_b_h hum_b_l <- hum_b_l hum_b_trans <- hum_b_trans hum_t_h <- hum_t_h hum_t_l <- hum_t_l hum_t_trans <- hum_t_trans iso_SHM_prob <- iso_SHM_prob mus_b_h <- mus_b_h mus_b_l <- mus_b_l mus_b_trans <- mus_b_trans mus_t_h <- mus_t_h mus_t_l <- mus_t_l mus_t_trans <- mus_t_trans pheno_SHM_prob <- pheno_SHM_prob productive_seq <- productive_seq special_v <- special_v trans_switch_prob_b <- trans_switch_prob_b trans_switch_prob_t <- trans_switch_prob_t vae_seq <- vae_seq if(missing(species)) species<-"mus" if(missing(cell.type)) cell.type<-"B" if(missing(cd4.proportion)) cd4.proportion<-1 if(missing(vdj.productive)) vdj.productive<-"random" if(missing(initial.size.of.repertoire)) initial.size.of.repertoire <- 10 if(missing(vdj.model)) vdj.model <- "naive" if(missing(vdj.insertion.mean)) vdj.insertion.mean<-0.1 if(missing(vdj.insertion.stdv)) vdj.insertion.stdv<-0.05 if(missing(duration.of.evolution)) duration.of.evolution<-20 if(missing(complete.duration)) complete.duration<-T if(missing(vdj.branch.prob)) vdj.branch.prob<-0.2 if(missing(clonal.selection)) clonal.selection<-F if(missing(cell.division.prob)) cell.division.prob<-c(0.1,0.1) if(missing(sequence.selection.prob)) sequence.selection.prob<-0.01 if(missing(special.v.gene)) special.v.gene<-F if(missing(class.switch.prob)&species=="mus") class.switch.prob<-class_switch_prob_mus if(missing(class.switch.prob)&species=="hum") class.switch.prob<-class_switch_prob_hum if(missing(class.switch.selection.dependent)) class.switch.selection.dependent<-F if(missing(class.switch.independent)) class.switch.independent<-T if(missing(SHM.nuc.prob)) SHM.nuc.prob<-0.001 if(missing(SHM.method)) SHM.method<-"naive" if(missing(SHM.isotype.dependent)) SHM.isotype.dependent<-F if(missing(SHM.phenotype.dependent)) SHM.phenotype.dependent<-F if(missing(max.cell.number)) max.cell.number<-1500 if(missing(max.clonotype.number)) max.clonotype.number<-20 if(missing(death.rate)) death.rate<-0.001 if(missing(igraph.on)&cell.type=="T") igraph.on<-F if(missing(igraph.on)&cell.type=="B") igraph.on<-T if(missing(transcriptome.on)) transcriptome.on<-T if(missing(transcriptome.switch.independent)) transcriptome.switch.independent<-T if(missing(transcriptome.switch.prob)&cell.type=="B") transcriptome.switch.prob<-trans_switch_prob_b if(missing(transcriptome.switch.prob)&cell.type=="T") transcriptome.switch.prob<-trans_switch_prob_t if(missing(transcriptome.switch.isotype.dependent)) transcriptome.switch.isotype.dependent<-F if(missing(transcriptome.switch.SHM.dependent)) transcriptome.switch.SHM.dependent<-F 
if(missing(transcriptome.switch.selection.dependent)) transcriptome.switch.selection.dependent<-F if(missing(transcriptome.states)&species=="mus"&cell.type=="B") transcriptome.states<-mus_b_trans if(missing(transcriptome.states)&species=="hum"&cell.type=="B") transcriptome.states<-hum_b_trans if(missing(transcriptome.states)&species=="mus"&cell.type=="T") transcriptome.states<-mus_t_trans if(missing(transcriptome.states)&species=="hum"&cell.type=="T") transcriptome.states<-hum_t_trans if(missing(transcriptome.noise)) transcriptome.noise<-"rnorm(nrow(transcriptome.states), mean = 1, sd = 0.3)" if(missing(seq.name)) seq.name<-F if(is.logical(transcriptome.switch.independent) & is.logical(transcriptome.switch.isotype.dependent) &is.logical( transcriptome.switch.SHM.dependent)==F){ stop("transcriptome.switch.* is a T or F input") } if(!(nrow(transcriptome.states)==length(eval(parse(text=transcriptome.noise))))){ stop("transcriptome.noise vector length different from transcriptome gene length") } if(length(transcriptome.noise)>1&length(transcriptome.noise)!=ncol(transcriptome.states)){ stop("plase assgin as many transcriptome.noise vector items as the number of transcriptome.states or just assign a common one") } if(!missing(transcriptome.states)) cd4.proportion<-1 if(all(!(rownames(transcriptome.switch.prob) == colnames(transcriptome.switch.prob)&rownames(transcriptome.switch.prob) == colnames(transcriptome.states)))){ stop("transcriptome.state colnames and transcriptome.switch.prob colnames and rownames should be the same") } if(length(cell.division.prob)==1){ cell.division.prob<-c(cell.division.prob,cell.division.prob) } if(max(cell.division.prob)>1){ stop("Any value in cell.division.prob should less than 1") } barcode<-c() raw_contig_id<-c() chain<-c() v_gene<-c() d_gene<-c() j_gene<-c() c_gene<-c() v_seq<-c() d_seq<-c() j_seq<-c() ref_seq<-c() raw_clonotype_id<-c() clonotype_num<-initial.size.of.repertoire seq<-c() Length<-c() clonotype_id<-c() seq_combi<-c() barcode_uniq<-c() gen<-c() isotype<-c() seq.number<-c() seq.history<-c() igraph.index<-list() barcode.history<-c() igraph.index.attr<-list() igraph_list<-list() igraph.index.jr<-list() colors<-colors pie.values.list<-list() pie.values<-list() trans_dis<-c() trans_ls<-list() trans_state<-c() trans_state_his<-c() igraph_list_trans<-list() pie_values_trans_list<-list() cd4_prob<-cd4.proportion cdr3<-c() cdr3_nt<-c() selected_seq<-c() selected_rate<-c() if(length(transcriptome.noise)>1){ for (i in 1:ncol(transcriptome.states)){ trans_dis[i]<-paste0("transcriptome.states[",i,"]*",transcriptome.noise[i]) } } if(length(transcriptome.noise)==1){ for (i in 1:ncol(transcriptome.states)){ trans_dis[i]<-paste0("transcriptome.states[",i,"]*",transcriptome.noise) } } if(cell.type=="B" & species=="mus" | species=="mouse" | species=="blc6"){ if(vdj.productive=="random"){ seq_input_h<-mus_b_h seq_input_l<-mus_b_l } if(vdj.productive=="naive"){ seq_input_h<-productive_seq$mus_b_h seq_input_l<-productive_seq$mus_b_l } if(vdj.productive=="vae"){ seq_input_h<-vae_seq$mus_b_h seq_input_l<-vae_seq$mus_b_l } c_genename_h<-"IGHM" } if(cell.type=="B" & species=="hum" | species=="human"){ if(vdj.productive=="random"){ seq_input_h<-hum_b_h seq_input_l<-hum_b_l } if(vdj.productive=="naive"){ seq_input_h<-productive_seq$hum_b_h seq_input_l<-productive_seq$hum_b_l } c_genename_h<-"IGHM" if(vdj.productive=="vae"){ seq_input_h<-vae_seq$hum_b_h seq_input_l<-vae_seq$hum_b_l } c_genename_h<-"IGHM" } if(cell.type=="T" & species=="mus" | species=="mouse" | species=="blc6" 
){ if(vdj.productive=="random"){ seq_input_h<-mus_t_h seq_input_l<-mus_t_l } if(vdj.productive=="naive"){ seq_input_h<-productive_seq$mus_t_h seq_input_l<-productive_seq$mus_t_l } if(vdj.productive=="vae"){ seq_input_h<-vae_seq$mus_t_h seq_input_l<-vae_seq$mus_t_l } c_genename_h<-"TRBC" } if(cell.type=="T" & species=="hum" | species=="human"){ if(vdj.productive=="random"){ seq_input_h<-hum_t_h seq_input_l<-hum_t_l } if(vdj.productive=="naive"){ seq_input_h<-productive_seq$hum_t_h seq_input_l<-productive_seq$hum_t_l } if(vdj.productive=="vae"){ seq_input_h<-vae_seq$hum_t_h seq_input_l<-vae_seq$hum_t_l } c_genename_h<-"TRBC" } barcode<-.GENERATE.BARCODE(initial.size.of.repertoire) for(i in 1:initial.size.of.repertoire){ if(vdj.productive=="random"){ indV <- sample(x = 1:nrow(seq_input_h[[1]]),size = 1,replace = FALSE) indD <- sample(x = 1:nrow(seq_input_h[[2]]),size = 1,replace=FALSE) indJ <- sample(x = 1:nrow(seq_input_h[[3]]),size = 1,replace=FALSE) indVL <- sample(x = 1:nrow(seq_input_l[[1]]),size = 1,replace = FALSE) indJL <- sample(x = 1:nrow(seq_input_l[[2]]),size=1,replace=FALSE) seq[2*i-1]<- as.character(.VDJ_RECOMBIN_FUNCTION(as.character(seq_input_h[[1]][[2]][indV]),as.character(seq_input_h[[2]][[2]][indD]),as.character(seq_input_h[[3]][[2]][indJ]), method=vdj.model, chain.type="heavy", species=species, vdj.insertion.mean=vdj.insertion.mean, vdj.insertion.stdv=vdj.insertion.stdv)) seq[2*i] <- as.character(.VDJ_RECOMBIN_FUNCTION(as.character(seq_input_l[[1]][[2]][indVL]),"",as.character(seq_input_l[[2]][[2]][indJL]), method=vdj.model, chain.type="light", species=species, vdj.insertion.mean=vdj.insertion.mean, vdj.insertion.stdv=vdj.insertion.stdv)) v_gene[2*i-1]<-as.character(seq_input_h[[1]][[1]][indV]) v_gene[2*i]<-as.character(seq_input_l[[1]][[1]][indVL]) d_gene[2*i-1]<-as.character(seq_input_h[[2]][[1]][indD]) d_gene[2*i]<-"None" j_gene[2*i-1]<-as.character(seq_input_h[[3]][[1]][indJ]) j_gene[2*i]<-as.character(seq_input_l[[2]][[1]][indJL]) c_gene[2*i-1]<-c_genename_h c_gene[2*i]<-paste0(stringr::str_sub(v_gene[2*i],1,3),"C") v_seq[2*i-1]<-as.character(seq_input_h[[1]][[2]][indV]) v_seq[2*i]<-as.character(seq_input_l[[1]][[2]][indVL]) d_seq[2*i-1]<-as.character(seq_input_h[[2]][[2]][indD]) d_seq[2*i]<-"None" j_seq[2*i-1]<-as.character(seq_input_h[[3]][[2]][indJ]) j_seq[2*i]<-as.character(seq_input_l[[2]][[2]][indJL]) ref_seq[2*i-1]<-seq[2*i-1] ref_seq[2*i]<-seq[2*i] } if(vdj.productive=="naive"|vdj.productive=="vae"){ indH<-sample(x=1:nrow(seq_input_h),size=1) indL<-sample(x=1:nrow(seq_input_l),size=1) seq[2*i-1]<- tolower(seq_input_h$targetSequences[indH]) seq[2*i] <- tolower(seq_input_l$targetSequences[indL]) v_seq[2*i-1]<-tolower(seq_input_h$v_seq[indH]) v_seq[2*i]<-tolower(seq_input_l$v_seq[indL]) indH<-which(toupper(seq[2*i-1])==seq_input_h$targetSequences) indL<-which(toupper(seq[2*i])==seq_input_l$targetSequences) v_gene[2*i-1]<-seq_input_h$bestVHit[indH] v_gene[2*i]<-seq_input_l$bestVHit[indL] d_gene[2*i-1]<-seq_input_h$bestDHit[indH] d_gene[2*i]<-"None" j_gene[2*i-1]<-seq_input_h$bestJHit[indH] j_gene[2*i]<-seq_input_l$bestJHit[indL] c_gene[2*i-1]<-c_genename_h c_gene[2*i]<-paste0(stringr::str_sub(v_gene[2*i],1,3),"C") cdr3[2*i-1]<-seq_input_h$aaSeqCDR3[indH] cdr3[2*i]<-seq_input_h$aaSeqCDR3[indL] cdr3_nt[2*i-1]<-seq_input_h$nSeqCDR3[indH] cdr3_nt[2*i]<-seq_input_h$nSeqCDR3[indL] ref_seq[2*i-1]<-seq[2*i-1] ref_seq[2*i]<-seq[2*i] } chain[2*i-1]<-stringr::str_sub(v_gene[2*i-1],1,3) chain[2*i]<-stringr::str_sub(v_gene[2*i],1,3) Length[2*i-1]<-nchar(seq[2*i-1]) 
Length[2*i]<-nchar(seq[2*i]) raw_contig_id[2*i-1]<-paste(barcode[2*i-1],"contig_1",sep="_") raw_contig_id[2*i]<-paste(barcode[2*i],"contig_2",sep="_") raw_clonotype_id[2*i-1]<-paste0("clonotype",i) raw_clonotype_id[2*i]<-paste0("clonotype",i) seq_combi[i]<- paste0(chain[2*i-1],":",seq[2*i-1],";",chain[2*i],":",seq[2*i]) clonotype_id[i]<- paste0("clonotype",i) barcode_uniq[i]<-barcode[2*i] gen[i]<-1 seq.number[i]<-i seq.history[i]<-seq_combi[i] igraph.index[[i]]<-c(seq.number[i]) isotype[i]<-1 trans_state[i]<-1 if(cell.type=="T"&length(cd4_prob)>0){ trans_state[i]<-sample(x=c(1,4),size = 1,replace = T, prob = c(cd4_prob,1-cd4_prob)) } trans_state_his[i]<- trans_state[i] if(length(cell.division.prob)>2&special.v.gene==T){ pei<-which(special_v[,1]==v_gene[2*i-1]) pei2<-which(special_v[,2]==v_gene[2*i]) exe<-F if(length(pei)>0){ if(length(pei2)>0|any(special_v[pei,2]=="")) exe<-T } if(length(pei2)>0){ if(length(pei)>0|any(special_v[pei2,1]=="")) exe<-T } if(exe==T){ special_row<-c(pei,pei2)[which(c(pei,pei2)>0)[1]] seq_selection<-sample(x=c(0,1),size = 1,replace = T, prob = c(special_v[,3][special_row],1-special_v[,3][special_row])) if(seq_selection==0){ selected_rate<-c(selected_rate,stats::runif(1,cell.division.prob[3],cell.division.prob[length(cell.division.prob)])) selected_seq<-c(selected_seq,seq.number[i]) } } } } if(length(cell.division.prob)>2){ seq_selection<-sample(x=c(0,1),size = length(seq.number),replace = T, prob = c(sequence.selection.prob,1-sequence.selection.prob)) selected_rate<-c(selected_rate,stats::runif(sum(seq_selection==0),cell.division.prob[3],cell.division.prob[length(cell.division.prob)])) selected_seq<-c(selected_seq,seq.number[which(seq_selection==0)]) if(special.v.gene==T){ selected_rate<-selected_rate[!duplicated(selected_seq)] selected_seq<-selected_seq[!duplicated(selected_seq)] } } barcode.history<-barcode_uniq if(duration.of.evolution>=1){ for(i in 1:duration.of.evolution){ is_new_VDJ <- sample(x=c(0,1), replace=TRUE,size = 1, prob=c(vdj.branch.prob,1- vdj.branch.prob)) if (is_new_VDJ==0 & length(barcode_uniq)<max.cell.number & clonotype_num< max.clonotype.number){ clonotype_num<-clonotype_num+1 l<-length(barcode) barcode<-.ADD.BARCODE(barcode) if(vdj.productive=="random"){ indV <- sample(x = 1:nrow(seq_input_h[[1]]),size = 1,replace = FALSE) indD <- sample(x = 1:nrow(seq_input_h[[2]]),size = 1,replace=FALSE) indJ <- sample(x = 1:nrow(seq_input_h[[3]]),size = 1,replace=FALSE) indVL <- sample(x = 1:nrow(seq_input_l[[1]]),size = 1,replace = FALSE) indJL <- sample(x = 1:nrow(seq_input_l[[2]]),size=1,replace=FALSE) seq[l+1]<- as.character(.VDJ_RECOMBIN_FUNCTION(as.character(seq_input_h[[1]][[2]][indV]),as.character(seq_input_h[[2]][[2]][indD]),as.character(seq_input_h[[3]][[2]][indJ]), method=vdj.model, chain.type="heavy", species=species, vdj.insertion.mean=vdj.insertion.mean, vdj.insertion.stdv=vdj.insertion.stdv)) seq[l+2] <- as.character(.VDJ_RECOMBIN_FUNCTION(as.character(seq_input_l[[1]][[2]][indVL]),"",as.character(seq_input_l[[2]][[2]][indJL]), method=vdj.model, chain.type="light", species=species, vdj.insertion.mean=vdj.insertion.mean, vdj.insertion.stdv=vdj.insertion.stdv)) v_gene[l+1]<-as.character(seq_input_h[[1]][[1]][indV]) v_gene[l+2]<-as.character(seq_input_l[[1]][[1]][indVL]) d_gene[l+1]<-as.character(seq_input_h[[2]][[1]][indD]) d_gene[l+2]<-"None" j_gene[l+1]<-as.character(seq_input_h[[3]][[1]][indJ]) j_gene[l+2]<-as.character(seq_input_l[[2]][[1]][indJL]) c_gene[l+1]<-c_genename_h c_gene[l+2]<-paste0(stringr::str_sub(v_gene[l+2],1,3),"C") 
v_seq[l+1]<-as.character(seq_input_h[[1]][[2]][indV]) v_seq[l+2]<-as.character(seq_input_l[[1]][[2]][indVL]) d_seq[l+1]<-as.character(seq_input_h[[2]][[2]][indD]) d_seq[l+2]<-"None" j_seq[l+1]<-as.character(seq_input_h[[3]][[2]][indJ]) j_seq[l+2]<-as.character(seq_input_l[[2]][[2]][indJL]) r<-length(ref_seq) ref_seq[r+1]<-seq[l+1] ref_seq[r+2]<-seq[l+2] } if(vdj.productive=="naive"|vdj.productive=="vae"){ indH<-sample(x=1:nrow(seq_input_h),size=1) indL<-sample(x=1:nrow(seq_input_l),size=1) seq[l+1]<- tolower(seq_input_h$targetSequences[indH]) seq[l+2] <- tolower(seq_input_l$targetSequences[indL]) v_seq[l+1]<-tolower(seq_input_h$v_seq[indH]) v_seq[l+2]<-tolower(seq_input_l$v_seq[indL]) v_gene[l+1]<-seq_input_h$bestVHit[indH] v_gene[l+2]<-seq_input_l$bestVHit[indL] d_gene[l+1]<-seq_input_h$bestDHit[indH] d_gene[l+2]<-"None" j_gene[l+1]<-seq_input_h$bestJHit[indH] j_gene[l+2]<-seq_input_l$bestJHit[indL] c_gene[l+1]<-c_genename_h c_gene[l+2]<-paste0(stringr::str_sub(v_gene[l+2],1,3),"C") cdr3[l+1]<-seq_input_h$aaSeqCDR3[indH] cdr3[l+2]<-seq_input_l$aaSeqCDR3[indL] cdr3_nt[l+1]<-seq_input_h$nSeqCDR3[indH] cdr3_nt[l+2]<-seq_input_l$nSeqCDR3[indL] r<-length(ref_seq) ref_seq[r+1]<-seq[l+1] ref_seq[r+2]<-seq[l+2] } chain[l+1]<-stringr::str_sub(v_gene[l+1],1,3) chain[l+2]<-stringr::str_sub(v_gene[l+2],1,3) Length[l+1]<-nchar(seq[l+1]) Length[l+2]<-nchar(seq[l+2]) raw_contig_id[l+1]<-paste(barcode[l+1],"contig_1",sep="_") raw_contig_id[l+2]<-paste(barcode[l+2],"contig_2",sep="_") raw_clonotype_id[l+1]<-paste("clonotype",clonotype_num,sep="") raw_clonotype_id[l+2]<-paste("clonotype",clonotype_num,sep="") seq_combi[0.5*l+1]<- paste(chain[l+1],":",seq[l+1],";",chain[l+2],":",seq[l+2],sep="") clonotype_id[0.5*l+1]<- raw_clonotype_id[l+1] barcode_uniq[0.5*l+1]<- barcode[l+2] gen[0.5*l+1]<-1 trans_state[0.5*l+1]<-1 hisl<-length(seq.history) barcode.history[hisl+1]<-barcode_uniq[0.5*l+1] seq.number[hisl+1]<-max(seq.number)+1 seq.history[hisl+1]<-seq_combi[0.5*l+1] igraph.index[[clonotype_num]]<-c(seq.number[hisl+1]) isotype[hisl+1]<-1 if(cell.type=="T"&length(cd4_prob)>0){ trans_state[0.5*l+1]<-sample(x=c(1,4),size = 1,replace = T, prob = c(cd4_prob,1-cd4_prob)) } trans_state_his[hisl+1]<-trans_state[0.5*l+1] if(length(cell.division.prob)>2){ seq_selection<-sample(x=c(0,1),size = 1, prob = c(sequence.selection.prob,1-sequence.selection.prob)) if(special.v.gene==T){ pei<-which(special_v[,1]==v_gene[l+1]) pei2<-which(special_v[,2]==v_gene[l+2]) exe<-F if(length(pei)>0){ if(length(pei2)>0|any(special_v[pei,2]=="")) exe<-T } if(length(pei2)>0){ if(length(pei)>0|any(special_v[pei2,1]=="")) exe<-T } if(exe==T){ special_row<-c(pei,pei2)[which(c(pei,pei2)>0)[1]] seq_selection<-sample(x=c(0,1),size = 1,replace = T, prob = c(special_v[,3][special_row],1-special_v[,3][special_row])) if(seq_selection==0){ selected_rate<-c(selected_rate,stats::runif(1,cell.division.prob[3],cell.division.prob[length(cell.division.prob)])) selected_seq<-c(selected_seq,seq.number[hisl+1]) } } } if(special.v.gene==F){ if(seq_selection==0){ selected_rate<-c(selected_rate,stats::runif(1,cell.division.prob[3],cell.division.prob[length(cell.division.prob)])) selected_seq<-c(selected_seq,seq.number[hisl+1]) } } } } if(complete.duration==F&!(length(barcode_uniq)<max.cell.number & clonotype_num< max.clonotype.number)) break if(length(barcode_uniq)<max.cell.number & clonotype_num< max.clonotype.number){ for(j in 1:length(barcode_uniq)){ if(clonal.selection==F){ is_cell_division<- .CELL.DIVISION.LINEAR.INVERSE(clonotype_id,j,cell.division.prob) } 
if(clonal.selection==T){ is_cell_division<- .CELL.DIVISION.LINEAR(clonotype_id,j,cell.division.prob) } if(length(cell.division.prob)>2){ seq_number_j<-seq.number[which(barcode.history==barcode_uniq[j])] if(length(which(selected_seq==seq_number_j))>0){ rate_j<-selected_rate[which(selected_seq==seq_number_j)] is_cell_division<-sample(x=c(0,1),size=1,prob =c(rate_j,1-rate_j)) } } if (is_cell_division==0&length(barcode_uniq)<max.cell.number & clonotype_num< max.clonotype.number ){ l<-length(barcode) barcode<-.ADD.BARCODE(barcode) seq[l+1]<-seq[2*j-1] seq[l+2]<-seq[2*j] Length[l+1]<-nchar(seq[2*j-1]) Length[l+2]<-nchar(seq[2*j]) raw_contig_id[l+1]<-paste(barcode[l+1],"contig_1",sep="_") raw_contig_id[l+2]<-paste(barcode[l+2],"contig_2",sep="_") raw_clonotype_id[l+1]<-clonotype_id[j] raw_clonotype_id[l+2]<-clonotype_id[j] if(vdj.productive=="random"){ d_seq[l+1]<-d_seq[2*j-1] d_seq[l+2]<-"None" j_seq[l+1]<-j_seq[2*j-1] j_seq[l+2]<-j_seq[2*j] } v_seq[l+1]<-v_seq[2*j-1] v_seq[l+2]<-v_seq[2*j] v_gene[l+1]<-v_gene[2*j-1] v_gene[l+2]<-v_gene[2*j] d_gene[l+1]<-d_gene[2*j-1] d_gene[l+2]<-"None" j_gene[l+1]<-j_gene[2*j-1] j_gene[l+2]<-j_gene[2*j] c_gene[l+1]<-c_gene[2*j-1] c_gene[l+2]<-c_gene[2*j] if(vdj.productive=="naive"|vdj.productive=="vae"){ cdr3[l+1]<-cdr3[2*j-1] cdr3[l+2]<-cdr3[2*j] cdr3_nt[l+1]<-cdr3_nt[2*j-1] cdr3_nt[l+2]<-cdr3_nt[2*j] } chain[l+1]<-stringr::str_sub(v_gene[l+1],1,3) chain[l+2]<-stringr::str_sub(v_gene[l+2],1,3) trans_state[0.5*l+1]<-trans_state[j] seq_combi[0.5*l+1]<- paste(chain[l+1],":", seq[l+1],";",chain[l+2],":",seq[l+2],sep ="") clonotype_id[0.5*l+1]<- raw_clonotype_id[l+1] barcode_uniq[0.5*l+1]<-barcode[l+1] gen[0.5*l+1]<-gen[j]+1 n<- which(barcode.history==barcode_uniq[j]) hisl<-length(seq.history) isotype[hisl+1]<-isotype[n] barcode.history[hisl+1]<-barcode[l+1] seq.number[hisl+1]<-seq.number[n] seq.history[hisl+1]<-seq.history[n] trans_state_his[hisl+1]<-trans_state_his[n] } } } if(complete.duration==F&!(length(barcode_uniq)<max.cell.number & clonotype_num< max.clonotype.number)) break trans_switch<-rep(F,length(barcode_uniq)) if(transcriptome.switch.independent== T){ for(k in 1: length(barcode_uniq)) { switched<-.TRANS.SWITCH(trans_state[k],transcriptome.switch.prob) if(switched[[2]]){ trans_state[k]<-switched[[1]] m<-which(barcode.history==barcode_uniq[k]) trans_state_his[m]<-switched[[1]] trans_switch[k]<-T } } } if(transcriptome.switch.selection.dependent==T){ for(k in 1: length(barcode_uniq)) { m<-which(barcode.history==barcode_uniq[k]) if(length(which(selected_seq==seq.number[m]))>0&trans_state[k]==1){ switched<-.TRANS.SWITCH.DEPENDANT(trans_state[k],transcriptome.switch.prob) trans_state[k]<-switched[[1]] m<-which(barcode.history==barcode_uniq[k]) trans_state_his[m]<-switched[[1]] trans_switch[k]<-T } } } if (cell.type =="B"){ for(k in 1: length(barcode_uniq)) { n<- which(barcode.history==barcode_uniq[k]) selected<-(length(which(selected_seq==seq.number[n]))>0) if(class.switch.independent==T){ switched<-.TRANS.SWITCH(isotype[n],class.switch.prob) if(switched[[2]]){ isotype[n]<-switched[[1]] c_gene[2*k-1]<-colnames(class.switch.prob)[switched[[1]]] } } if(class.switch.selection.dependent==T&isotype[n]==1&selected){ switched<-.TRANS.SWITCH.DEPENDANT(isotype[n],class.switch.prob) isotype[n]<-switched[[1]] c_gene[2*k-1]<-colnames(class.switch.prob)[switched[[1]]] } if(switched[[2]]&trans_switch[k] == F & transcriptome.switch.isotype.dependent== T&sum(transcriptome.switch.prob[trans_state[k],])>0){ 
switched<-.TRANS.SWITCH.DEPENDANT(trans_state[k],transcriptome.switch.prob) trans_state[k]<-switched[[1]] m<-which(barcode.history==barcode_uniq[k]) trans_state_his[m]<-switched[[1]] trans_switch[k]<-T } if((SHM.isotype.dependent==F|length(which(iso_SHM_prob[,1]==c_gene[2*k-1]))==0)& (SHM.phenotype.dependent==F|length(which(pheno_SHM_prob[,1]==colnames(transcriptome.switch.prob)[trans_state[k]]))==0) ){ mut_h<-.SHM_FUNCTION_SEQUENCE4(seq[2*k-1],SHM.method,v_seq[2*k-1],SHM.nuc.prob) mut_l<-.SHM_FUNCTION_SEQUENCE4(seq[2*k],SHM.method,v_seq[2*k],SHM.nuc.prob) Ctrl<-mut_h[[2]]+mut_l[[2]] } if(!( (SHM.isotype.dependent==F|length(which(iso_SHM_prob[,1]==c_gene[2*k-1]))==0)& (SHM.phenotype.dependent==F|length(which(pheno_SHM_prob[,1]==colnames(transcriptome.switch.prob)[trans_state[k]]))==0) )){ new.SHM.nuc.prob<-max(iso_SHM_prob[which(iso_SHM_prob[,1]==c_gene[2*k-1]),2],pheno_SHM_prob[which(pheno_SHM_prob[,1]==colnames(transcriptome.switch.prob)[trans_state[k]]),2]) mut_h<-.SHM_FUNCTION_SEQUENCE4(seq[2*k-1],SHM.method,v_seq[2*k-1],new.SHM.nuc.prob) mut_l<-.SHM_FUNCTION_SEQUENCE4(seq[2*k],SHM.method,v_seq[2*k],new.SHM.nuc.prob) Ctrl<-mut_h[[2]]+mut_l[[2]] } if(Ctrl>0){ seq[2*k-1]<- mut_h[[1]] seq[2*k]<-mut_l[[1]] Length[2*k-1]<-nchar(seq[2*k-1]) Length[2*k]<-nchar(seq[2*k]) seq_combi[k]<- paste(chain[2*k-1],":",seq[2*k-1],";",chain[2*k],":",seq[2*k],sep ="") hisl<-length(seq.history) m<-which(barcode.history == barcode_uniq[k]) barcode.history[m]<-NA barcode.history[hisl+1]<-barcode_uniq[k] seq.number[hisl+1]<-max(seq.number)+1 seq.history[hisl+1]<-seq_combi[k] isotype[hisl+1]<-isotype[m] trans_state_his[hisl+1] <- trans_state_his[m] clonotype.number<-as.numeric(stringr::str_sub(clonotype_id[k],10,-1)) igraph.index[[clonotype.number]]<-c(igraph.index[[clonotype.number]],seq.number[m],seq.number[hisl+1]) if(length(cell.division.prob)>2){ seq_selection<-sample(x=c(0,1),size = 1,prob = c(sequence.selection.prob,1-sequence.selection.prob)) if(seq_selection==0){ selected_rate<-c(selected_rate,stats::runif(1,cell.division.prob[3],cell.division.prob[length(cell.division.prob)])) selected_seq<-c(selected_seq,seq.number[hisl+1]) } } if(trans_switch[k] == F & transcriptome.switch.isotype.dependent== T&sum(transcriptome.switch.prob[trans_state[k],])>0){ switched<-.TRANS.SWITCH.DEPENDANT(trans_state[k],transcriptome.switch.prob) trans_state[k]<-switched[[1]] m<-which(barcode.history==barcode_uniq[k]) trans_state_his[m]<-switched[[1]] trans_switch[k]<-T } } } } if(length(barcode_uniq<max.cell.number)&clonotype_num<max.clonotype.number){ for(k in 1:length(barcode_uniq)){ is_death <- sample(x=c(0,1), replace=TRUE,size = 1, prob=c(death.rate, 1- death.rate)) if (is_death ==0){ del<-c(-(2*k-1),-2*k) del.his<-which(barcode.history==barcode_uniq[k]) barcode<-barcode[del] seq<-seq[del] Length<-Length[del] raw_contig_id<-raw_contig_id[del] raw_clonotype_id<-raw_clonotype_id[del] v_gene<-v_gene[del] d_gene<-d_gene[del] j_gene<-j_gene[del] c_gene<-c_gene[del] chain<-chain[del] v_seq<- v_seq[del] d_seq<-d_seq[del] j_seq<-j_seq[del] cdr3<-cdr3[del] cdr3_nt<-cdr3_nt[del] barcode_uniq<-barcode_uniq[-k] seq_combi<-seq_combi[-k] clonotype_id<-clonotype_id[-k] gen<-gen[-k] trans_state<-trans_state[-k] barcode.history[del.his]<-NA } } } } } is_cell<-rep("Ture",length(barcode)) high_confidence<-is_cell full_length<-is_cell productive<-is_cell reads<-rep(1000,length(barcode)) umis<-rep(10,length(barcode)) if(vdj.productive=="random"){ cdr3<-rep("KKKK",length(barcode)) cdr3_nt<-rep("AAAAAAAAAAAA",length(barcode)) 
all_contig_annotations<-data.frame(barcode,is_cell,raw_contig_id,high_confidence,Length,chain,v_gene,d_gene,j_gene,c_gene,full_length,productive,cdr3,cdr3_nt,reads,umis,raw_clonotype_id) all_contig_annotations$raw_consensus_id<-paste(raw_clonotype_id,"consensus",substr(raw_contig_id,nchar(raw_contig_id),nchar(raw_contig_id)),sep = "_") } clonotypes<-as.data.frame(table(clonotype_id)) colnames(clonotypes)<-c("clonotype_id","frequency") clonotypes$proportion<-clonotypes$frequency/sum(clonotypes$frequency) if(vdj.productive=="naive"|vdj.productive=="vae"){ cdr3_combi<-.GENERATE.COMBI(cdr3,chain) cdr3_nt_combi<-.GENERATE.COMBI(cdr3_nt,chain) cdr3_df<-data.frame(clonotype_id,cdr3_combi,cdr3_nt_combi) clonotypes<-merge(clonotypes,cdr3_df,by="clonotype_id",all.x=T) clonotypes<-clonotypes[!(duplicated(clonotypes$clonotype_id)),] names(clonotypes)[4:5]<-c("cdr3","cdr3_nt") all_contig_annotations<-data.frame(barcode,is_cell,raw_contig_id,high_confidence,Length,chain,v_gene,d_gene,j_gene,c_gene,full_length,productive,cdr3,cdr3_nt,reads,umis,raw_clonotype_id) all_contig_annotations$raw_consensus_id<-paste(raw_clonotype_id,"consensus",substr(raw_contig_id,nchar(raw_contig_id),nchar(raw_contig_id)),sep = "_") } raw_consensus<-.WRITE.CONSENSUS(clonotype_id,sequence.combined = seq_combi,barcode.unique = barcode_uniq,clonotypes.dataframe = clonotypes) consensus<-raw_consensus[,-1] all_contig<-data.frame(raw_contig_id,seq) if(vdj.productive=="random"){ reference_id<-paste0(raw_clonotype_id,"_concat_ref_",stringr::str_sub(raw_contig_id,-1,-1)) reference_seq<-c() d_seq1<-gsub(pattern = "None",replacement = "",x = d_seq) reference_seq<-paste0(v_seq,d_seq1,j_seq) reference<-data.frame(reference_id,reference_seq) reference<-reference[!(duplicated(reference$reference_id)),] reference_id_real<-c() for(cn in 1:clonotype_num){ reference_id_real[2*cn-1]<-paste0("clonotype",cn,"_concat_ref_",1) reference_id_real[2*cn]<-paste0("clonotype",cn,"_concat_ref_",2) } reference_real<-data.frame(reference_id_real,ref_seq) colnames(reference_real)<-c("reference_id","reference_seq") } if(vdj.productive=="naive"|vdj.productive=="vae"){ reference_id<-c() for(cn in 1:clonotype_num){ reference_id[2*cn-1]<-paste0("clonotype",cn,"_concat_ref_",1) reference_id[2*cn]<-paste0("clonotype",cn,"_concat_ref_",2) } reference_real<-data.frame(reference_id,ref_seq) colnames(reference_real)<-c("reference_id","reference_seq") reference<-reference_real } history<-data.frame(seq.number,barcode.history,seq.history,isotype,trans_state_his) if(igraph.on==T){ size.of.vertex<-as.data.frame(stats::aggregate(barcode.history~seq.number,history,length,na.action = stats::na.pass)) size.of.vertex1<-as.data.frame(stats::aggregate(barcode.history~seq.number,history,length)) size.of.vertex<-merge(size.of.vertex1,size.of.vertex,all=T, by="seq.number")[,-3] size.of.vertex[!(size.of.vertex$seq.number %in% size.of.vertex1$seq.number),2]<-0 colnames(size.of.vertex)<-c("seq.number","size") rm(size.of.vertex1) history$isotype[is.na(history$barcode.history)]<-9999 history$trans_state_his[is.na(history$barcode.history)]<-9999 isotype.distribution<-reshape2::dcast(history, seq.number~isotype, value.var ="isotype",length)[,-1] if(length(which(colnames(isotype.distribution)==9999))!=0){ isotype.distribution<-isotype.distribution[,1:ncol(isotype.distribution)-1] } isotype.name<-colnames(isotype.distribution) isotype.distribution <-as.list(as.data.frame(t(isotype.distribution))) trans_state_distribution<-reshape2::dcast(history, seq.number~trans_state_his, value.var 
="trans_state_his",length)[,-1] if(length(which(colnames(trans_state_distribution)==9999))!=0){ trans_state_distribution<-trans_state_distribution[,1:ncol(trans_state_distribution)-1] } trans_state_name<-colnames(trans_state_distribution) trans_state_distribution <-as.list(as.data.frame(t(trans_state_distribution))) for (i in 1:length(igraph.index)){ if (length(igraph.index[[i]])>1){ igraph.index[[i]]<-c(0,igraph.index[[i]]) igraph.index.attr[[i]]<-data.frame(igraph.index[[i]]) colnames(igraph.index.attr[[i]])<-"seq.number" igraph.index.jr[[i]]<-data.frame(igraph.index[[i]]) colnames(igraph.index.jr[[i]])<-"seq.number" igraph.index.attr[[i]]<-subset(igraph.index.attr[[i]],!duplicated(igraph.index.attr[[i]]$seq.number)) igraph.index.attr[[i]]<-merge(size.of.vertex,igraph.index.attr[[i]],by="seq.number",all.y=T) igraph.index.attr[[i]]$size[1]<-0 igraph.index.attr[[i]]$adj_size<-(igraph.index.attr[[i]]$size*60/(range(igraph.index.attr[[i]]$size)[2]-range(igraph.index.attr[[i]]$size)[1]))^0.5+15 igraph.index.attr[[i]]$adj_size<-replace(igraph.index.attr[[i]]$adj_size, igraph.index.attr[[i]]$size==0, 10) igraph.index.jr[[i]]$No<-match(x=igraph.index.jr[[i]]$seq.number,table=igraph.index.attr[[i]]$seq.number) size0<-which(igraph.index.attr[[i]]$size==0) No<- igraph.index.jr[[i]]$No pie.values<-list() for(j in 2:length(igraph.index.attr[[i]]$seq.number)){ pie.values[[j]]<-isotype.distribution[[igraph.index.attr[[i]]$seq.number[j]]] } pie.values.list[[i]]<-pie.values pie_values_trans<-list() for(j in 2:length(igraph.index.attr[[i]]$seq.number)){ pie_values_trans[[j]]<-trans_state_distribution[[igraph.index.attr[[i]]$seq.number[j]]] } pie_values_trans_list[[i]]<-pie_values_trans g<-igraph::graph(No,directed = T) g$layout<-igraph::layout_as_tree igraph::V(g)$label<-igraph.index.attr[[i]]$size igraph::V(g)$label[1]<-"Germline" igraph::V(g)$size<-igraph.index.attr[[i]]$adj_size igraph::V(g)$label.dist<-3 igraph::V(g)$shape<-"pie" if(length(size0)>1){ for(k in 1:length(size0)){ igraph::V(g)$shape[[size0[k]]]<-"circle" } } if(length(size0)==1 ){ igraph::V(g)$shape[[size0]]<-"circle" } igraph::V(g)$pie.color<-list(colors) igraph::V(g)$color<-"gray" igraph::V(g)$color[1]<-"black" p<-g igraph::V(g)$pie<-pie.values.list[[i]] igraph::V(p)$pie<-pie_values_trans_list[[i]] igraph_list[[i]]<-g igraph_list_trans[[i]]<-p } if (length(igraph.index[[i]])==1){ igraph.index.attr[[i]]<-size.of.vertex$size[which(size.of.vertex$seq.number==igraph.index[[i]])] igraph.index.jr[[i]]<-data.frame(igraph.index[[i]]) pie.values.list[[i]]<-list(isotype.distribution[igraph.index[[i]]][[1]]) pie_values_trans_list[[i]]<-list(trans_state_distribution[igraph.index[[i]]][[1]]) g<-igraph::graph(c(1,1), edges = NULL) g$layout<-igraph::layout_as_tree igraph::V(g)$size<-igraph.index.attr[[i]]/30+20 igraph::V(g)$label<-igraph.index.attr[[i]] igraph::V(g)$label.dist<-5 igraph::V(g)$shape<-"pie" igraph::V(g)$pie.color<-list(colors) p<-g igraph::V(g)$pie<-pie.values.list[[i]] igraph::V(p)$pie<-pie_values_trans_list[[i]] igraph_list[[i]]<-g igraph_list_trans[[i]]<-p } } } if(igraph.on==F){ igraph_list<-"none" igraph_list_trans<-"none" } if(transcriptome.on==T){ ngene<-nrow(transcriptome.states) ncell<-length(barcode_uniq) transcriptome<-as.data.frame(matrix(0,ngene,ncell,dimnames = list(rownames(transcriptome.states),barcode_uniq))) for(i in 1:length(barcode_uniq)){ transcriptome[,i]<-eval(parse(text=trans_dis[trans_state[i]])) } transcriptome<-as.matrix(transcriptome) transcriptome[transcriptome<0]<-0 } if(transcriptome.on==F){ 
transcriptome<-"none" } Seq_Name<-"none" if(seq.name!=F){ if(seq.name>clonotype_num){ warning("seq.name > total number of clones") seq.name<-clonotype_num } c_gene_h<-c_gene[which(nrow(c_gene)%%2!=0)] Name<-paste0(clonotype_id,"_",c_gene_h,"_",barcode_uniq,"_cluster",trans_state) seq_germ<-c() for(i in 1:clonotype_num){ seq_germ[i]<-paste(reference$reference_seq[2*i-1],reference$reference_seq[2*i],sep = "_") } clonotype_id_germ<-paste0("clonotype",1:clonotype_num) Name_germ<-rep("germline_clusterUnknown",clonotype_num) Seq<-c(seq_germ,seq_combi) Seq<-gsub(pattern = ";IGK:",replacement = "_",x = Seq) Seq<-gsub(pattern = ";IGL:",replacement = "_",x = Seq) Seq<-gsub(pattern = ";IGH:",replacement = "",x = Seq) Name<-c(Name_germ,Name) clonotype_id_temp<-c(clonotype_id_germ,clonotype_id) Seq_Name_temp<-data.frame(Seq,Name,clonotype_id_temp) frequency<-NULL uniq_clone<-dplyr::arrange(clonotypes,dplyr::desc(frequency))[1:seq.name,1] Seq_Name<-list() for(s in 1:length(uniq_clone)){ Seq_Name[[s]]<- Seq_Name_temp[which(Seq_Name_temp$clonotype_id_temp==uniq_clone[s]),-3] } } version<-R.Version() parameters<-list(initial.size.of.repertoire, species, cell.type, cd4.proportion, duration.of.evolution, complete.duration, vdj.productive, vdj.model, vdj.insertion.mean, vdj.insertion.stdv, vdj.branch.prob, clonal.selection, cell.division.prob, sequence.selection.prob, special.v.gene, class.switch.prob, class.switch.selection.dependent, class.switch.independent, SHM.method, SHM.nuc.prob, SHM.isotype.dependent, SHM.phenotype.dependent, max.cell.number, max.clonotype.number, death.rate, igraph.on, transcriptome.on, transcriptome.switch.independent, transcriptome.switch.prob, transcriptome.switch.isotype.dependent, transcriptome.switch.SHM.dependent, transcriptome.switch.selection.dependent, transcriptome.states, transcriptome.noise, seq.name) names(parameters)<-c("initial.size.of.repertoire", "species", "cell.type", "cd4.proportion", "duration.of.evolution", "complete.duration", "vdj.productive", "vdj.model", "vdj.insertion.mean", "vdj.insertion.stdv", "vdj.branch.prob", "clonal.selection", "cell.division.prob", "sequence.selection.prob", "special.v.gene", "class.switch.prob", "class.switch.selection.dependent", "class.switch.independent", "SHM.method", "SHM.nuc.prob", "SHM.isotype.dependent", "SHM.phenotype.dependent", "max.cell.number", "max.clonotype.number", "death.rate", "igraph.on", "transcriptome.on", "transcriptome.switch.independent", "transcriptome.switch.prob", "transcriptome.switch.isotype.dependent", "transcriptome.switch.SHM.dependent", "transcriptome.switch.selection.dependent", "transcriptome.states", "transcriptome.noise", "seq.name") output_list<-list(all_contig_annotations,clonotypes,all_contig,consensus,reference,reference_real,transcriptome,igraph_list,igraph_list_trans,Seq_Name,igraph.index.attr,history,igraph.index,selected_seq,version,parameters) names(output_list)<-c("all_contig_annotations","clonotypes","all_contig","consensus","reference","reference_real","transcriptome","igraph_list_iso","igraph_list_trans","Seq_Name","igraph.index.attr","history","igraph.index","selected.seq","version","parameters") return(output_list) }
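# Illustration only (not part of the simulator above): the transcriptome noise is kept as an
# unevaluated string, pasted together with a state-column index into 'trans_dis', and then
# evaluated per cell with eval(parse(...)). A minimal, self-contained sketch of that mechanism
# using a toy state table (the real state tables such as mus_b_trans ship with the package):
toy.states <- data.frame(state1 = rep(5, 4), state2 = rep(8, 4),
                         row.names = paste0("gene", 1:4))
toy.noise  <- "rnorm(nrow(toy.states), mean = 1, sd = 0.3)"
toy.dis    <- paste0("toy.states[", 1:2, "]*", toy.noise)
eval(parse(text = toy.dis[1]))   # noisy expression profile for a cell in phenotype state 1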
plotCellMapping = function (feat.object, control) { assertClass(feat.object, "FeatureObject") if (missing(control)) control = list() assertList(control) blocks = feat.object$blocks assertIntegerish(blocks, lower = 1, len = 2) approach = control_parameter(control, "gcm.approach", "min") assertChoice(x = approach, choices = c("min", "mean", "near")) cf.power = control_parameter(control, "gcm.cf_power", 256L) orig.margins = par("mar") on.exit(par(mar = orig.margins)) par(mar = control_parameter(control, "gcm.margin", c(5, 5, 4, 4))) yvals = getObjectivesByApproach(feat.object, approach) sparse.matrix = calculateSparseMatrix(feat.object, yvals) canonical.list = computeCanonical(sparse.matrix) fundamental.list = computeFundamental( canonical.list = canonical.list, cf.power = cf.power) fundamental.mat = fundamental.list$fundamental.mat permutation.index = fundamental.list$permutation.index attractors = seq_len(ncol(fundamental.mat)) colors = matrix(NA, nrow = blocks[1L], ncol = blocks[2L]) color.index = apply(fundamental.mat != 0, 1, function(x) { if (sum(x) > 1) { return (1L) } else { return (2L + which(x)) } }) colors[permutation.index] = color.index colors[permutation.index[attractors]] = 2L arrow.mat = NULL for (attractor.id in attractors) { attractor.cell = celltoz(permutation.index[attractor.id], blocks) attracted.cells = which(fundamental.mat[, attractor.id] != 0) attracted.cells = attracted.cells[attracted.cells != attractor.id] if (length(attracted.cells) == 0) next arrows.to.attractor = vapply(attracted.cells, FUN = function(attracted.id) { attracted.cell = celltoz(permutation.index[attracted.id], blocks) direction = normalizeVector(attractor.cell - attracted.cell) weighted.direction = direction * fundamental.mat[attracted.id, attractor.id] return(c(attracted.cell, weighted.direction)) }, double(4L)) if (is.null(arrow.mat)) { arrow.mat = arrows.to.attractor } else { arrow.mat = cbind(arrow.mat, arrows.to.attractor) } } rownames(arrow.mat) = c("from.x", "from.y", "component.x", "component.y") col.attr = control_parameter(control, "gcm.color_attractor", " col.uncert = control_parameter(control, "gcm.color_uncertain", " col.basin = control_parameter(control, "gcm.color_basin", function(n) { hues = seq(15, 375, length = n + 1L) hcl(h = hues, l = 65, c = 100)[seq_len(n)] }) palette = c(col.uncert, col.attr, col.basin(length(attractors))) image(x = seq_len(blocks[1]), y = seq_len(blocks[2]), z = colors, useRaster = TRUE, col = palette, xlab = "", ylab = "", las = 1, breaks = seq(0.5, length(attractors) + 2.5, 1), xlim = c(0.5, blocks[1] + 0.5), ylim = c(0.5, blocks[2] + 0.5), xaxt = "n", yaxt = "n" ) grid.col = control_parameter(control, "gcm.color_grid", " abline(v = seq(0.5, blocks[1] + 0.5), col = grid.col, xlim = c(0.5, blocks[1] + 0.5), ylim = c(0.5, blocks[2] + 0.5) ) abline(h = seq(0.5, blocks[2] + 0.5), col = grid.col, xlim = c(0.5, blocks[1] + 0.5), ylim = c(0.5, blocks[1] + 0.5) ) if (control_parameter(control, "gcm.plot_arrows", TRUE)) { arrow.type = control_parameter(control, "gcm.arrowhead.type", "triangle") assertChoice(arrow.type, choices = c("simple", "curved", "triangle", "circle", "ellipse", "T")) arrow.length_x = control_parameter(control, "gcm.arrow.length_x", 0.9) arrow.length_y = control_parameter(control, "gcm.arrow.length_y", 0.9) arrowhead.length = control_parameter(control, "gcm.arrowhead.length", 0.1) arrowhead.width = control_parameter(control, "gcm.arrowhead.width", 0.1) assertNumber(arrow.length_x, lower = 0) assertNumber(arrow.length_y, lower = 0) 
assertNumber(arrowhead.length, lower = 0) assertNumber(arrowhead.width, lower = 0) apply(arrow.mat, 2, FUN = function(arrow) { arrow.length = sqrt(sum(arrow[3:4]^2)) shape::Arrows(x0 = arrow[1], y0 = arrow[2], x1 = arrow[1] + arrow[3] * arrow.length_x, y1 = arrow[2] + arrow[4] * arrow.length_y, arr.length = arrow.length * arrowhead.length, arr.width = arrow.length * arrowhead.width, arr.type = arrow.type) }) } if (control_parameter(control, "gcm.plot_coord_labels", TRUE)) { xlab_coord = control_parameter(control, "gcm.label.x_coord", "Cell Coordinate (1st Dimension)") ylab_coord = control_parameter(control, "gcm.label.y_coord", "Cell Coordinate (2nd Dimension)") axis(1, at = seq_len(blocks[1]), labels = rep("", blocks[1])) text(x = seq_len(blocks[1]), y = 0.25, pos = 1, xpd = TRUE, sprintf("%.1e", unique(feat.object$cell.centers[[1]])), srt = 45) mtext(side = 1, xlab_coord, line = 4, cex = par("cex")) axis(2, at = seq_len(blocks[2]), labels = rep("", blocks[2])) text(y = seq_len(blocks[2]), x = 0.45, pos = 2, xpd = TRUE, sprintf("%.1e", unique(feat.object$cell.centers[[2]])), srt = 45) mtext(side = 2, ylab_coord, line = 4, cex = par("cex")) } if (control_parameter(control, "gcm.plot_id_labels", TRUE)) { xlab_id = control_parameter(control, "gcm.label.x_id", "Cell ID (1st Dimension)") ylab_id = control_parameter(control, "gcm.label.y_id", "Cell ID (2nd Dimension)") mtext(side = 3, xlab_id, line = 2.5, cex = par("cex")) axis(side = 3, at = seq_len(blocks[1])) mtext(side = 4, ylab_id, line = 2.5, cex = par("cex")) axis(side = 4, at = seq_len(blocks[2]), las = 1) } }
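# Hedged usage sketch for plotCellMapping(): it expects a two-dimensional flacco FeatureObject
# with cell blocks, so a call could look roughly like the commented lines below (the helper
# names are flacco's createInitialSample()/createFeatureObject(), stated here as an assumption
# rather than a tested example).
# library(flacco)
# X <- createInitialSample(n.obs = 900, dim = 2)
# y <- apply(X, 1, function(x) sum(x^2))
# feat.object <- createFeatureObject(X = X, y = y, blocks = c(10, 10))
# plotCellMapping(feat.object, control = list(gcm.approach = "min"))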
stienen <- function(X, ..., bg="grey", border=list(bg=NULL)) { Xname <- short.deparse(substitute(X)) stopifnot(is.ppp(X)) if(npoints(X) <= 1) { W <- Window(X) dont.complain.about(W) do.call(plot, resolve.defaults(list(x=quote(W)), list(...), list(main=Xname))) return(invisible(NULL)) } d <- nndist(X) b <- bdist.points(X) Y <- X %mark% d observed <- (b >= d) Yobserved <- Y[observed] gp <- union(graphicsPars("symbols"), "lwd") dont.complain.about(Yobserved) do.call.plotfun(plot.ppp, resolve.defaults(list(x=quote(Yobserved), markscale=1), list(...), list(bg=bg), list(main=Xname)), extrargs=gp) if(!identical(border, FALSE)) { if(!is.list(border)) border <- list() Ycensored <- Y[!observed] dont.complain.about(Ycensored) do.call.plotfun(plot.ppp, resolve.defaults(list(x=quote(Ycensored), markscale=1, add=TRUE), border, list(...), list(bg=bg), list(cols=grey(0.5), lwd=2)), extrargs=gp) } return(invisible(NULL)) } stienenSet <- function(X, edge=TRUE) { stopifnot(is.ppp(X)) nnd <- nndist(X) if(!edge) { ok <- bdist.points(X) >= nnd X <- X[ok] nnd <- nnd[ok] } n <- npoints(X) if(n == 0) return(emptywindow(Window(X))) if(n == 1) return(Window(X)) rad <- nnd/2 if(!all(ok <- (rad > 0))) { eps <- min(rad[ok], shortside(Frame(X)))/100 rad <- pmax(rad, eps) } delta <- 2 * pi * max(rad)/128 Z <- disc(rad[1], X[1], delta=delta) for(i in 2:n) Z <- union.owin(Z, disc(rad[i], X[i], delta=delta)) return(Z) }
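# Example (hedged): with spatstat attached, its bundled 'redwood' point pattern can be used to
# draw the Stienen diagram and the corresponding union of discs.
# library(spatstat)
# stienen(redwood)
# plot(stienenSet(redwood, edge = FALSE))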
data("KarnatakaForest") data("NouraguesHD") KarnatakaForest <- KarnatakaForest[1:100, ] D <- KarnatakaForest$D coord <- cbind(KarnatakaForest$long, KarnatakaForest$lat) WD <- suppressMessages(getWoodDensity(KarnatakaForest$genus, KarnatakaForest$species)) H <- retrieveH(D, model = modelHD(NouraguesHD$D, NouraguesHD$H, method = "log2", useWeight = TRUE)) context("Function to compute the AGB") test_that("ComputeAGB with H", { expect_error(computeAGB(D, WD$meanWD[1:65]), "different lenghts") expect_error(computeAGB(D, WD$meanWD), "You need to provide either H or coord") expect_length(computeAGB(D, WD$meanWD, H = H$H), 100) expect_error(computeAGB(D, WD$meanWD, H$H[1:50])) H1 <- H$H H1[1] <- NA expect_warning( computeAGB(D, WD$meanWD, H = H1), "NA values" ) D1 <- D D1[1] <- NA expect_warning( computeAGB(D1, WD$meanWD, H = H1), "NA values in D" ) }) test_that("ComputeAGB with Dlim", { expect_true(any(computeAGB(D, WD$meanWD, H = H$H, Dlim = 5) == 0)) expect_true(any(computeAGB(D, WD$meanWD, H = H$H, Dlim = 5) > 0)) })
costdistances <- function(landscape, locs, method, NN) {
  fric.mat <- transition(landscape, function(x) 1/x[2], NN)
  fric.mat@crs@projargs <- "+proj=merc +units=m"
  fric.mat.cor <- geoCorrection(fric.mat)
  if (method == "leastcost") cd.mat <- costDistance(fric.mat.cor, locs, locs)
  if (method == "rSPDistance") cd.mat <- rSPDistance(fric.mat.cor, locs, locs, theta = 1)
  if (method == "commute") cd.mat <- as.matrix(commuteDistance(fric.mat.cor, locs))
  colnames(cd.mat) <- row.names(locs)
  rownames(cd.mat) <- row.names(locs)
  return(cd.mat)
}
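# Minimal sketch of calling costdistances() (assumes the raster and gdistance packages, which
# provide transition(), geoCorrection(), costDistance() etc.; the friction surface and the
# coordinates below are made up for illustration, so the call is left commented).
# library(raster); library(gdistance)
# set.seed(1)
# landscape <- raster(nrows = 20, ncols = 20, xmn = 0, xmx = 20, ymn = 0, ymx = 20)
# values(landscape) <- runif(ncell(landscape), 1, 10)
# locs <- cbind(x = c(2, 10, 18), y = c(2, 10, 18))
# rownames(locs) <- c("popA", "popB", "popC")
# costdistances(landscape, locs, method = "leastcost", NN = 8)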
library(aster)
set.seed(42)
m <- 10
n <- 5
a <- matrix(rnorm(m * n), nrow = m)
b <- rnorm(n)
out <- .C(aster:::C_aster_mat_vec_mult, nrow = as.integer(m), ncol = as.integer(n),
          a = as.double(a), b = as.double(b), c = double(m))
all.equal(out$c, as.numeric(a %*% b))
b <- rnorm(m)
out <- .C(aster:::C_aster_vec_mat_mult, nrow = as.integer(m), ncol = as.integer(n),
          a = as.double(a), b = as.double(b), c = double(n))
all.equal(out$c, as.numeric(b %*% a))
out <- .C(aster:::C_aster_mat_vec_mat_mult, nrow = as.integer(m), ncol = as.integer(n),
          a = as.double(a), b = as.double(b), c = matrix(as.double(0), n, n))
all.equal(out$c, t(a) %*% diag(b) %*% a)
b <- matrix(rnorm(n * n), n)
out <- .C(aster:::C_aster_diag_mat_mat_mat_mult, nrow = as.integer(m), ncol = as.integer(n),
          a = as.double(a), b = as.double(b), c = double(m))
all.equal(out$c, diag(a %*% b %*% t(a)))
export.ui = function(ps=get.ps()) { restore.point("export.ui") make.export.handlers() if (ps$show.download.rmarkdown) { rmarkdown.ui = list( downloadButton("downloadRmdBtn","Download as RMarkdown"), helpText("You can download the problem set with your current solution as an RMarkdown text file. Save it in a directory and open it with RStudio, to run freely the code on your computer and to create HTML, Word or PDF files from your solution. (Note: If you directly open the file with RStudio without saving it first, the file is probably stored in a directory to which you have no write access and RStudio gives the error 'Access denied' if you want to knit the file.)"), br()) } else { rmarkdown.ui = NULL } if (file.exists("downloads.zip")) { zip.ui = list( downloadButton("downloadZipBtn","Download additional material"), helpText("The ZIP file contains additional material, e.g. data or R scipts that is needed to replicate the analysis and run the RMarkdown file on your computer. You need to extract the zip file into the folder of your RMarkdown file.") ) } else { zip.ui = NULL } sub.ui = list( downloadButton("downloadSubBtn","Download Submission File"), helpText("Downloads a binary file with filetype .sub that contains the solution of your problem set. This file will be saved in your download folder. Your course instructor may have provided a website, e.g. on Moodle, where you can upload that submission file for grading."), br() ) list( br(), rmarkdown.ui, sub.ui, zip.ui ) } make.export.handlers = function(session=ps$session,ps=get.ps(), app=getApp()) { if(ps$show.download.rmarkdown){ setDownloadHandler("downloadRmdBtn", filename = paste0(ps$name, ".Rmd"), content = function(file) { restore.point("downLoadAsRmdHandler") txt = shiny.to.rmd.txt() Encoding(txt) <- "UTF-8" writeLines(txt, file,useBytes = TRUE) }, contentType = "text/Rmd" ) } setDownloadHandler("downloadSubBtn", filename = paste0(ps$name,"__",ps$user.name,".sub"), content = function(file) { restore.point("downLoadSubHandler") ups.to.sub.file(sub.file=file) }, contentType = "application/octet-stream" ) setDownloadHandler("downloadZipBtn", filename <- paste0(ps$name, "_extra_material.zip"), content <- function(file) { file.copy("downloads.zip", file) }, contentType = "application/zip" ) } shiny.to.rmd.txt = function(ps=get.ps(), user.name=get.user.name()) { restore.point("shiny.to.rmd.txt") cdt = ps$cdt rps = ps$rps txt = rps$empty.rmd.txt cl = rps$empty.rmd.chunk.lines rownames(cl) = cl$chunk.name chunks = intersect(cdt$chunk.name, cl$chunk.name) cl.ind = match(chunks, cl$chunk.name) cdt.ind = match(chunks, cdt$chunk.name) stud.code = cdt$stud.code[cdt.ind] start.lines = cl$start.line[cl.ind] clear.lines = do.call("c",lapply(cl.ind, function(i) int.seq(cl$start.line[i]+1,cl$end.line[i]-1))) txt[start.lines] = paste0(txt[start.lines],"\n",stud.code) addon.lines = which(str.starts.with(txt," if (length(addon.lines)>0) txt[addon.lines] = sapply(txt[addon.lines],make.rmd.addon.txt,ps=ps) info.lines = which((str.starts.with(txt,"info("))) for (line in info.lines) { header = txt[line] info.name = str.between(header,'"','"') info = rps$infos[[info.name]] str = paste0(info$rmd, collapse="\n") if (is.true(info$as.note)) str = paste0(" txt[line] = str } if (length(clear.lines)>0) txt = txt[-clear.lines] unlist(txt) } make.rmd.addon.txt = function(str, ps=get.ps(), ups=get.ups()) { restore.point("make.rmd.addon.txt") id = str.right.of(str, " str = str.right.of(str," type = str.left.of(str,"__") name = str.right.of(str,"__") Addon = 
ps$rps$Addons[[type]] ao = ps$rps$addons[[id]] rta = ao$rta res = Addon$out.txt.fun(ao,solved=isTRUE(rta$was.solved)) paste0(res, collapse="\n") }
mrbayes.lset <- function(..., partition){
  args <- list(...)
  args <- lapply(args, format, scientific = FALSE, trim = TRUE)
  arg.set <- list(
    nucmodel = c("4by4", "doublet", "codon", "protein"),
    nst = c("1", "2", "6", "mixed"),
    code = c("universal", "vertmt", "mycoplasma", "yeast", "ciliates", "metmt"),
    ploidy = c("haploid", "diploid", "zlinked"),
    rates = c("equal", "gamma", "propinv", "invgamma", "adgamma"),
    ngammacat = format(1:24, trim = TRUE),
    nbetacat = format(1:24, trim = TRUE),
    omegavar = c("equal", "ny98", "m3"),
    covarion = c("no", "yes"),
    coding = c("all", "variable", "noabsencesites", "nopresencesites"),
    parsmodel = c("no", "yes")
  )
  no.arg <- setdiff(names(args), names(arg.set))
  if ( length(no.arg) > 0 ){
    stop(paste('"', no.arg[1], '" is not a valid argument', sep = ""))
  }
  for ( i in names(args) ){
    args[[i]] <- match.arg(args[[i]], arg.set[[i]])
  }
  if ( !missing(partition) ){
    attr(args, "partition") <- partition
  }
  args
}
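# Example calls (values are drawn from the argument table above; the 'partition' label is
# purely illustrative):
mrbayes.lset(nst = 6, rates = "invgamma")
mrbayes.lset(coding = "variable", ngammacat = 8, partition = "mypartition")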
library(RcppZiggurat)
load("RcppZiggurat.RData")
updateNu = function(Varmeank) {
  return(rowMeans(Varmeank))
}
updateLambda = function(Varmeank, Varcovk, nu) {
  d = dim(Varcovk)[1]
  K = dim(Varcovk)[3]
  lambda = 0
  for (k in 1:K) {
    lambda = lambda + Trace(Varcovk[,,k]) + sum(Varmeank[,k]^2) -
      2 * t(nu) %*% Varmeank[,k] + sum(nu^2)
  }
  return(lambda / (d * K))
}
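# Quick self-contained check of the two updates above. 'Trace' is not defined in this snippet,
# so a plain matrix-trace stand-in is used here as an assumption.
Trace <- function(M) sum(diag(M))
d <- 3; K <- 2
Varmeank <- matrix(rnorm(d * K), d, K)      # d x K variational means
Varcovk  <- array(0, dim = c(d, d, K))      # d x d x K variational covariances
for (k in 1:K) Varcovk[, , k] <- diag(d)
nu <- updateNu(Varmeank)
lambda <- updateLambda(Varmeank, Varcovk, nu)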
anaglyph <- function(d3, blue, red, cex = 1) { d2 <- project3d(d3) with(d2, points(right, y, col = blue, pch = 20, cex = cex)) with(d2, points(left, y, col = red, pch = 20, cex = cex)) } project3d <- function(d3, length = par("din")[1] * 25.4, z0 = 300, d = 30) { length <- length * 0.3 x <- d3[, 1] * length y <- d3[, 2] * length z <- (1.5 + d3[, 3]) * length / 2 d2 <- data.frame( left = (z0 * x - z * d) / (z0 - z), right = (z0 * x + z * d) / (z0 - z), y = (z0 * y) / (z0 - z) ) / length * 0.5 } display_stereo <- function(blue, red, cex = 1, ...) { labels <- NULL init <- function(data, ...) { labels <<- abbreviate(colnames(data), 2) } render_frame <- function() { par(pty = "s", bg = "grey85") blank_plot(xlim = c(-1, 1), ylim = c(-1, 1)) } render_transition <- function() { } render_data <- function(data, proj, geodesic) { render_frame() anaglyph(data %*% proj, blue, red, cex = cex) axes <- project3d(proj) with(axes, { segments(0, 0, right, y, col = blue) segments(0, 0, left, y, col = red) text(right, y, col = blue, label = labels) text(left, y, col = red, label = labels) }) } list( init = init, render_frame = render_frame, render_transition = render_transition, render_data = render_data, render_target = nul ) } animate_stereo <- function(data, tour_path = grand_tour(3), blue = rgb(0, 0.91, 0.89), red = rgb(0.98, 0.052, 0), ...) { animate( data = data, tour_path = tour_path, display = display_stereo(blue, red, ...), ... ) }
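# Hedged usage sketch: inside tourr this display would typically be driven via animate_stereo()
# on a numeric matrix, e.g. the package's bundled 'flea' data (kept as comments because it
# relies on tourr internals such as animate(), grand_tour() and blank_plot() defined elsewhere).
# library(tourr)
# animate_stereo(flea[, 1:6], cex = 1.5)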
.lapply <- function(X, FUN, ..., mc.cores=1L) { parallel::mclapply(X, FUN, ..., mc.cores=mc.cores) }
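# Minimal usage sketch for the mclapply() wrapper above (note that parallel::mclapply() only
# supports mc.cores > 1 on non-Windows platforms):
res <- .lapply(seq_len(4), function(i) i^2, mc.cores = 2L)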
sig_tally <- function(object, ...) { timer <- Sys.time() send_info("Started.") on.exit(send_elapsed_time(timer)) UseMethod("sig_tally") } sig_tally.CopyNumber <- function(object, method = "Wang", ignore_chrs = NULL, indices = NULL, add_loh = FALSE, feature_setting = sigminer::CN.features, cores = 1, keep_only_matrix = FALSE, ...) { method <- match.arg(method, choices = c("Wang", "W", "Tao & Wang", "T", "X", "S")) if (startsWith(method, "T") | method == "X") { send_warning("Currently, the method 'T' is in experimental stage, please don't use it for now!") cn_list <- get_cnlist(object, ignore_chrs = ignore_chrs, add_index = TRUE) } else { cn_list <- get_cnlist(object, ignore_chrs = ignore_chrs) } if (startsWith(method, "W")) { send_info("Step: getting copy number features.") cn_features <- get_features_wang( CN_data = cn_list, cores = cores, genome_build = object@genome_build, feature_setting = feature_setting ) send_success("Gotten.") send_info("Step: generating copy number components.") if (!inherits(feature_setting, "sigminer.features")) { feature_setting <- get_feature_components(feature_setting) } send_success("{.code feature_setting} checked.") send_info("Step: counting components.") cn_components <- purrr::map2(cn_features, names(cn_features), count_components_wrapper, feature_setting = feature_setting ) send_success("Counted.") if ("BoChr" %in% names(cn_features)) { cn_features$BoChr <- cn_features$BoChr[cn_features$BoChr$value != 0] } send_info("Step: generating components by sample matrix.") cn_matrix <- data.table::rbindlist(cn_components, fill = TRUE, use.names = TRUE) %>% dplyr::as_tibble() %>% tibble::column_to_rownames(var = "component") %>% as.matrix() cn_matrix <- cn_matrix[feature_setting$component, ] %>% t() if (any(is.na(cn_matrix))) { send_warning("{.code NA} detected. There may be an issue, please contact the developer!") send_warning("Data will still returned, but please take case of it.") } feature_setting$n_obs <- colSums(cn_matrix, na.rm = TRUE) } else if (startsWith(method, "S")) { send_info("When you use method 'S', please make sure you have set 'join_adj_seg' to FALSE and 'add_loh' to TRUE in 'read_copynumber() in the previous step!") mat_list <- get_matrix_mutex_sv(data.table::rbindlist(cn_list, idcol = "sample")) cn_features <- NULL cn_components <- mat_list$data cn_matrix <- mat_list$CN_40 } else { send_info("Step: getting copy number features.") cn_features <- get_features_mutex( CN_data = cn_list, add_loh = add_loh, XVersion = method == "X", cores = cores ) send_success("Gotten.") send_info("Step: generating copy number components based on combination.") cn_components <- get_components_mutex(cn_features, XVersion = method == "X") send_success("Classified and combined.") send_info("Step: generating components by sample matrix.") if (method != "X") { cn_matrix_list <- get_matrix_mutex(cn_components, indices = indices ) } else { cn_matrix_list <- get_matrix_mutex_xv(cn_components, indices = indices ) } cn_matrix <- cn_matrix_list$ss_mat if (keep_only_matrix) { send_info("When keep_only_matrix is TRUE, only standard matrix kept.") } } send_success("Matrix generated.") if (keep_only_matrix) { cn_matrix } else { if (startsWith(method, "W")) { para_df <- feature_setting } else if (startsWith(method, "T")) { para_df <- "Message: No this info for method T." } else if (startsWith(method, "X")) { para_df <- "Message: No this info for method X." } else if (startsWith(method, "S")) { para_df <- "Message: No this info for method S." 
} if (startsWith(method, "T") | method == "X" | method == "S") { res_list <- list( features = cn_features, components = cn_components, parameters = para_df, nmf_matrix = cn_matrix, all_matrices = if (method == "X") { list( simplified_matrix = cn_matrix_list$ss_mat, standard_matrix = cn_matrix_list$s_mat ) } else if (method == "S") { list( CN_40 = mat_list$CN_40, CN_48 = mat_list$CN_48 ) } else { list( simplified_matrix = cn_matrix_list$ss_mat, standard_matrix = cn_matrix_list$s_mat, complex_matrix = cn_matrix_list$c_mat ) } ) } else { res_list <- list( features = cn_features, components = cn_components, parameters = para_df, nmf_matrix = cn_matrix ) } return(res_list) } } sig_tally.RS <- function(object, keep_only_matrix = FALSE, ...) { svlist <- get_svlist(object) send_success("Successfully get RS list!") sv_features <- get_features_sv(svlist) send_success("Successfully get RS features!") sv_component <- get_components_sv(sv_features) send_success("Successfully get RS component!") sv_matrix_list <- get_matrix_sv(CN_components = sv_component) send_success("Successfully get RS matrix!") res_list <- list( features = sv_features, components = sv_component, nmf_matrix = sv_matrix_list$RS_32, all_matrices = sv_matrix_list ) if (keep_only_matrix) { return(res_list$nmf_matrix) } else { return(res_list) } } sig_tally.MAF <- function(object, mode = c("SBS", "DBS", "ID", "ALL"), ref_genome = "BSgenome.Hsapiens.UCSC.hg19", genome_build = NULL, add_trans_bias = FALSE, ignore_chrs = NULL, use_syn = TRUE, keep_only_matrix = FALSE, ...) { if (!requireNamespace("BSgenome", quietly = TRUE)) { send_stop("Please install 'BSgenome' package firstly.") } mode <- match.arg(mode) hsgs.installed <- BSgenome::installed.genomes(splitNameParts = TRUE) data.table::setDT(x = hsgs.installed) if (nrow(hsgs.installed) == 0) { send_stop("Could not find any installed BSgenomes. Use {.code BSgenome::available.genomes()} for options.") } send_info("We would assume you marked all variants' position in + strand.") if (is.null(ref_genome)) { send_info("User did not set {.code ref_genome}.") send_success("Found following BSgenome installtions. Using first entry.\n") print(hsgs.installed) ref_genome <- hsgs.installed$pkgname[1] } else { if (!ref_genome %in% hsgs.installed$pkgname) { send_error("Could not find BSgenome {.code ", ref_genome, "}.") send_info("Found following BSgenome installtions. 
Correct {.code ref_genome} argument if necessary.") print(hsgs.installed) send_stop("Exit.") } } if (is.null(genome_build)) { if (grepl("hg19", ref_genome)) { genome_build <- "hg19" } else if (grepl("hg38", ref_genome)) { genome_build <- "hg38" } else if (grepl("mm10$", ref_genome)) { genome_build <- "mm10" } else if (grepl("mm9$", ref_genome)) { genome_build <- "mm9" } else { send_stop("Cannot guess the genome build, please set it by hand!") } } ref_genome <- BSgenome::getBSgenome(genome = ref_genome) send_success("Reference genome loaded.") query <- maftools::subsetMaf( maf = object, query = "Variant_Type %in% c('SNP', 'DNP', 'INS', 'DEL')", fields = "Chromosome", includeSyn = use_syn, mafObj = FALSE ) query <- query[!is.na(query$Reference_Allele) & !is.na(query$Tumor_Seq_Allele2)] send_success("Variants from MAF object queried.") if (!is.null(ignore_chrs)) { query <- query[!query$Chromosome %in% ignore_chrs] send_success("Unwanted contigs removed.") } if (nrow(query) == 0) { send_stop("Zero variants to analyze!") } query$Chromosome <- sub( pattern = "chr", replacement = "chr", x = as.character(query$Chromosome), ignore.case = TRUE ) query$Chromosome <- ifelse(startsWith(query$Chromosome, "chr"), query$Chromosome, paste0("chr", query$Chromosome) ) send_success("Chromosome names checked.") query$Chromosome <- sub( pattern = "x", replacement = "X", x = as.character(query$Chromosome), ignore.case = TRUE ) query$Chromosome <- sub( pattern = "y", replacement = "Y", x = as.character(query$Chromosome), ignore.case = TRUE ) query$Chromosome <- sub("23", "X", query$Chromosome) query$Chromosome <- sub("24", "Y", query$Chromosome) send_success("Sex chromosomes properly handled.") query <- query[query$Chromosome %in% paste0("chr", c(1:22, "X", "Y", "M", "MT"))] send_success("Only variants located in standard chromosomes (1:22, X, Y, M/MT) are kept.") query$Start_Position <- as.numeric(as.character(query$Start_Position)) query$End_Position <- as.numeric(as.character(query$End_Position)) send_success("Variant start and end position checked.") query_seq_lvls <- query[, .N, Chromosome] ref_seqs_lvls <- BSgenome::seqnames(x = ref_genome) query_seq_lvls_missing <- query_seq_lvls[!Chromosome %in% ref_seqs_lvls] if (nrow(query_seq_lvls_missing) > 3) { send_warning("Too many chromosome names cannot match reference genome. 
Try dropping 'chr' prefix to fix it...") query$Chromosome <- sub( pattern = "chr", replacement = "", x = as.character(query$Chromosome), ignore.case = TRUE ) query_seq_lvls <- query[, .N, Chromosome] query_seq_lvls_missing <- query_seq_lvls[!Chromosome %in% ref_seqs_lvls] send_info("Dropped.") } if (nrow(query_seq_lvls_missing) > 0) { send_warning(paste0( "Chromosome names in MAF must match chromosome names in reference genome.\nIgnorinig ", query_seq_lvls_missing[, sum(N)], " single nucleotide variants from missing chromosomes ", paste(query_seq_lvls_missing[, Chromosome], collapse = ", ") )) } query <- query[!Chromosome %in% query_seq_lvls_missing[, Chromosome]] send_success("Variant data for matrix generation preprocessed.") if (mode == "SBS") { res <- generate_matrix_SBS(query, ref_genome, genome_build = genome_build, add_trans_bias = add_trans_bias) } else if (mode == "DBS") { res <- generate_matrix_DBS(query, ref_genome, genome_build = genome_build, add_trans_bias = add_trans_bias) } else if (mode == "ID") { res <- generate_matrix_INDEL(query, ref_genome, genome_build = genome_build, add_trans_bias = add_trans_bias) } else { send_info("All types of matrices generation - start.") res_SBS <- tryCatch( generate_matrix_SBS(query, ref_genome, genome_build = genome_build, add_trans_bias = add_trans_bias), error = function(e) { if (e$message == "") { NULL } else { send_error("Unexpected error occurred:") send_stop(e$message) } } ) res_DBS <- tryCatch( generate_matrix_DBS(query, ref_genome, genome_build = genome_build, add_trans_bias = add_trans_bias), error = function(e) { if (e$message == "") { NULL } else { send_error("Unexpected error occurred:") send_stop(e$message) } } ) res_ID <- tryCatch( generate_matrix_INDEL(query, ref_genome, genome_build = genome_build, add_trans_bias = add_trans_bias), error = function(e) { if (e$message == "") { NULL } else { send_error("Unexpected error occurred:") send_stop(e$message) } } ) send_info("All types of matrices generation (APOBEC scores included) - end.") res <- c(res_SBS$all_matrices, res_DBS$all_matrices, res_ID$all_matrices) res$APOBEC_scores <- res_SBS$APOBEC_scores } send_success("Done.") if (keep_only_matrix) { if (mode == "ALL") { send_warning("Mode 'ALL' cannot return a single matrix.") return(res) } return(res$nmf_matrix) } else { return(res) } } utils::globalVariables( c( ".N", "Chromosome" ) )
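# Hedged usage sketch for the sig_tally() methods above: 'maf' would come from
# maftools::read.maf() and the matching BSgenome package must be installed
# (the file path is illustrative only).
# library(maftools)
# maf <- read.maf("example.maf")
# tally <- sig_tally(maf, mode = "SBS",
#                    ref_genome = "BSgenome.Hsapiens.UCSC.hg19")
# str(tally$nmf_matrix)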
rtvvar <- function(n, alpha, beta = 1, simple = FALSE) {
  x <- h <- hm <- numeric(n)
  for (i in 2:n) {
    if (simple == FALSE) {
      h[i] <- h_fun(i, beta, n)
      hm[i] <- sqrt(h[i]^2 + alpha * x[i-1]^2)
      x[i] <- hm[i] * rnorm(1)
    } else {
      x[i] <- (i/n) * rnorm(1)
    }
  }
  as.ts(x)
}
h_fun <- function(t, beta, n) {
  10 - 10 * sin(beta*pi*t/n + pi/6) * (1 + t/n)
}
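# Example: simulate and plot one path of the time-varying variance process defined above
# (rtvvar() and h_fun() need only base R).
set.seed(123)
x <- rtvvar(500, alpha = 0.2, beta = 1)
plot(x, main = "simulated time-varying variance series")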
test_that("output test", {
  expect_snapshot({
    pillar(add_special(as.POSIXct("2017-07-28 18:04:35 +0200")))
    pillar(add_special(as.POSIXlt("2017-07-28 18:04:35 +0200")))
  })
  withr::with_options(
    list(digits.secs = 4),
    expect_snapshot({
      pillar(add_special(as.POSIXlt("2017-07-28 18:04:35 +0200")))
    })
  )
  testthat::skip_if(getRversion() < "3.3")
  expect_snapshot({
    pillar(add_special(as.difftime(8:11, units = "secs")))
  })
})
"cdfgov" <- function(x, para) { if(! are.pargov.valid(para)) return() U <- para$para[1] A <- para$para[2] B <- para$para[3] lo <- U hi <- U+A "afunc" <- function(x, f) return(x - quagov(f, para)) f <- sapply(1:length(x), function(i) { tmp <- NULL try(tmp <- uniroot(afunc, lower=0, upper=1, x=x[i]), silent=TRUE) ifelse(is.null(tmp), return(NA), return(tmp$root)) } ) f[x < lo] <- 0 f[x > hi] <- 1 names(f) <- NULL f[! is.finite(f)] <- NA return(f) }
"tgp.postprocess" <- function(ll, Xnames, response, pred.n, zcov, Ds2x, improv, sens.p, Zm0r1, params, rmfiles=TRUE) { ll$X <- framify.X(ll$X, Xnames, ll$d) ll$Xsplit <- framify.X(ll$Xsplit, Xnames, ll$d) ll$nsplit <- NULL if(is.null(response)) ll$response <- "z" else ll$response <- response if(Ds2x == FALSE) { ll$Ds2x <- NULL; } if(improv == FALSE || is.null(improv)) { ll$improv <- NULL; } if(ll$nn == 0 || (ll$BTE[2]-ll$BTE[1])==0 || !is.null(sens.p)) { ll$XX <- ll$ZZ.mean <- ll$ZZ.s2 <- ll$ZZ.q <- ll$ZZ.km <- ll$ZZ.ks2 <- ll$ZZ.vark <- NULL ll$ZZ.q1 <- ll$ZZ.med <- ll$ZZ.q2 <- ll$ZpZZ.s2 <- ll$Ds2x <- ll$improv <- NULL } else { if((!is.null(ll$improv)) && sum(is.nan(ll$improv) > 0)) { warning(paste("encountered", sum(is.nan(ll$improv)), "NaN in Improv, replaced with zeros"), call.=FALSE) ll$improv[is.nan(ll$improv)] <- 0 } ll$XX <- framify.X(ll$XX, Xnames, ll$d) } if(!is.null(improv)){ ll$irank[ll$irank == 0] <- ll$nn ll$improv <- data.frame(improv=ll$improv, rank=ll$irank) } ll$irank <- NULL if(pred.n == FALSE || ll$BTE[2]-ll$BTE[1] == 0) { ll$Zp.mean <- ll$Zp.q <- ll$Zp.q1 <- ll$Zp.q2 <- NULL; ll$Zp.s2 <- ll$ZpZZ.s2 <- ll$Zp.km <- ll$Zp.vark <- ll$Zp.ks2 <- ll$Zp.med <- NULL } if(file.exists(paste("./", "best_parts_1.out", sep=""))) { ll$parts <- as.matrix(read.table("best_parts_1.out")) if(rmfiles) unlink("best_parts_1.out") } else { ll$parts <- NULL } ll$trees <- tgp.get.trees(ll$Xsplit, rmfiles) ll$posts <- read.table("tree_m0_posts.out", header=TRUE) if(ll$BTE[2] - ll$BTE[1] == 0) ll$posts <- NULL if(rmfiles) unlink("tree_m0_posts.out") if(ll$trace) ll$trace <- tgp.read.traces(ll$n, ll$nn, ll$d, params$corr, ll$verb, rmfiles) else ll$trace <- NULL ll$params <- params ll$verb <- NULL; ll$state <- NULL; ll$tree <- NULL; ll$MAP <- NULL; ll$nt <- NULL ll$ncol <- NULL; ll$hier <- NULL; ll$pred.n <- ll$nnprime <- ll$krige <- ll$bDs2x <- NULL nt <- as.integer(ll$itemps[1]) lambda <- ll$itemps[length(ll$itemps)] if(lambda == 1) lambda <- "opt" else if(lambda == 2) lambda <- "naive" else if(lambda == 3) lambda <- "st" else stop(paste("bad lambda = ", lambda, sep="")) ll$itemps <- list(c0n0=as.integer(ll$itemps[2:3]), k=ll$itemps[4:(nt+3)], pk=ll$itemps[(nt+4):(2*nt+3)], counts=as.integer(ll$itemps[(2*nt+4):(3*nt+3)]), lambda=lambda) if(nt == 1) ll$ess <- ll$ess[1] else { ll$ess=list(combined=ll$ess[1], each=data.frame(k=ll$itemps$k, count=ll$ess[2:(nt+1)], ess=ll$ess[(nt+2):(2*nt+1)])) } if(ll$linburn) ll$linburn <- TRUE else ll$linburn <- FALSE ll$gpcs[is.nan(ll$gpcs)] <- NA ll$gpcs <- data.frame(t(ll$gpcs)) names(ll$gpcs) <- c("grow", "prune", "change", "swap") if(!is.null(sens.p)){ names(sens.p) <- NULL sens.par <- list(nn.lhs=sens.p[1], rect=matrix(sens.p[2:(ll$d*2+1)], nrow=2), shape=sens.p[(ll$d*2+2):(ll$d*3+1)], mode=sens.p[(ll$d*3+2):(ll$d*4+1)], ngrid=ll$sens.ngrid, span=ll$sens.span) sens <- list() sens$par <- sens.par sens$ngrid <- NULL sens$span <- NULL sens$Xgrid <- matrix(ll$sens.Xgrid, ncol=ll$d) sens$ZZ.mean <- matrix(ll$sens.ZZ.mean, ncol=ll$d) sens$ZZ.q1 <- matrix(ll$sens.ZZ.q1, ncol=ll$d) sens$ZZ.q2 <- matrix(ll$sens.ZZ.q2, ncol=ll$d) sens$S <- matrix(ll$sens.S, ncol=ll$d, byrow=TRUE) sens$T <- matrix(ll$sens.T, ncol=ll$d, byrow=TRUE) } else{ sens <- NULL } ll$sens.Xgrid <- ll$sens.ZZ.mean <- ll$sens.ZZ.q1 <- ll$sens.ZZ.q2 <- NULL ll$sens.ngrid <- ll$sens.span <- ll$sens.S <- ll$sens.T <- NULL ll$sens <- sens if(!is.null(Zm0r1)) { ll$Z <- undo.mean0.range1(ll$Z,Zm0r1$undo) ll$Zp.mean <- undo.mean0.range1(ll$Zp.mean,Zm0r1$undo) ll$ZZ.mean <- 
undo.mean0.range1(ll$ZZ.mean,Zm0r1$undo) ll$Zp.km <- undo.mean0.range1(ll$Zp.km,Zm0r1$undo) ll$ZZ.km <- undo.mean0.range1(ll$ZZ.km,Zm0r1$undo) ll$Zp.vark <- undo.mean0.range1(ll$Zp.vark,Zm0r1$undo, nomean=TRUE, s2=TRUE) ll$ZZ.vark <- undo.mean0.range1(ll$ZZ.vark,Zm0r1$undo, nomean=TRUE, s2=TRUE) ll$Zp.ks2 <- undo.mean0.range1(ll$Zp.ks2,Zm0r1$undo, nomean=TRUE, s2=TRUE) ll$ZZ.ks2 <- undo.mean0.range1(ll$ZZ.ks2,Zm0r1$undo, nomean=TRUE, s2=TRUE) ll$ZpZZ.ks2 <- undo.mean0.range1(ll$ZpZZ.ks2,Zm0r1$undo, nomean=TRUE, s2=TRUE) ll$Zp.q <- undo.mean0.range1(ll$Zp.q,Zm0r1$undo, nomean=TRUE) ll$ZZ.q <- undo.mean0.range1(ll$ZZ.q,Zm0r1$undo, nomean=TRUE) ll$Zp.s2 <- undo.mean0.range1(ll$Zp.s2,Zm0r1$undo, nomean=TRUE, s2=TRUE) ll$ZZ.s2 <- undo.mean0.range1(ll$ZZ.s2,Zm0r1$undo, nomean=TRUE, s2=TRUE) ll$Zp.q1 <- undo.mean0.range1(ll$Zp.q1,Zm0r1$undo) ll$Zp.med <- undo.mean0.range1(ll$Zp.med,Zm0r1$undo) ll$Zp.q2 <- undo.mean0.range1(ll$Zp.q2,Zm0r1$undo) ll$ZZ.q1 <- undo.mean0.range1(ll$ZZ.q1,Zm0r1$undo) ll$ZZ.med <- undo.mean0.range1(ll$ZZ.med,Zm0r1$undo) ll$ZZ.q2 <- undo.mean0.range1(ll$ZZ.q2,Zm0r1$undo) for(j in 1:ll$d){ ll$sens.ZZ.mean[,j] <- undo.mean0.range1(ll$sens.ZZ.mean[,j],Zm0r1$undo) ll$sens.ZZ.q1[,j] <- undo.mean0.range1(ll$sens.ZZ.q1[,j],Zm0r1$undo) ll$sens.ZZ.q2[,j] <- undo.mean0.range1(ll$sens.ZZ.q2[,j],Zm0r1$undo) } ll$m0r1 <- TRUE } else { ll$m0r1 <- FALSE } if(!is.null(ll$Zp.s2) && ll$zcov) ll$Zp.s2 <- matrix(ll$Zp.s2, ncol=ll$n) if(!is.null(ll$ZZ.s2) && ll$zcov) ll$ZZ.s2 <- matrix(ll$ZZ.s2, ncol=ll$nn) if(!is.null(ll$ZpZZ.s2) && ll$zcov) ll$ZpZZ.s2 <- t(matrix(ll$ZpZZ.s2, ncol=ll$n)) else ll$ZpZZ.s2 <- NULL ll$zcov <- NULL class(ll) <- "tgp" return(ll) } "tgp.get.trees" <- function(X, rmfiles=TRUE) { trees <- list() tree.files <- list.files(pattern="tree_m0_[0-9]+.out") if(length(tree.files) == 0) return(NULL) for(i in 1:length(tree.files)) { h <- as.numeric(strsplit(tree.files[i], "[_.]")[[1]][3]) trees[[h]] <- read.table(tree.files[i], header=TRUE) if(rmfiles) unlink(tree.files[i]) if(nrow(trees[[h]]) == 1) next; nodes <- (1:length(trees[[h]]$var))[trees[[h]]$var != "<leaf>"] for(j in 1:length(nodes)) { col <- as.numeric(as.character(trees[[h]]$var[nodes[j]])) + 1 m <- which.min(abs(X[,col] - trees[[h]]$val[nodes[j]])) trees[[h]]$val[nodes[j]] <- X[m,col] } } return(trees) }
ict.test <- function(y, treat, J = NA, alpha = 0.05, n.draws = 250000, gms = TRUE, pi.table = TRUE){ if(class(y)=="matrix") design <- "modified" else design = "standard" if (design == "modified") { J <- ncol(y) - 1 treat <- ifelse(!is.na(y[,J+1]), 1, 0) for (j in 1:(J+1)) for (i in 1:nrow(y)) y[i,j] <- ifelse(is.na(y[i,j]), 0, y[i,j]) y <- apply(y, 1, sum) } else { if (is.na(J)) stop("You must fill in the option J, the number of control items.") } condition.values <- sort(unique(treat)) treatment.values <- condition.values[condition.values!=0] if(length(treatment.values) > 1) multi <- TRUE else multi <- FALSE y.all <- y treat.all <- treat bonferroni <- rep(NA, length(treatment.values)) for (curr.treat in treatment.values) { y <- y.all[treat.all == 0 | treat.all == curr.treat] treat <- treat.all[treat.all == 0 | treat.all == curr.treat] treat <- treat > 0 t.y1 <- pi.y1 <- rep(NA, J) for(j in 0:(J-1)) { pi.y1[j+1] <- mean(y[treat==0] <= j) - mean(y[treat==1] <= j) try(t.y1[j+1] <- t.test(y[treat==0] <= j, y[treat==1] <= j, alternative = "less")$p.value) } t.y0 <- pi.y0 <- rep(NA, J) for(j in 1:J) { pi.y0[j] <- mean(y[treat==1] <= j) - mean(y[treat==0] <= (j - 1)) try(t.y0[j] <- t.test(y[treat==1] <= j, y[treat==0] <= (j - 1), alternative = "less")$p.value) } n <- length(y) y.comb <- c(0:(J-1), 1:J) t.comb <- c(rep(1, J), rep(0, J)) cond.y1 <- pi.y1 == 0 cond.y0 <- pi.y0 == 0 if (gms == TRUE) { cond.y1 <- (pi.y1 == 0) | (sqrt(n) * pi.y1 / sqrt(var(pi.y1)) > sqrt(log(n))) cond.y0 <- (pi.y0 == 0) | (sqrt(n) * pi.y0 / sqrt(var(pi.y0)) > sqrt(log(n))) } rho.pi <- cov.pi <- sd.pi <- matrix(NA, ncol = length(y.comb), nrow = length(y.comb)) sd <- rep(NA, length(y.comb)) for(j in 1:length(y.comb)) { if(t.comb[j]==1) sd[j] <- sqrt(((mean(y[treat==1] <= y.comb[j])*(1-mean(y[treat==1] <= y.comb[j])))/sum(treat==1) + (mean(y[treat==0] <= y.comb[j])*(1-mean(y[treat==0] <= y.comb[j])))/sum(treat==0))) if(t.comb[j]==0) sd[j] <- sqrt(((mean(y[treat==1] <= (y.comb[j]-1+1))*(1-mean(y[treat==1] <= (y.comb[j]-1+1))))/sum(treat==1) + (mean(y[treat==0] <= (y.comb[j]-1+0))*(1-mean(y[treat==0] <= (y.comb[j]-1+0))))/sum(treat==0))) } for(j in 1:length(y.comb)) { for(k in 1:length(y.comb)) { if(t.comb[j]==1 & t.comb[k]==1) { if(y.comb[j]==y.comb[k]) cov.pi[j,k] <- sd[j]^2 else if(y.comb[j] < y.comb[k]) cov.pi[j,k] <- (mean(y[treat==1] <= y.comb[j])*(1-mean(y[treat==1] <= y.comb[k]))/sum(treat==1) + mean(y[treat==0] <= y.comb[j])*(1-mean(y[treat==0] <= y.comb[k]))/sum(treat==0)) else cov.pi[j,k] <- 0 if(y.comb[j] <= y.comb[k]) rho.pi[j,k] <- cov.pi[j,k] / (sd[j]*sd[k]) else rho.pi[j,k] <- 0 } if(t.comb[j]==0 & t.comb[k]==0) { if(y.comb[j]==y.comb[k]) cov.pi[j,k] <- sd[j]^2 else if(y.comb[j] <= y.comb[k]) cov.pi[j,k] <- (mean(y[treat==1] <= (y.comb[j] -1 +1))*(1-mean(y[treat==1] <= (y.comb[k]-1+1)))/sum(treat==1) + mean(y[treat==0] <= (y.comb[j] - 1 + 0))*(1-mean(y[treat==0] <= (y.comb[k] - 1 + 0)))/sum(treat==0)) else cov.pi[j,k] <- 0 if(y.comb[j] <= y.comb[k]) rho.pi[j,k] <- cov.pi[j,k] / (sd[j]*sd[k]) else rho.pi[j,k] <- 0 } if(t.comb[j]==0 & t.comb[k]==1) { if(y.comb[j] <= y.comb[k]) cov.pi[j,k] <- (-1)*(mean(y[treat==1] <= (y.comb[j]-1+1))*(1-mean(y[treat==1] <= y.comb[k]))/sum(treat==1) + mean(y[treat==0] <= (y.comb[j]-1+0))*(1-mean(y[treat==0] <= y.comb[k]))/sum(treat==0)) else cov.pi[j,k] <- 0 if(y.comb[j] <= y.comb[k]) rho.pi[j,k] <- cov.pi[j,k] / (sd[j]*sd[k]) else rho.pi[j,k] <- 0 } if(t.comb[j]==1 & t.comb[k]==0) { if(y.comb[j] < y.comb[k]) cov.pi[j,k] <- (-1) * ( mean(y[treat==1] <= 
y.comb[j])*(1-mean(y[treat==1] <= (y.comb[k]-1+1)))/sum(treat==1) + mean(y[treat==0] <= y.comb[j])*(1-mean(y[treat==0] <= (y.comb[k] - 1+ 0)))/sum(treat==0)) else if(y.comb[j]==y.comb[k]) cov.pi[j,k] <- (-1) * (mean(y[treat==1] <= (y.comb[j]-1+1))*(1-mean(y[treat==1] <= y.comb[k]))/sum(treat==1) + mean(y[treat==0] <= (y.comb[j]-1+0))*(1-mean(y[treat==0] <= y.comb[j]))/sum(treat==0)) else cov.pi[j,k] <- 0 if(y.comb[j] <= y.comb[k]) rho.pi[j,k] <- cov.pi[j,k] / (sd[j]*sd[k]) else rho.pi[j,k] <- 0 } } } for(i in 1:nrow(rho.pi)){ for(j in 1:ncol(rho.pi)){ if(y.comb[i]>y.comb[j]) rho.pi[i,j] <- rho.pi[j,i] if(y.comb[i]>y.comb[j]) cov.pi[i,j] <- cov.pi[j,i] } } if (length(pi.y1) > 0) { rho.pi.y1 <- rho.pi[1:length(pi.y1), 1:length(pi.y1)] cov.pi.y1 <- cov.pi[1:length(pi.y1), 1:length(pi.y1)] } if (length(pi.y0) > 0) { rho.pi.y0 <- rho.pi[(length(pi.y1)+1):(length(pi.y1) + length(pi.y0)), (length(pi.y1)+1):(length(pi.y1) + length(pi.y0))] cov.pi.y0 <- cov.pi[(length(pi.y1)+1):(length(pi.y1) + length(pi.y0)), (length(pi.y1)+1):(length(pi.y1) + length(pi.y0))] } y.comb.tb <- c(0:J, 0:J) t.comb.tb <- c(rep(1, J+1), rep(0, J+1)) sd.tb <- rep(NA, length(y.comb.tb)) for(j in 1:length(y.comb.tb)) { if(t.comb.tb[j]==1) sd.tb[j] <- sqrt(((mean(y[treat==1] <= y.comb.tb[j])*(1-mean(y[treat==1] <= y.comb.tb[j])))/sum(treat==1) + (mean(y[treat==0] <= y.comb.tb[j])*(1-mean(y[treat==0] <= y.comb.tb[j])))/sum(treat==0))) if(t.comb.tb[j]==0) sd.tb[j] <- sqrt(((mean(y[treat==1] <= (y.comb.tb[j]-1+1))*(1-mean(y[treat==1] <= (y.comb.tb[j]-1+1))))/sum(treat==1) + (mean(y[treat==0] <= (y.comb.tb[j]-1+0))*(1-mean(y[treat==0] <= (y.comb.tb[j]-1+0))))/sum(treat==0))) } pi.y1.tb <- rep(NA, J+1) for(j in 0:J) { pi.y1.tb[j+1] <- mean(y[treat==0] <= j) - mean(y[treat==1] <= j) } pi.y0.tb <- rep(NA, J+1) for(j in 0:J) { pi.y0.tb[j+1] <- mean(y[treat==1] <= j) - mean(y[treat==0] <= (j - 1)) } tb <- round(rbind(cbind(pi.y1.tb, sd.tb[1:(J+1)]), cbind(pi.y0.tb, sd.tb[(J+2):((J+1)*2)])), 4) rownames(tb) <- c(paste("pi(Y_i(0) = ", 0:J, ", Z_i = 1)", sep = ""), paste("pi(Y_i(0) = ", 0:J, ", Z_i = 0)", sep = "")) colnames(tb) <- c("est.", "s.e.") pi.y1 <- pi.y1[cond.y1 == FALSE] pi.y0 <- pi.y0[cond.y0 == FALSE] t.y1 <- t.y1[cond.y1 == FALSE] t.y0 <- t.y0[cond.y0 == FALSE] rho.pi.y1 <- rho.pi.y1[cond.y1 == FALSE, cond.y1 == FALSE] cov.pi.y1 <- cov.pi.y1[cond.y1 == FALSE, cond.y1 == FALSE] rho.pi.y0 <- rho.pi.y0[cond.y0 == FALSE, cond.y0 == FALSE] cov.pi.y0 <- cov.pi.y0[cond.y0 == FALSE, cond.y0 == FALSE] if (length(pi.y1) > 1) { par.y1 <- rep(0, length(pi.y1)) Dmat <- 2*ginv(cov.pi.y1) Amat <- diag(length(pi.y1)) if (sum(pi.y1 < 0) > 0) { lambda <- solve.QP(Dmat, par.y1, Amat, bvec = -pi.y1)$value } else { lambda <- 0 } w <- rep(NA, length(pi.y1)+1) rho.pi.y1.partial <- cor2pcor(rho.pi.y1) if (length(pi.y1)==2) { w[3] <- .5 * pi^(-1) * acos(rho.pi.y1[1,2]) w[2] <- .5 w[1] <- .5 - .5 * pi^(-1) * acos(rho.pi.y1[1,2]) } else if (length(pi.y1)==3) { rho.pi.y1.partial.12.3 <- (rho.pi.y1[1,2] - rho.pi.y1[1,3] * rho.pi.y1[2,3])/(sqrt(1-rho.pi.y1[1,3]^2) * sqrt(1-rho.pi.y1[2,3]^2)) rho.pi.y1.partial.13.2 <- (rho.pi.y1[1,3] - rho.pi.y1[1,2] * rho.pi.y1[3,2])/(sqrt(1-rho.pi.y1[1,2]^2) * sqrt(1-rho.pi.y1[3,2]^2)) rho.pi.y1.partial.23.1 <- (rho.pi.y1[2,3] - rho.pi.y1[2,1] * rho.pi.y1[3,1])/(sqrt(1-rho.pi.y1[2,1]^2) * sqrt(1-rho.pi.y1[3,1]^2)) w[1] <- .25 * pi^(-1) * (2 * pi - acos(rho.pi.y1[1,2]) - acos(rho.pi.y1[1,3]) - acos(rho.pi.y1[2,3])) w[2] <- .25 * pi^(-1) * (3 * pi - acos(rho.pi.y1.partial.12.3) - acos(rho.pi.y1.partial.13.2) - 
acos(rho.pi.y1.partial.23.1)) w[3] <- .5 - w[1] w[4] <- .5 - w[2] } else if (length(pi.y1)==4) { w[4] <- .125 * pi^(-1) * (-4 * pi + acos(rho.pi.y1[4,3]) + acos(rho.pi.y1[3,2]) + acos(rho.pi.y1[4,2])) w[3] <- .25 * pi^(-2) * ( acos(rho.pi.y1[4,3]) * (pi - acos(rho.pi.y1.partial[2,1]))) w[2] <- .125 * pi^(-1) * ( 8 * pi - acos(rho.pi.y1[4,3]) + acos(rho.pi.y1[3,2]) + acos(rho.pi.y1[4,2])) w[1] <- pmvnorm(mean = pi.y1, sigma = cov.pi.y1, lower = rep(0, length(pi.y1))) w[5] <- .5 - w[1] - w[3] } else if (length(pi.y1)>4) { draws <- mvrnorm(n = n.draws, mu = par.y1, Sigma = cov.pi.y1) pi.tilde <- matrix(NA, nrow = n.draws, ncol = length(pi.y1)) for (i in 1:n.draws) { if (sum(draws[i,] < 0) > 1) { pi.tilde[i,] <- solve.QP(Dmat, par.y1, Amat, bvec = -draws[i,])$solution + draws[i,] } else { pi.tilde[i,] <- draws[i, ] } } pi.tilde.pos.count <- apply(pi.tilde, 1, function(x) { sum(x > 0) }) for(k in 0:J) w[k+1] <- mean(pi.tilde.pos.count==(J-k)) } p.y1 <- 0 for(k in 0:length(pi.y1)) p.y1 <- p.y1 + w[k+1] * pchisq(lambda, df = k, lower.tail = FALSE) } else if (length(pi.y1) == 1) { p.y1 <- t.y1 } if (length(pi.y0) > 1) { par.y0 <- rep(0, length(pi.y0)) Dmat <- 2*ginv(cov.pi.y0) Amat <- diag(length(pi.y0)) if (sum(pi.y0 < 0) > 0) { lambda <- solve.QP(Dmat, par.y0, Amat, bvec = -pi.y0)$value } else { lambda <- 0 } w <- rep(NA, length(pi.y0)+1) rho.pi.y0.partial <- cor2pcor(rho.pi.y0) if (length(pi.y0)==2) { w[3] <- .5 * pi^(-1) * acos(rho.pi.y0[1,2]) w[2] <- .5 w[1] <- .5 - .5 * pi^(-1) * acos(rho.pi.y0[1,2]) } else if (length(pi.y0)==3) { rho.pi.y0.partial.12.3 <- (rho.pi.y0[1,2] - rho.pi.y0[1,3] * rho.pi.y0[2,3])/(sqrt(1-rho.pi.y0[1,3]^2) * sqrt(1-rho.pi.y0[2,3]^2)) rho.pi.y0.partial.13.2 <- (rho.pi.y0[1,3] - rho.pi.y0[1,2] * rho.pi.y0[3,2])/(sqrt(1-rho.pi.y0[1,2]^2) * sqrt(1-rho.pi.y0[3,2]^2)) rho.pi.y0.partial.23.1 <- (rho.pi.y0[2,3] - rho.pi.y0[2,1] * rho.pi.y0[3,1])/(sqrt(1-rho.pi.y0[2,1]^2) * sqrt(1-rho.pi.y0[3,1]^2)) w[1] <- .25 * pi^(-1) * (2 * pi - acos(rho.pi.y0[1,2]) - acos(rho.pi.y0[1,3]) - acos(rho.pi.y0[2,3])) w[2] <- .25 * pi^(-1) * (3 * pi - acos(rho.pi.y0.partial.12.3) - acos(rho.pi.y0.partial.13.2) - acos(rho.pi.y0.partial.23.1)) w[3] <- .5 - w[1] w[4] <- .5 - w[2] } else if (length(pi.y0)==4) { w[4] <- .125 * pi^(-1) * (-4 * pi + acos(rho.pi.y0[4,3]) + acos(rho.pi.y0[3,2]) + acos(rho.pi.y0[4,2])) w[3] <- .25 * pi^(-2) * ( acos(rho.pi.y0[4,3]) * (pi - acos(rho.pi.y0.partial[2,1]))) w[2] <- .125 * pi^(-1) * ( 8 * pi - acos(rho.pi.y0[4,3]) + acos(rho.pi.y0[3,2]) + acos(rho.pi.y0[4,2])) w[1] <- pmvnorm(mean = pi.y0, sigma = cov.pi.y0, lower = rep(0, length(pi.y0))) w[5] <- .5 - w[1] - w[3] } else if (length(pi.y0)>4) { draws <- mvrnorm(n = n.draws, mu = par.y0, Sigma = cov.pi.y0) pi.tilde <- matrix(NA, nrow = n.draws, ncol = length(pi.y0)) for(i in 1:n.draws) { if (sum(draws[i,] < 0) > 1) { pi.tilde[i,] <- solve.QP(Dmat, par.y0, Amat, bvec = -draws[i,])$solution + draws[i,] } else { pi.tilde[i,] <- draws[i, ] } } pi.tilde.pos.count <- apply(pi.tilde, 1, function(x) { sum(x > 0) }) for(k in 0:length(pi.y0)) w[k+1] <- mean(pi.tilde.pos.count==(J-k)) } p.y0 <- 0 for(k in 0:length(pi.y0)) p.y0 <- p.y0 + w[k+1] * pchisq(lambda, df = k, lower.tail = FALSE) } else if (length(pi.y0) == 1) { p.y0 <- t.y0 } if ((length(pi.y1) > 0) & (length(pi.y0) > 0)) bonferroni[curr.treat] <- 2 * min(p.y1, p.y0) else if (length(pi.y1) > 0) bonferroni[curr.treat] <- 2 * p.y1 else if (length(pi.y0) > 0) bonferroni[curr.treat] <- 2 * p.y0 else bonferroni[curr.treat] <-1 } names(bonferroni) <- paste("Sensitive 
Item", treatment.values) bonferroni <- pmin(bonferroni, 1) if (pi.table == FALSE) return.object <- list(p = bonferroni) else return.object <- list(p = bonferroni, pi.table = tb) class(return.object) <- "ict.test" return.object } print.ict.test <- function(x, ...){ cat("\nTest for List Experiment Design Effects\n\n") if (!is.null(x$pi.table)) { cat("Estimated population proportions \n") print(x$pi.table) cat("\n Y_i(0) is the (latent) count of 'yes' responses to the control items. Z_i is the (latent) binary response to the sensitive item.\n\n") } cat("Bonferroni-corrected p-value\nIf this value is below alpha, you reject the null hypothesis of no design effect. If it is above alpha, you fail to reject the null.\n\n") print(x$p) cat("\n") invisible(x) }
.Logistik_edited <- function(data, member, member.type = "group", match = "score", anchor = 1:ncol(data), type = "both", criterion = "LRT", all.cov = FALSE) { R2 <- function(m, n) 1 - (exp(-m$null.deviance / 2 + m$deviance / 2))^(2 / n) R2max <- function(m, n) 1 - (exp(-m$null.deviance / 2))^(2 / n) R2DIF <- function(m, n) R2(m, n) / R2max(m, n) dev <- R2full <- R2simple <- deltaR <- NULL mFull <- mSimple <- seFull <- seSimple <- matrix( 0, ncol(data), 4 ) if (all.cov) { cov.matM0 <- cov.matM1 <- vector("list", ncol(data)) } else { cov.matM0 <- cov.matM1 <- NULL } if (member.type == "group") { GROUP <- as.factor(member) } else { GROUP <- member } for (item in 1:ncol(data)) { if (match[1] == "score") { data2 <- rbind(data[, anchor]) if (sum(anchor == item) == 0) { data2 <- cbind(data2, data[, item]) } SCORES <- rowSums(data2, na.rm = TRUE) } else if (match[1] == "zscore") { data2 <- rbind(data[, anchor]) if (sum(anchor == item) == 0) { data2 <- cbind(data2, data[, item]) } SCORES <- scale(rowSums(data2, na.rm = TRUE)) } else { SCORES <- match } ITEM <- data[, item] m0 <- switch(type, both = glm(ITEM ~ SCORES * GROUP, family = "binomial"), udif = glm(ITEM ~ SCORES + GROUP, family = "binomial"), nudif = glm(ITEM ~ SCORES * GROUP, family = "binomial") ) m1 <- switch(type, both = glm(ITEM ~ SCORES, family = "binomial"), udif = glm(ITEM ~ SCORES, family = "binomial"), nudif = glm(ITEM ~ SCORES + GROUP, family = "binomial") ) if (criterion == "LRT") { dev[item] <- deviance(m1) - deviance(m0) } else { if (criterion != "Wald") { stop("'criterion' must be either 'LRT' or Wald'", call. = FALSE ) } else { coeff <- as.numeric(coef(m0)) covMat <- summary(m0)$cov.scaled if (type == "udif") { C <- rbind(c(0, 0, 1)) } else { if (type == "nudif") { C <- rbind(c(0, 0, 0, 1)) } else { C <- rbind(c(0, 0, 1, 0), c(0, 0, 0, 1)) } } dev[item] <- t(C %*% coeff) %*% solve(C %*% covMat %*% t(C)) %*% C %*% coeff } } R2full[item] <- R2DIF(m0, nrow(data)) R2simple[item] <- R2DIF(m1, nrow(data)) deltaR[item] <- R2DIF(m0, nrow(data)) - R2DIF(m1, nrow(data)) mFull[item, 1:length(m0$coefficients)] <- m0$coefficients mSimple[item, 1:length(m1$coefficients)] <- m1$coefficients seFull[item, 1:length(m0$coefficients)] <- sqrt(diag(vcov(m0))) seSimple[item, 1:length(m1$coefficients)] <- sqrt(diag(vcov(m1))) if (all.cov) { cov.matM0[[item]] <- vcov(m0) } if (all.cov) { cov.matM1[[item]] <- vcov(m1) } } colnames(mFull) <- colnames(mSimple) <- colnames(seFull) <- colnames(seSimple) <- c( "(Intercept)", "SCORE", "GROUP", "SCORE:GROUP" ) res <- list( stat = dev, R2M0 = R2full, R2M1 = R2simple, deltaR2 = deltaR, parM0 = mFull, parM1 = mSimple, seM0 = seFull, seM1 = seSimple, cov.M0 = cov.matM0, cov.M1 = cov.matM1, criterion = criterion, member.type = member.type, match = ifelse(match[1] == "score", "score", ifelse(match[1] == "zscore", "zscore", "matching variable")) ) return(res) }
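## Hedged usage sketch for .Logistik_edited(): logistic-regression DIF statistics for
## a matrix of dichotomous item responses. The simulated responses and group
## membership below are placeholders.
set.seed(1)
items <- matrix(rbinom(200 * 5, 1, 0.6), nrow = 200, ncol = 5)
group <- rep(c(0, 1), each = 100)
dif <- .Logistik_edited(items, member = group, type = "both", criterion = "LRT")
dif$stat      # one likelihood-ratio statistic per item
dif$deltaR2   # Nagelkerke R^2 difference between the full and reduced models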
overallmean <- function(x){ overall <- mean(x) n1 <- dim(x)[1] n2 <- dim(x)[2] matrix(nrow=n1,ncol=n2,data=rep(overall,n1*n2)) }
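## Hedged usage sketch for overallmean(): given a numeric matrix it returns a matrix
## of the same dimensions with every cell equal to the grand mean. The 5 x 4 matrix
## below is illustrative only.
m <- matrix(1:20, nrow = 5, ncol = 4)
overallmean(m)   # every entry equals mean(1:20) = 10.5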
cr_buildstep <- function(name, args = NULL, id = NULL, prefix = "gcr.io/cloud-builders/", entrypoint = NULL, dir = "", env = NULL, waitFor = NULL, volumes = NULL){ if(is.null(prefix) || is.na(prefix)){ prefix <- "gcr.io/cloud-builders/" } if(!is.null(entrypoint)){ assert_that(is.string(entrypoint)) } if(dir %in% c("",NA)) dir <- NULL if(grepl("^gcr.io", name)){ prefix <- "" } list(structure( rmNullObs(list( name = paste0(prefix, name), entrypoint = entrypoint, args = string_to_list(args), id = id, dir = dir, env = string_to_list(env), volumes = volumes, waitFor = string_to_list(waitFor) )), class = c("cr_buildstep","list"))) } is.cr_buildstep <- function(x){ inherits(x, "cr_buildstep") } cr_buildstep_df <- function(x){ assert_that( is.data.frame(x), all(c('name') %in% names(x)) ) if(is.null(x$prefix)){ x$prefix <- "" } if(is.null(x$dir)){ x$dir <- "" } xx <- x[, intersect(c("name", "args", "id", "prefix", "entrypoint", "dir", "env", "volumes", "waitFor"), names(x))] apply(xx, 1, function(row){ cr_buildstep(name = row[["name"]], args = row[["args"]], id = row[["id"]], prefix = row[["prefix"]], entrypoint = row[["entrypoint"]], env = row[["env"]], volumes = row[["volumes"]], waitFor = row[["waitFor"]], dir = row[["dir"]])[[1]] }) } cr_buildstep_extract <- function(x, step = NULL){ assert_that(is.gar_Build(x)) the_step <- x$steps[[step]] the_step$prefix <- "" do.call(cr_buildstep, args = the_step) } cr_buildstep_edit <- function(x, ...){ xx <- x[[1]] assert_that(is.cr_buildstep(xx)) dots <- list(...) the_name <- dots$name if(is.null(the_name)){ the_name <- xx$name } dots$name <- the_name do.call(cr_buildstep, args = modifyList(xx, dots)) }
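## Hedged usage sketch for cr_buildstep(): it assembles a single Cloud Build step as a
## named list wrapped in class "cr_buildstep". The image name, arguments and id below
## are placeholders, and the helpers assert_that(), string_to_list() and rmNullObs()
## referenced by the function are assumed to be available (e.g. with googleCloudRunner loaded).
step <- cr_buildstep(
  name = "docker",
  args = c("build", "-t", "gcr.io/my-project/my-image", "."),
  id   = "build image"
)
str(step[[1]])   # name is prefixed with gcr.io/cloud-builders/ by default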
AttributeCreate <- function(name, otype=c("V", "E", "G"), g){
  otype = match.arg(otype)
  if(otype=="V"){ g <- set.vertex.attribute(graph=g, name=name, value=NA) }
  if(otype=="E"){ g <- set.edge.attribute(graph=g, name=name, value=NA) }
  g
}
AttributeIsMeaningful <- function(name, otype=c("V", "E", "G"), g){
  otype = match.arg(otype)
  res = switch(EXPR=otype
               ,V = name %in% list.vertex.attributes(g)
               ,E = name %in% list.edge.attributes(g)
               ,G = TRUE
  )
  res
}
AttributeSet <- function(name, value, oids=NULL, oProgIds=NULL, otype=c("V", "E", "G"), g, add=TRUE, quiet=FALSE){
  if(length(oProgIds)>0){
    oids = switch(EXPR=otype
                  , none = vector()
                  , V = as.numeric(V(g)[ProgId %in% oProgIds])
                  , E = as.numeric(E(g)[ProgId %in% oProgIds])
                  , G = which(names(g$groups) %in% oProgIds)
    )
  }
  otype = match.arg(otype)
  if(!AttributeIsMeaningful(name=name, otype=otype, g=g)){
    if(add){
      g <- AttributeCreate(name=name, otype=otype, g=g)
    } else {
      if(!quiet){
        type = switch(EXPR=otype, V="vertex", E="edge", G="group")
        g <- MsgToLogObj(Msg="Attribute `"%.%name%.%"` has no meaning for `"%.%type%.%"`. Use add=TRUE or create one yourself.", g=g, add=FALSE)
      }
      return(g)
    }
  }
  if(otype=="V"){ g <- set.vertex.attribute(graph=g, name=name, index=oids, value=value) }
  if(otype=="E"){ g <- set.edge.attribute(graph=g, name=name, index=oids, value=value) }
  if(otype=="G"){
    if (length(g$groups) >= oids){
      if(any(g$groups[[oids]]$closed)){
        oids = as.numeric(V(g)[ProgId==g$groups[[oids]]$ProgId])[1]
        g <- Recall(name, value, oids, otype=c("V"), g=g, add=FALSE, quiet=FALSE)
      } else {
        if(!all(name %in% c("ProgId", "ProgType"))){
          g$groups[[oids]][[name]] = value
        }
      }
    }
  }
  g
}
AttributeAllSet <- function(name, value, g){
  AO = GetActiveObject(g=g)
  if(AO$type!="none"){
    g <- AttributeSet(name=name, value=value, oProgIds=AO$ProgId, otype=AO$type, g=g, add=FALSE, quiet=FALSE)
  } else {
    if(length(V(g)[selected])>0 || length(E(g)[selected])>0){
      VMeaningful = AttributeIsMeaningful(name, otype="V", g=g)
      EMeaningful = AttributeIsMeaningful(name, otype="E", g=g)
      if(VMeaningful|EMeaningful){
        g <- AttributeSet(name=name, value=value, oids=as.numeric(V(g)[selected]), otype="V", g=g, add=FALSE, quiet=TRUE)
        g <- AttributeSet(name=name, value=value, oids=as.numeric(E(g)[selected]), otype="E", g=g, add=FALSE, quiet=TRUE)
      } else {
        g <- MsgToLogObj(Msg="Attribute `"%.%name%.%"` has no meaning for either vertices or edges. Use AttributeCreate to create one.", g=g, add=FALSE)
      }
    } else {
      VO = GetViewObject(g=g)
      if(VO$type!="none"){
        g <- AttributeSet(name=name, value=value, oProgIds=VO$ProgId, otype=VO$type, g=g, add=FALSE, quiet=FALSE)
      }
    }
  }
  g
}
AttributeChange <- function(AttrCommand, g){
  eq = tryCatch({parse(text=AttrCommand)[[1]]},
                error = function(TechnicalMessage) { NULL },
                warning = function(TechnicalMessage) { NULL })
  name = NULL
  value = NULL
  if(is.call(eq)){
    if(as.character(eq[[1]])=="=" && length(eq)==3){
      name = as.character(eq[[2]])
      value = eq[[3]]
    }
  }
  if(!is.null(name) && !is.null(value)){
    g <- AttributeAllSet(name=name, value=value, g=g)
  } else {
    g <- MsgToLogObj(Msg="Spelling error... try next time.", g=g, add=FALSE)
  }
  g
}
library(testthat) library(rly) context("Declaration of a state without error") Lexer <- R6::R6Class("Lexer", public = list( tokens = c('NUMBER', 'PLUS','MINUS'), states = list(c('comment', 'exclusive')), t_PLUS = '\\+', t_MINUS = '-', t_NUMBER = '\\d+', t_comment = function(re='/\\*', t) { t$lexer$begin('comment') }, t_comment_body_part = function(re='(.|\\n)*\\*/', t) { t$lexer$begin('INITIAL') }, t_error = function(t) { } ) ) test_that("no error rule for state", { expect_output(rly::lex(Lexer), "WARN .* No error rule is defined for exclusive state 'comment' WARN .* No ignore rule is defined for exclusive state 'comment'") })
confid.int.theta <- function(x, y, method=c("chi-sq", "simulate"), conf.level = 0.95, grd = .001, B = 1000, tol=1e-7, maxit=500){ method <- match.arg(method) alfa<-1-conf.level m<-length(x) n<-length(y) N<-m+n lambda<-m/N z<-c(y,x) r<-rank(z, ties.method = "max")[1:n] theta0<-dd.est(x,y) p0<-phi(N, theta0, lambda)/N res<-mrle.sporm(x, y, theta0, p0) phat<-res$p theta.hat<-res$theta ell<-res$ell if(method == "chi-sq"){ Calfa<-qchisq(conf.level,1) } else{ LR.b<-NULL sim.c<-function(){ u<-runif(m) v<-runif(n) v<-v/(theta.hat-(theta.hat-1)*v) rv<-rank(c(v,u), ties.method = "max")[1:n] theta0<-dd.est(u,v) p0<-phi(N, theta0, lambda)/N ell.star<-try(mrle.sporm(u, v, theta0, p0)$ell, TRUE) p0<-phi(N, theta.hat, lambda)/N 2*(ell.star-try(elltheta(theta.hat, p0, rv, tol, maxit)$ell, TRUE)) } LR.b <- lapply(1:B, function(i) try(sim.c(), TRUE)) LR.b <-unlist(LR.b[sapply(LR.b, function(x) !inherits(x, "try-error"))]) Calfa<-quantile(LR.b, conf.level) } theta<-theta.hat+grd p<-phat LR.theta<-2*(ell-elltheta(theta, p, r, tol, maxit)$ell) while(LR.theta<Calfa){ theta<-theta+grd tmp<-elltheta(theta, p, r, tol, maxit) LR.theta<-2*(ell-tmp$ell) p<-tmp$p } theta.U<-theta theta<-theta.hat-grd LR.theta<-2*(ell-elltheta(theta, p, r, tol, maxit)$ell) while(LR.theta<Calfa & theta>0){ theta<-theta-grd tmp<-elltheta(theta, p, r, tol, maxit) LR.theta<-2*(ell-tmp$ell) p<-tmp$p } theta.L<-theta list(theta.L=theta.L, theta.U=theta.U, theta.hat=theta.hat, Calpha=Calfa) }
x1 = 1
x2 <- 2
x1
x2
(x3 = 3)
x = c(1, 5, 4, 9, 0)
typeof(x)
length(x)
x = 1:7; x
y <- 2:-2; y
seq(1, 3, by=0.2)
seq(1, 5, length.out=4)
seq(1,5, along.with = c(1,2,13,15,6,7,8,20))
x=c(1,2,13,15,6,7,8,20,30,60); length(x)
x1=110:150
length(x1)
seq(0,1, length.out = length(x1))
seq(0,1, along.with = x1)
seq(0,1, length.out = 60)
(x3=c('S1','S2',"S3"))
class(x3)
(x4=1:10)
class(x4)
(x5=c(TRUE, FALSE, TRUE))
class(x5)
(x5b=c(F, T, T, F))
class(x5b)
x = c(1, 5.4, TRUE, "hello")
x
typeof(x)
u = c(10, 20, 30)
v = c(1, 2, 3, 4, 5, 6, 7, 8, 9)
u + v
u[1]
u[-3]
s = c("aa", "bb", "cc", "dd", "ee")
s[c(2, 3)]
s[-c(4,5)]
s[c(-2,3)]
s[c(2,3,2)]
s[c(2:5)]
s[2:5]
s[c(10)]
s = c("aa", "bb", "cc", "dd", "ee")
L = c(FALSE, TRUE, FALSE, TRUE, FALSE)
s[L]
x = 1:10
x[x<5]
(v = c("Mary", "Sue") )
names(v) = c("First", "Last")
v
v["First"]
v[c("Last", "First")]
(x = 1:10)
x = NULL
x
v1 = c(3,8,4,5,0,11)
v2 = c(4,11,0,8,1,2)
(add.result = v1+v2)
(sub.result = v1-v2)
(multi.result = v1*v2)
(divi.result = v1/v2)
(v1/2)
v = c(3,8,4,5,0,11, -9, 304)
sort(v)
sort(v, decreasing=T)
nums = scan()
nums
names = scan(what='character')
names
x = c(1,4,7,NA,12,19,15,21,20)
mean(x)
mean(x, na.rm=T)
is.na(x)
x[!is.na(x)]
mdp_check_square_stochastic <- function(X) { error_msg <- '' s1 <- dim(X)[1] s2 <- dim(X)[2] if (s1 != s2) { error_msg <- 'MDP Toolbox ERROR: Matrix must be square' } else if ( max(abs(rowSums(X) - rep(1,s2))) > 10^(-12) ) { error_msg <- 'MDP Toolbox ERROR: Row sums of the matrix must be 1' } else if (length(which(X < 0)) > 0) { error_msg <- 'MDP Toolbox ERROR: Probabilities must be non-negative' } return(error_msg) }
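## Hedged usage sketch for mdp_check_square_stochastic(): it returns an empty string
## for a valid square stochastic matrix and an error message otherwise. The transition
## matrices below are illustrative only.
P.ok  <- matrix(c(0.2, 0.8,
                  0.5, 0.5), nrow = 2, byrow = TRUE)
P.bad <- matrix(c(0.2, 0.9,
                  0.5, 0.5), nrow = 2, byrow = TRUE)
mdp_check_square_stochastic(P.ok)    # ""
mdp_check_square_stochastic(P.bad)   # row-sum error message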
library("geigen") source("testgsvd.R") A <- matrix(1:15, nrow=5,ncol=3) B <- matrix(c(8,1,6, 3,5,7, 4,9,2), nrow=3) A B z <- gsvd(A,B) testgsvd(z,A,B)
comm.as.gbd <- function(X, balance.method = .pbd_env$SPMD.IO$balance.method, rank.source = .pbd_env$SPMD.CT$rank.source, comm = .pbd_env$SPMD.CT$comm){ COMM.RANK <- spmd.comm.rank(comm) ncol.X <- integer(1) if(COMM.RANK == rank.source){ if(!is.matrix(X)){ stop("X should be a matrix.") } ncol.X <- ncol(X) } ncol.X <- spmd.bcast.integer(ncol.X, rank.source = rank.source, comm = comm) if(COMM.RANK != rank.source){ X <- matrix(0, nrow = 0, ncol = ncol.X) } ret <- comm.load.balance(X, balance.method = balance.method, comm = comm) ret }
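## Hedged usage sketch for comm.as.gbd(): it scatters a matrix held on one rank into a
## row-block (GBD) distributed matrix. This only runs under MPI, e.g.
##   mpiexec -np 2 Rscript this_script.R
## and assumes pbdMPI supplies spmd.comm.rank(), spmd.bcast.integer() and
## comm.load.balance(); the 10 x 2 matrix is illustrative only.
suppressMessages(library(pbdMPI))
init()
X <- if (comm.rank() == 0) matrix(1:20, nrow = 10) else NULL
X.gbd <- comm.as.gbd(X, rank.source = 0)
comm.print(dim(X.gbd), all.rank = TRUE)   # each rank reports its local block size
finalize()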
data_file <- "../test_data/test_table_pangia.txt" the_list <- load_edge_assignments(data_file, type = 'pangia') expect_that(length(the_list), equals(3)) expect_that(names(the_list)[3], equals("test_all_taxa")) expect_that(load_edge_assignments("../test_data/nonexistentfile.txt", type = 'pangia'), throws_error())
.newMethodObj_ODSDesign <- function(info, par, minData) { npar <- length(x = par) ccObj <- .newMethodObj_CaseCohort(info = list("wg" = 1.0, "wb" = 1.0), par = par[1L:{npar-2L}], minData = minData) return( c(info, ccObj, "pi1" = 1.0 / {1.0 + exp(x = -par[npar-1L])}, "pi2" = 1.0 / {1.0 + exp(x = -par[npar])}) ) } .G <- function(object, ...) { Su <- object$baseInfo$U$S Sv <- object$baseInfo$V$S G1 <- object$I1 * {1.0-Su} + object$I2 * {Su - Sv} G2 <- object$I3 * {Su - Sv} return( list("G1" = G1, "G2" = G2) ) } .dG <- function(object, i, ...) { dSu <- .derivS(object = object$baseInfo$U, i = i, beta = object$baseInfo$beta, et = object$baseInfo$et) dSv <- .derivS(object = object$baseInfo$V, i = i, beta = object$baseInfo$beta, et = object$baseInfo$et) G1t <- -object$I1[i]*dSu$St + object$I2[i]*{dSu$St-dSv$St} G2t <- object$I3[i]*{dSu$St - dSv$St} G1tt <- -object$I1[i]*dSu$Stt + object$I2[i]*{dSu$Stt - dSv$Stt} G2tt <- object$I3[i]*{dSu$Stt - dSv$Stt} return( list("d1g1" = unname(obj = G1t), "d1g2" = unname(obj = G2t), "d2g1" = unname(obj = G1tt), "d2g2" = unname(obj = G2tt)) ) } .d1G <- function(object, ...) { dSu <- .deriv1S(object = object$baseInfo$U, beta = object$baseInfo$beta, et = object$baseInfo$et) dSv <- .deriv1S(object = object$baseInfo$V, beta = object$baseInfo$beta, et = object$baseInfo$et) G1t <- -object$I1*dSu + object$I2*{dSu-dSv} G2t <- object$I3*{dSu - dSv} return( list("d1g1" = unname(obj = G1t), "d1g2" = unname(obj = G2t)) ) } .piece <- function(object) { Gs <- .G(object = object) slz <- sum(log(x = {object$n0 + object$n1*Gs$G1/object$pi1 + object$n2*Gs$G2/object$pi2})) res <- slz + object$n1*log(x = object$pi1) + object$n2*log(x = object$pi2) return( res ) } .dPiece <- function(object) { Gs <- .G(object = object) n1p1 <- object$n1 / object$pi1 n2p2 <- object$n2 / object$pi2 temp2 <- -n1p1*Gs$G1*{1.0-object$pi1} temp3 <- -n2p2*Gs$G2*{1.0-object$pi2} temp4 <- 1.0/{object$n0 + n1p1*Gs$G1 + n2p2*Gs$G2} temp5 <- c(rep(x = 0, times = object$np), object$n1*{1.0-object$pi1}, object$n2*{1.0-object$pi2}) dG1 <- .d1G(object = object) temp1 <- n1p1*dG1$d1g1 + n2p2*dG1$d1g2 n <- nrow(x = temp1) l2xi <- cbind(temp1*temp4, temp2*temp4, temp3*temp4) return( unname(obj = t(x = {t(x = l2xi) + temp5/n})) ) } .ddPiece <- function(object) { np <- object$np + 2L Gs <- .G(object = object) n1p1 <- object$n1 / object$pi1 n2p2 <- object$n2 / object$pi2 term <- object$n0 + n1p1*Gs$G1 + n2p2*Gs$G2 hess2 <- matrix(data = 0.0, nrow = 2L, ncol = 2L) temp33 <- n1p1*Gs$G1*{1.0-object$pi1}/term temp34 <- n2p2*Gs$G2*{1.0-object$pi2}/term hess2[1L,1L] <- sum(temp33*{temp33 - 1.0}) hess2[1L,2L] <- sum(temp33*temp34) hess2[2L,2L] <- sum(temp34*{temp34 - 1.0}) res <- matrix(data = 0.0, nrow = {np-2L}, ncol = {np-2L}) hess4 <- matrix(data = 0.0, nrow = 2L, ncol = {np-2L}) for (i in 1L:length(x = term)) { dGs <- .dG(object = object, i = i) temp21 <- n1p1*dGs$d2g1 + n2p2*dGs$d2g2 temp22 <- n1p1*dGs$d1g1 + n2p2*dGs$d1g2 temp22 <- temp22 %o% temp22 itemp23 <- 1.0/term[i] res <- res + {-temp21 + temp22*itemp23}*itemp23 temp31 <- n1p1*dGs$d1g1*{1.0-object$pi1}*itemp23 temp32 <- n2p2*dGs$d1g2*{1.0-object$pi2}*itemp23 temp35 <- {n1p1*dGs$d1g1 + n2p2*dGs$d1g2}*itemp23 hess4[1L,] <- hess4[1L,] + {temp31 - temp35*temp33[i]} hess4[2L,] <- hess4[2L,] + {temp32 - temp35*temp34[i]} } hess1 <- res hess3 <- t(x = hess4) hess2[1L,1L] <- hess2[1L,1L] - object$n1*object$pi1*(object$pi1-1L) hess2[2L,2L] <- hess2[2L,2L] - object$n2*object$pi2*(object$pi2-1L) hess2[2L,1L] <- hess2[1L,2L] return( -rbind(cbind(hess1, 
hess3),cbind(hess4,hess2)) ) } .loglik_ODSDesign <- function(object, ...) { if (object$pi1 < 1e-8 || object$pi1 >= 1.0) return( Inf ) if (object$pi2 < 1e-8 || object$pi2 >= 1.0) return( Inf ) logl <- - .loglik_CaseCohort(object = object, ...) res <- -{logl - .piece(object = object)} if (is.nan(x = res)) return( Inf ) return( res ) } .dloglik_ODSDesign <- function(object, ...) { lxi <- .dloglik_CaseCohort(object = object, ...) lxi <- cbind(-lxi, 0.0, 0.0) l2 <- .dPiece(object = object) lxi <- lxi - l2 return( -lxi ) } .ddloglik_ODSDesign <- function(object, ...) { hess1 <- .ddloglik_CaseCohort(object = object, ...) hess1 <- rbind(cbind(hess1, 0, 0),0,0) hess2 <- .ddPiece(object = object) return( unname(obj = hess1 + hess2) ) } .se_ODSDesign <- function(object, ...) { np <- length(x = object$baseInfo$beta) Jh <- .ddloglik_ODSDesign(object = object) res <- .dloglik_ODSDesign(object = object) V0 <- cov(x = res[1L:object$n0,,drop=FALSE]) V1 <- cov(x = res[{object$n0+1L}:{object$n0+object$n1},,drop=FALSE]) V2 <- cov(x = res[{object$n0+object$n1+1L}:{object$n0+object$n1+object$n2},,drop=FALSE]) Sigh <- object$n0*V0 + object$n1*V1 + object$n2*V2 Sigma <- ginv(X = Jh) %*% Sigh %*% ginv(X = Jh) se <- sqrt(x = diag(Sigma[1L:np,1L:np,drop=FALSE])) names(x = se) <- names(x = object$baseInfo$beta) return( se ) }
add_nodes_attr <- function(graph, input = "df", data, dir_path = NULL, layer = NULL, index = "Id", include = "all"){ if(!inherits(graph, "igraph")){ stop("'graph' must be an object of class 'igraph'.") } else if (is.null(igraph::V(graph)$name)){ stop("'graph' must have nodes' names.") } nds.names <- as.character(igraph::V(graph)$name) if(input == "df"){ if(!inherits(data, "data.frame")){ stop("'data' must be a data.frame when 'input = 'df''.") } else if (!(index %in% names(data))){ stop("'index' must be the name of a column of 'data'.") } } else if (input == "shp"){ if(any(c(is.null(dir_path), is.null(layer)))){ stop("'dir_path' and 'layer' must be character strings when 'input = 'shp''.") } else if(!all(c(inherits(dir_path, "character"), inherits(layer, "character")))){ stop("'dir_path' and 'layer' must be character strings when 'input = 'shp''.") } else { sink("aux") data <- suppressWarnings(sf::as_Spatial(sf::st_read(dsn = dir_path, layer = layer))) sink(NULL) data <- data.frame(data@data) if (!(index %in% names(data))){ stop("'index' must be the name of a column of the attribute table of 'layer'.") } } } else { stop("You must specify a correct 'input' option ('df' or 'shp').") } data.names <- as.character(data[, index]) if(!all(nds.names %in% data.names)){ stop("Column 'index' from input data must contain the nodes names of 'data'.") } data <- data[which(data.names %in% nds.names), ] data <- data[match(nds.names, data[, index]), ] attrib <- setdiff( names(data), index ) if(inherits(include, "character")){ if(length(include) > 1){ attrib <- attrib[which(attrib %in% include)] } else if (include == "all"){ NULL } else { attrib <- attrib[which(attrib %in% include)] } } else { stop("'include' must be a character string or a vector of character strings.") } if(length(attrib) == 0){ stop("Elements of 'include' must be attributes names from input data.") } for (i in 1:length(attrib)){ graph <- igraph::set_vertex_attr(graph, attrib[i], value = data[, attrib[i]]) } return(graph) }
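## Hedged usage sketch for add_nodes_attr() with input = "df": attributes from a
## data.frame are matched to a named igraph object via the 'index' column. The small
## graph and attribute table below are illustrative only.
library(igraph)
g <- make_ring(3)
V(g)$name <- c("n1", "n2", "n3")
attr_df <- data.frame(Id   = c("n1", "n2", "n3"),
                      pop  = c(10, 25, 40),
                      area = c(1.2, 3.4, 0.8))
g2 <- add_nodes_attr(g, input = "df", data = attr_df, index = "Id")
vertex_attr(g2)   # now includes 'pop' and 'area'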
order_eepg <- function(size,spec,lambda,a,k,n,alpha=0.05,...){
  sample <- qeepg(initial_order(size,k,n),spec,lambda,a,...)
  pdf <- factorial(size)*cumprod(deepg(sample,spec,lambda,a,...))[size]
  if(size>5){
    return(list(sample=sample,pdf=pdf,ci_median=interval_median(size,sample,alpha)))
  }
  cat("---------------------------------------------------------------------------------------------\n")
  cat("We cannot report the confidence interval. The sample size is less than or equal to five.\n")
  return(list(sample=sample,pdf=pdf))
}
simulationSpectral <-function(object,conditionalSimulation=FALSE,Ncos=100){ theta <- object$dmodeltheta lsq <- 1/theta mu <- object$mu sigmas <- object$ssq dimension <- length(theta) omegavar <- 2/lsq omega <- matrix(rnorm(Ncos*dimension,0,sqrt(omegavar)),dimension) phi <- runif(Ncos,-pi,pi) multiplier <- sqrt(sigmas) * sqrt(2/Ncos) fun <- function(xx){ xx <- normalizeMatrix2(data.matrix(xx),0,1,object$normalizexmin,object$normalizexmax) y <- multiplier * colSums( cos(t(xx%*%omega)+ phi)) y <- y+mu return(y) } force(fun) if(conditionalSimulation){ objectSim <- object xsim <- objectSim$x ysim <- object$y-fun(xsim) objectSim$y <- ysim objectSim$yonemu <- ysim objectSim$mu <- 0 force(objectSim) funConditional <- function(xx){ y <- fun(xx) + predict(objectSim,xx)$y return(y) } return(funConditional) }else{ return(fun) } } simulationDecompose <- function(object,nsim=1,xsim,conditionalSimulation=TRUE,returnAll=FALSE,...){ len <- nrow(xsim) noise <- matrix(rnorm(len*nsim),len, nsim) covar <- getCorrelationMatrix(object,xsim) if(conditionalSimulation){ object$returnCrossCor <- TRUE ret <- predict(object,xsim) y <- ret$y psi <- ret$psi covarDifference <- covar - psi %*% object$Psinv %*% t(psi) eigv <- eigen(object$ssq *covarDifference,symmetric=T) covarDecomposed <- eigv$vectors %*% diag(sqrt(abs(eigv$values))) %*% eigv$vectors ysim <- covarDecomposed %*% noise y <- matrix(y,len,nsim) + ysim }else{ eigv <- eigen(object$ssq * covar,symmetric=T) covarDecomposed <- eigv$vectors %*% diag(sqrt(abs(eigv$values))) %*% eigv$vectors y <- object$mu + covarDecomposed %*% noise } res <- list(y=y,psi=covar) if(returnAll) return(res) else return(y) } simulate.kriging <- function(object,nsim=1,seed=NA,xsim,method="decompose",conditionalSimulation=TRUE,Ncos=10,returnAll=FALSE,...){ if (!is.na(seed)){ if (!exists(".Random.seed", envir = .GlobalEnv, inherits = FALSE)) runif(1) R.seed <- get(".Random.seed", envir = .GlobalEnv) set.seed(seed) on.exit(assign(".Random.seed", R.seed, envir = .GlobalEnv)) } if(method=="decompose"){ simresult <- simulationDecompose(object=object,xsim=xsim,nsim=nsim,conditionalSimulation=conditionalSimulation,returnAll=returnAll) return(simresult) }else if(method=="spectral"){ simresult <- NULL simfun <- NULL for(i in 1:nsim){ res <- simulationSpectral(object=object,conditionalSimulation=conditionalSimulation,Ncos=Ncos) simresult <- cbind(simresult,res(xsim)) simfun <- c(simfun,res) } if(returnAll){ return(list(y=simresult,simfun=simfun)) }else{ return(simresult) } }else{ stop("The specified method used in simulate.kriging does not exist. 
Use 'decompose' or 'spectral'")
  }
}
getCorrelationMatrix <- function(object,x){
  x <- normalizeMatrix2(data.matrix(x),0,1,object$normalizexmin,object$normalizexmax)
  k <- ncol(x)
  n <- nrow(x)
  A<-matrix(0,k,n*n)
  for(i in 1:k){
    if(object$types[i]!="factor"){
      A[i,]<-as.numeric(as.matrix(dist(x[,i])))
    }else {
      tmp <- outer(x[,i],x[,i],'!=')
      class(tmp) <- "numeric"
      A[i,]<-tmp
    }
  }
  theta <- object$dmodeltheta
  if(object$optimizeP) p <- object$P else p <- rep(2,k)
  A <- abs(A)^p
  Psi <- exp(-matrix(colSums(theta*A),n,n))
  if(object$useLambda){
    lambda <- object$dmodellambda
    Psi <- Psi+diag(lambda,n)
  }
  Psi
}
simulateFunction <- function(object,nsim=1, seed=NA, method="spectral", xsim=NA, Ncos=10, conditionalSimulation=TRUE){
  if(any(is.na(xsim)) & method=="decompose"){
    stop("simulateFunction cannot create a benchmark function via simulation if method=='decompose' and xsim is not provided (is NA).")
  }
  force(object)
  if (!is.na(seed)){
    if (!exists(".Random.seed", envir = .GlobalEnv, inherits = FALSE)) runif(1)
    R.seed <- get(".Random.seed", envir = .GlobalEnv)
    set.seed(seed)
    on.exit(assign(".Random.seed", R.seed, envir = .GlobalEnv))
  }
  if(method=="decompose"){
    simfit <- simulate(object=object,nsim=nsim,seed=NA,xsim=xsim,conditionalSimulation=conditionalSimulation,returnAll=TRUE)
    ynew <- simfit$y
    object$Psi <- simfit$psi
    object$Psinv <- MASS::ginv(object$Psi)
    object$x <- xsim
    object$scaledx <- normalizeMatrix2(data.matrix(xsim),0,1,object$normalizexmin,object$normalizexmax)
    fun <- list()
    for(i in 1:nsim){
      object$y <- ynew[,i,drop=FALSE]
      object$yonemu <- ynew[,i,drop=FALSE] - object$mu
      testFun <- NULL
      assign("testFun", eval(substitute(
        function(x){ predict(object,x)$y },
        list(object=object) ) ), envir=environment())
      fun[[i]] <- testFun
    }
  }else if(method=="spectral"){
    fun <- list()
    for(i in 1:nsim){
      fun[[i]] <- simulationSpectral(object,conditionalSimulation=conditionalSimulation,Ncos=Ncos)
    }
  }else{
    stop("The specified method in simulateFunction does not exist. Use 'decompose' or 'spectral'")
  }
  return(fun)
}
context("test-leafgl-color_utils") library(leaflet) library(sf) library(jsonify) n = 1e2 df1 = data.frame(id = 1:n, id2 = n:1, x = rnorm(n, 10, 1), y = rnorm(n, 49, 0.8)) pts = st_as_sf(df1, coords = c("x", "y"), crs = 4326) lines = suppressWarnings(st_cast(st_as_sf(atlStorms2005), "LINESTRING")); polys <- suppressWarnings(st_cast(st_as_sf(gadmCHE), "POLYGON")) test_that("Character as color", { m <- leaflet() %>% addGlPoints(data = pts, fillColor = "red", group = "pts"); expect_is(m, "leaflet") expect_is(m$x$calls[[1]]$args[[2]], "json") expect_true(validate_json(m$x$calls[[1]]$args[[2]])) rm(m) m <- leaflet() %>% addGlPoints(data = pts, fillColor = "red", src = TRUE, group = "pts") expect_is(m, "leaflet") expect_identical(m$x$calls[[1]]$method, "addGlifyPointsSrc") rm(m) m <- leaflet() %>% addGlPoints(data = st_sfc(st_geometry(pts)), fillColor = "red", group = "pts"); expect_is(m, "leaflet") expect_is(m$x$calls[[1]]$args[[2]], "json") expect_true(validate_json(m$x$calls[[1]]$args[[2]])) rm(m) m <- leaflet() %>% addGlPolylines(data = lines, color = "red", group = "lns"); expect_is(m, "leaflet") expect_is(m$x$calls[[1]]$args[[2]], "json") expect_true(validate_json(m$x$calls[[1]]$args[[2]])) rm(m) m <- leaflet() %>% addGlPolygons(data = polys, color = "red", group = "lns"); expect_is(m, "leaflet") expect_is(m$x$calls[[1]]$args[[2]], "json") expect_true(validate_json(m$x$calls[[1]]$args[[2]])) rm(m) m <- leaflet() %>% addGlPoints(data = pts, fillColor = "id", group = "pts"); expect_is(m, "leaflet") expect_is(m$x$calls[[1]]$args[[2]], "json") expect_true(validate_json(m$x$calls[[1]]$args[[2]])) rm(m) m <- leaflet() %>% addGlPolylines(data = lines, color = "Name", group = "lns"); expect_is(m, "leaflet") expect_is(m$x$calls[[1]]$args[[2]], "json") expect_true(validate_json(m$x$calls[[1]]$args[[2]])) rm(m) m <- leaflet() %>% addGlPolygons(data = polys, color = "NUTS_ID", group = "lns"); expect_is(m, "leaflet") expect_is(m$x$calls[[1]]$args[[2]], "json") expect_true(validate_json(m$x$calls[[1]]$args[[2]])) rm(m) m <- leaflet() %>% addGlPoints(data = pts, fillColor = " group = "pts"); expect_is(m, "leaflet") expect_is(m$x$calls[[1]]$args[[2]], "json") expect_true(validate_json(m$x$calls[[1]]$args[[2]])) rm(m) m <- leaflet() %>% addGlPolylines(data = lines, color = " group = "lns"); expect_is(m, "leaflet") expect_is(m$x$calls[[1]]$args[[2]], "json") expect_true(validate_json(m$x$calls[[1]]$args[[2]])) rm(m) m <- leaflet() %>% addGlPolygons(data = polys, color = " group = "lns"); expect_is(m, "leaflet") expect_is(m$x$calls[[1]]$args[[2]], "json") expect_true(validate_json(m$x$calls[[1]]$args[[2]])) rm(m) m <- leaflet() %>% addGlPolylines(data = lines, color = "FGN", group = "lns"); expect_is(m, "leaflet") expect_is(m$x$calls[[1]]$args[[2]], "json") expect_true(validate_json(m$x$calls[[1]]$args[[2]])) rm(m) m <- leaflet() %>% addGlPolylines(data = lines, color = "FGN", palette = "rainbow", group = "lns"); expect_is(m, "leaflet") expect_is(m$x$calls[[1]]$args[[2]], "json") expect_true(validate_json(m$x$calls[[1]]$args[[2]])) rm(m) }) test_that("Formula as color", { m <- leaflet() %>% addGlPoints(data = pts, fillColor = ~id, group = "pts"); expect_is(m, "leaflet") expect_is(m$x$calls[[1]]$args[[2]], "json") expect_true(validate_json(m$x$calls[[1]]$args[[2]])) rm(m) m <- leaflet() %>% addGlPoints(data = pts, fillColor = ~id, palette = "rainbow", group = "pts"); expect_is(m, "leaflet") expect_is(m$x$calls[[1]]$args[[2]], "json") expect_true(validate_json(m$x$calls[[1]]$args[[2]])) rm(m) m <- leaflet() %>% 
addGlPoints(data = pts, fillColor = ~id, palette = "rainbow", group = "pts", src = TRUE) expect_is(m, "leaflet") expect_null(m$x$calls[[1]]$args[[1]]) rm(m) m <- leaflet() %>% addGlPolylines(data = lines, color = ~Name, group = "lns"); expect_is(m, "leaflet") expect_is(m$x$calls[[1]]$args[[2]], "json") expect_true(validate_json(m$x$calls[[1]]$args[[2]])) rm(m) m <- leaflet() %>% addGlPolylines(data = lines, color = ~Name, palette = "rainbow", group = "lns"); expect_is(m, "leaflet") expect_is(m$x$calls[[1]]$args[[2]], "json") expect_true(validate_json(m$x$calls[[1]]$args[[2]])) rm(m) m <- leaflet() %>% addGlPolylines(data = lines, color = ~Name, palette = "rainbow", group = "lns", src = TRUE); expect_is(m, "leaflet") expect_null(m$x$calls[[1]]$args[[1]]) rm(m) m <- leaflet() %>% addGlPolygons(data = polys, color = ~NAME_1, group = "lns"); expect_is(m, "leaflet") expect_is(m$x$calls[[1]]$args[[2]], "json") expect_true(validate_json(m$x$calls[[1]]$args[[2]])) rm(m) m <- leaflet() %>% addGlPolygons(data = polys, color = ~NAME_1, palette = "rainbow", group = "lns"); expect_is(m, "leaflet") expect_is(m$x$calls[[1]]$args[[2]], "json") expect_true(validate_json(m$x$calls[[1]]$args[[2]])) rm(m) m <- leaflet() %>% addGlPolygons(data = polys, color = ~NAME_1, palette = "rainbow", group = "lns", src = TRUE) expect_is(m, "leaflet") expect_null(m$x$calls[[1]]$args[[1]]) }) test_that("Tables as color", { m <- leaflet() %>% addGlPoints(data = pts, fillColor = cbind(180,1,10), group = "pts"); expect_is(m, "leaflet") expect_is(m$x$calls[[1]]$args[[2]], "json") expect_true(validate_json(m$x$calls[[1]]$args[[2]])) rm(m) m <- leaflet() %>% addGlPoints(data = pts, fillColor = cbind("180","1","10"), group = "pts"); expect_is(m, "leaflet") expect_is(m$x$calls[[1]]$args[[2]], "json") expect_true(validate_json(m$x$calls[[1]]$args[[2]])) rm(m) m <- leaflet() %>% addGlPoints(data = pts, fillColor = cbind(0.12, 0.9, 0.01), group = "pts"); expect_is(m, "leaflet") expect_is(m$x$calls[[1]]$args[[2]], "json") expect_true(validate_json(m$x$calls[[1]]$args[[2]])) rm(m) m <- leaflet() %>% addGlPolylines(data = lines, color = cbind(180,1,10), group = "lns"); expect_is(m, "leaflet") expect_is(m$x$calls[[1]]$args[[2]], "json") expect_true(validate_json(m$x$calls[[1]]$args[[2]])) rm(m) m <- leaflet() %>% addGlPolygons(data = polys, color = cbind(180,1,10), group = "lns"); expect_is(m, "leaflet") expect_is(m$x$calls[[1]]$args[[2]], "json") expect_true(validate_json(m$x$calls[[1]]$args[[2]])) rm(m) m <- leaflet() %>% addGlPoints(data = pts, fillColor = matrix(sample(1:255, nrow(pts)*3, TRUE), ncol = 3), group = "pts"); expect_is(m, "leaflet") expect_is(m$x$calls[[1]]$args[[2]], "json") expect_true(validate_json(m$x$calls[[1]]$args[[2]])) rm(m) m <- leaflet() %>% addGlPolylines(data = lines, color = matrix(sample(1:255, nrow(lines)*3, TRUE), ncol = 3), group = "lns"); expect_is(m, "leaflet") expect_is(m$x$calls[[1]]$args[[2]], "json") expect_true(validate_json(m$x$calls[[1]]$args[[2]])) rm(m) m <- leaflet() %>% addGlPolygons(data = polys, color = matrix(sample(1:255, nrow(polys)*3, TRUE), ncol = 3), group = "lns"); expect_is(m, "leaflet") expect_is(m$x$calls[[1]]$args[[2]], "json") expect_true(validate_json(m$x$calls[[1]]$args[[2]])) rm(m) m <- leaflet() %>% addGlPoints(data = pts, fillColor = data.frame(cbind(180,1,10)), group = "pts"); expect_is(m, "leaflet") expect_is(m$x$calls[[1]]$args[[2]], "json") expect_true(validate_json(m$x$calls[[1]]$args[[2]])) rm(m) m <- leaflet() %>% addGlPoints(data = pts, fillColor = 
data.frame(matrix(sample(1:255, nrow(pts)*3, TRUE), ncol = 3)), group = "pts"); expect_is(m, "leaflet") expect_is(m$x$calls[[1]]$args[[2]], "json") expect_true(validate_json(m$x$calls[[1]]$args[[2]])) rm(m) }) test_that("Numeric as color", { m <- leaflet() %>% addGlPoints(data = pts, fillColor = 120L, group = "pts"); expect_is(m, "leaflet") expect_is(m$x$calls[[1]]$args[[2]], "json") expect_true(validate_json(m$x$calls[[1]]$args[[2]])) rm(m) m <- leaflet() %>% addGlPolylines(data = lines, color = 120L, group = "lns"); expect_is(m, "leaflet") expect_is(m$x$calls[[1]]$args[[2]], "json") expect_true(validate_json(m$x$calls[[1]]$args[[2]])) rm(m) m <- leaflet() %>% addGlPolygons(data = polys, color = 120L, group = "lns"); expect_is(m, "leaflet") expect_is(m$x$calls[[1]]$args[[2]], "json") expect_true(validate_json(m$x$calls[[1]]$args[[2]])) rm(m) m <- leaflet() %>% addGlPoints(data = pts, fillColor = 30.43, group = "pts"); expect_is(m, "leaflet") expect_is(m$x$calls[[1]]$args[[2]], "json") expect_true(validate_json(m$x$calls[[1]]$args[[2]])) rm(m) m <- leaflet() %>% addGlPolylines(data = lines, color = 30.43, group = "lns"); expect_is(m, "leaflet") expect_is(m$x$calls[[1]]$args[[2]], "json") expect_true(validate_json(m$x$calls[[1]]$args[[2]])) rm(m) m <- leaflet() %>% addGlPolygons(data = polys, color = 30.43, group = "lns"); expect_is(m, "leaflet") expect_is(m$x$calls[[1]]$args[[2]], "json") expect_true(validate_json(m$x$calls[[1]]$args[[2]])) rm(m) m <- leaflet() %>% addGlPoints(data = pts, fillColor = as.factor(130), group = "pts"); expect_is(m, "leaflet") expect_is(m$x$calls[[1]]$args[[2]], "json") expect_true(validate_json(m$x$calls[[1]]$args[[2]])) rm(m) m <- leaflet() %>% addGlPoints(data = pts, fillColor = as.factor(c("asd")), group = "pts"); expect_is(m, "leaflet") expect_is(m$x$calls[[1]]$args[[2]], "json") expect_true(validate_json(m$x$calls[[1]]$args[[2]])) rm(m) m <- expect_warning(leaflet() %>% addGlPoints(data = pts, fillColor = as.factor(c("asd","bdc","fds")), group = "pts")) expect_is(m, "leaflet") expect_is(m$x$calls[[1]]$args[[2]], "json") expect_true(validate_json(m$x$calls[[1]]$args[[2]])) rm(m) m <- leaflet() %>% addGlPolylines(data = lines, fillColor = as.factor(130), group = "lns"); expect_is(m, "leaflet") expect_is(m$x$calls[[1]]$args[[2]], "json") expect_true(validate_json(m$x$calls[[1]]$args[[2]])) rm(m) m <- leaflet() %>% addGlPolygons(data = polys, color = as.factor(130), group = "lns"); expect_is(m, "leaflet") expect_is(m$x$calls[[1]]$args[[2]], "json") expect_true(validate_json(m$x$calls[[1]]$args[[2]])) rm(m) }) test_that("List as color", { m <- expect_warning(leaflet() %>% addGlPoints(data = pts, fillColor = list(1,2), group = "pts")) expect_is(m, "leaflet") expect_is(m$x$calls[[1]]$args[[2]], "json") expect_true(validate_json(m$x$calls[[1]]$args[[2]])) rm(m) m <- leaflet() %>% addGlPoints(data = pts, fillColor = list(100), group = "pts") expect_is(m, "leaflet") expect_is(m$x$calls[[1]]$args[[2]], "json") expect_true(validate_json(m$x$calls[[1]]$args[[2]])) rm(m) m <- leaflet() %>% addGlPoints(data = pts, fillColor = list(matrix(sample(1:255, nrow(pts)*3, replace = T), ncol = 3)), group = "pts") expect_is(m, "leaflet") expect_is(m$x$calls[[1]]$args[[2]], "json") expect_true(validate_json(m$x$calls[[1]]$args[[2]])) rm(m) m <- leaflet() %>% addGlPoints(data = pts, fillColor = lapply(1:nrow(pts), function(x) matrix(sample(1:255, 3), ncol = 3)), group = "pts") expect_is(m, "leaflet") expect_is(m$x$calls[[1]]$args[[2]], "json") 
expect_true(validate_json(m$x$calls[[1]]$args[[2]])) rm(m) m <- expect_warning(leaflet() %>% addGlPoints(data = pts, fillColor = list(c(100,200), cbind(2,1)), group = "pts")) expect_is(m, "leaflet") expect_is(m$x$calls[[1]]$args[[2]], "json") expect_true(validate_json(m$x$calls[[1]]$args[[2]])) rm(m) }) test_that("JSON as color", { m <- leaflet() %>% addGlPoints(data = pts, fillColor = jsonify::to_json(list(matrix(sample(1:255, nrow(pts)*3, replace = T), ncol = 3))), group = "pts"); expect_is(m, "leaflet") expect_is(m$x$calls[[1]]$args[[2]], "json") expect_true(validate_json(m$x$calls[[1]]$args[[2]])) rm(m) m <- leaflet() %>% addGlPoints(data = pts, fillColor = jsonify::to_json(data.frame(r = 54, g = 186, b = 1)), group = "pts"); expect_is(m, "leaflet") expect_is(m$x$calls[[1]]$args[[2]], "json") expect_true(validate_json(m$x$calls[[1]]$args[[2]])) rm(m) m <- expect_warning(leaflet() %>% addGlPoints(data = pts, fillColor = jsonify::to_json(data.frame(r = c(54, 123), g = c(1, 186), b = c(1, 123))), group = "pts")) expect_is(m, "leaflet") expect_is(m$x$calls[[1]]$args[[2]], "json") expect_true(validate_json(m$x$calls[[1]]$args[[2]])) rm(m) }) test_that("Date/POSIX* as color", { m <- leaflet() %>% addGlPoints(data = pts, fillColor = as.POSIXlt(Sys.time(), "America/New_York"), group = "pts"); expect_is(m, "leaflet") expect_is(m$x$calls[[1]]$args[[2]], "json") expect_true(validate_json(m$x$calls[[1]]$args[[2]])) rm(m) m <- leaflet() %>% addGlPoints(data = pts, fillColor = Sys.time(), group = "pts"); expect_is(m, "leaflet") expect_is(m$x$calls[[1]]$args[[2]], "json") expect_true(validate_json(m$x$calls[[1]]$args[[2]])) rm(m) m <- leaflet() %>% addGlPoints(data = pts, fillColor = Sys.Date(), group = "pts"); expect_is(m, "leaflet") expect_is(m$x$calls[[1]]$args[[2]], "json") expect_true(validate_json(m$x$calls[[1]]$args[[2]])) rm(m) }) test_that("Warnings / Errors", { m <- expect_warning(leaflet() %>% addGlPoints(data = pts, fillColor = matrix(33:98, ncol = 3, byrow = F), group = "pts")) expect_is(m, "leaflet") expect_is(m$x$calls[[1]]$args[[2]], "json") expect_true(validate_json(m$x$calls[[1]]$args[[2]])) rm(m) m <- expect_warning(leaflet() %>% addGlPoints(data = pts, fillColor = data.frame(matrix(33:98, ncol = 3, byrow = F)), group = "pts")); expect_is(m, "leaflet") expect_is(m$x$calls[[1]]$args[[2]], "json") expect_true(validate_json(m$x$calls[[1]]$args[[2]])) rm(m) m <- expect_warning(leaflet() %>% addGlPoints(data = pts, fillColor = 1:33, group = "pts")) expect_is(m, "leaflet") expect_is(m$x$calls[[1]]$args[[2]], "json") expect_true(validate_json(m$x$calls[[1]]$args[[2]])) rm(m) expect_warning(expect_error(leaflet() %>% addGlPoints(data = pts, fillColor = cbind("asf","fasd", "fasd"), group = "pts"))) })
knitr::opts_chunk$set( collapse = TRUE, comment = " fig.width=6, fig.height=5 ) library(synr) library(dbscan) library(plotly) plot_color_clusters <- function( color_matrix, cluster_vector, grapheme_vector=NULL, ax_min = 0, ax_max = 1 ) { if (!is.null(grapheme_vector)) { fig <- plot_ly( x=color_matrix[, 1], y=color_matrix[, 2], z=color_matrix[, 3], type="scatter3d", mode="markers", color=cluster_vector, customdata = grapheme_vector, hovertemplate = 'Axis 1: %{x}<br>Axis 2: %{y}<br>Axis 3: %{z}<br>Grapheme: %{customdata}' ) } else { fig <- plot_ly( x=color_matrix[, 1], y=color_matrix[, 2], z=color_matrix[, 3], type="scatter3d", mode="markers", color=cluster_vector, hovertemplate = 'Axis 1: %{x}<br>Axis 2: %{y}<br>Axis 3: %{z}<br>' ) } axis_title_font <- list( family = "Courier New, monospace", size = 18, color = " ) x_axis_layout <- list( title = list( text = "Axis 1", font = axis_title_font ), range = c(ax_min, ax_max) ) y_axis_layout <- list( title = list( text = "Axis 2", font = axis_title_font ), range = c(ax_min, ax_max) ) z_axis_layout <- list( title = list( text = "Axis 3", font = axis_title_font ), range = c(ax_min, ax_max) ) fig <- fig %>% layout( scene = list( xaxis = x_axis_layout, yaxis = y_axis_layout, zaxis = z_axis_layout ) ) %>% partial_bundle() return(fig) } plot_actual_colors <- function( color_matrix, hex_codes, grapheme_vector = NULL, ax_min = 0, ax_max = 1 ) { if (!is.null(grapheme_vector)) { fig <- plot_ly( x=color_matrix[, 1], y=color_matrix[, 2], z=color_matrix[, 3], type="scatter3d", mode="markers", customdata = grapheme_vector, marker = list(color = hex_codes), hovertemplate = 'Axis 1: %{x}<br>Axis 2: %{y}<br>Axis 3: %{z}<br>Grapheme: %{customdata}' ) } else { fig <- plot_ly( x=color_matrix[, 1], y=color_matrix[, 2], z=color_matrix[, 3], type="scatter3d", mode="markers", marker = list(color = hex_codes), hovertemplate = 'Axis 1: %{x}<br>Axis 2: %{y}<br>Axis 3: %{z}<br>' ) } axis_title_font <- list( family = "Courier New, monospace", size = 18, color = " ) x_axis_layout <- list( title = list( text = "Axis 1", font = axis_title_font ), range = c(ax_min, ax_max) ) y_axis_layout <- list( title = list( text = "Axis 2", font = axis_title_font ), range = c(ax_min, ax_max) ) z_axis_layout <- list( title = list( text = "Axis 3", font = axis_title_font ), range = c(ax_min, ax_max) ) fig <- fig %>% layout( scene = list( xaxis = x_axis_layout, yaxis = y_axis_layout, zaxis = z_axis_layout ) ) %>% partial_bundle() return(fig) } p <- Participant$new() g <- Grapheme$new(symbol='A') g$set_colors(c(" p$add_grapheme(g) for (l in LETTERS[2:length(LETTERS)]) { g <- Grapheme$new(symbol=l) g$set_colors(c(" p$add_grapheme(g) } p$get_plot() get_random_color <- function() { r_val <- runif(1, 0, 1) g_val <- runif(1, 0, 1) b_val <- runif(1, 0, 1) alpha_val <- runif(1, 0, 1) hex_val <- rgb(r_val, g_val, b_val, alpha_val) return(hex_val) } p <- Participant$new(id="1") for (l in LETTERS) { g <- Grapheme$new(symbol=l) g$set_colors(c(get_random_color(), get_random_color(), get_random_color()), "Luv") p$add_grapheme(g) } p$get_plot() get_bluegreenish_color <- function(green_val) { hex_val <- rgb(0, green_val, 1, 1) return(hex_val) } p <- Participant$new(id="1") green_vals <- seq(0, 1, length.out = length(LETTERS) * 3) val_counter <- 1 for (l in LETTERS) { g <- Grapheme$new(symbol=l) g$set_colors( c( get_bluegreenish_color(green_vals[val_counter]), get_bluegreenish_color(green_vals[val_counter + 1]), get_bluegreenish_color(green_vals[val_counter + 2]) ), "Luv" ) p$add_grapheme(g) val_counter <- 
val_counter + 3 } p$get_plot() p <- Participant$new(id="1") green_vals <- seq(0.65, 0.8, length.out = length(LETTERS) * 3) val_counter <- 1 for (l in LETTERS) { g <- Grapheme$new(symbol=l) g$set_colors( c( get_bluegreenish_color(green_vals[val_counter]), get_bluegreenish_color(green_vals[val_counter + 1]), get_bluegreenish_color(green_vals[val_counter + 2]) ), "Luv" ) p$add_grapheme(g) val_counter <- val_counter + 3 } p$get_plot() get_random_color_bright_color <- function() { r_val <- runif(1, 0.3, 1) g_val <- runif(1, 0.3, 1) b_val <- runif(1, 0.3, 1) hex_val <- rgb(r_val, g_val, b_val, 1) return(hex_val) } p <- Participant$new(id="1") for (dig in as.character(0:9)) { g <- Grapheme$new(symbol=dig) g$set_colors( c( get_random_color_bright_color(), get_random_color_bright_color(), get_random_color_bright_color() ), "sRGB" ) p$add_grapheme(g) } color_mat <- p$get_nonna_color_resp_mat() hex_vals <- apply(color_mat, 1, function(x) {rgb(x[1], x[2], x[3])}) mid_point <- apply(color_mat, 2, function(x) {mean(x)}) mid_hex <- rgb(0, 0, 0) plot_actual_colors( rbind(color_mat, mid_point), c(hex_vals, mid_hex), c(rep(0:9, each=3), 'MIDDLE') ) p_dbscan_ex <- Participant$new(id="1") resp_colors <- c( ' ' ' ' ' ' ' ) resp_counter <- 1 for (dig in as.character(0:6)) { g <- Grapheme$new(symbol=dig) g$set_colors( c( resp_colors[resp_counter], resp_colors[resp_counter+1], resp_colors[resp_counter+2] ), "sRGB" ) p_dbscan_ex$add_grapheme(g) resp_counter <- resp_counter + 3 } color_mat_dbscan_ex <- p_dbscan_ex$get_nonna_color_resp_mat() hex_vals_dbscan_ex <- apply(color_mat_dbscan_ex, 1, function(x) {rgb(x[1], x[2], x[3])}) plot_actual_colors( color_mat_dbscan_ex, hex_vals_dbscan_ex, rep(0:6, each=3) ) dbscan_res_dbscan_ex <- dbscan(color_mat_dbscan_ex, eps = 0.15, minPts = 3) plot_color_clusters( color_mat_dbscan_ex, dbscan_res_dbscan_ex$cluster, rep(0:6, each=3) ) get_bluereddish_color <- function(red_val) { hex_val <- rgb(red_val, 0, 1, 1) return(hex_val) } p <- Participant$new(id="1") letts1 <- LETTERS[1:6] letts2 <- LETTERS[7:12] green_vals <- seq(0, 1, length.out = length(letts1) * 3) red_vals <- seq(0, 1, length.out = length(letts1) * 3) val_counter <- 1 for (l in letts1) { g <- Grapheme$new(symbol=l) g$set_colors( c( get_bluegreenish_color(green_vals[val_counter]), get_bluegreenish_color(green_vals[val_counter + 1]), get_bluegreenish_color(green_vals[val_counter + 2]) ), "sRGB" ) p$add_grapheme(g) val_counter <- val_counter + 3 } red_vals <- seq(0, 1, length.out = length(letts1) * 3) val_counter <- 1 for (l in letts2) { g <- Grapheme$new(symbol=l) g$set_colors( c( get_bluereddish_color(green_vals[val_counter]), get_bluereddish_color(green_vals[val_counter + 1]), get_bluereddish_color(green_vals[val_counter + 2]) ), "sRGB" ) p$add_grapheme(g) val_counter <- val_counter + 3 } color_mat <- p$get_nonna_color_resp_mat() hex_vals <- apply(color_mat, 1, function(x) {rgb(x[1], x[2], x[3])}) plot_actual_colors( color_mat, hex_vals, c(rep(letts1, each=3), rep(letts2, each=3)) ) pg <- create_participantgroup( raw_df=synr_exampledf_large, n_trials_per_grapheme=3, id_col_name="participant_id", symbol_col_name="trial_symbol", color_col_name="response_color", time_col_name="response_time", color_space_spec="Luv" ) set.seed(1) val_df <- pg$check_valid_get_twcv_scores( min_complete_graphemes = 4, dbscan_eps = 20, dbscan_min_pts = 4, max_var_tight_cluster = 150, max_prop_single_tight_cluster = 0.6, safe_num_clusters = 3, safe_twcv = 250, complete_graphemes_only = TRUE, symbol_filter = LETTERS ) head(val_df) val_id_df <- 
cbind(list(participant_id=pg$get_ids()), val_df) head(val_id_df)
Profile_likelihood_cd_nm_joint_D_KT_neg <- function (par,listr,x, Zestfun,...,v, silly=-10^(40)) { n <- NULL sig <- NULL sumX <- NULL no_of_roots <- NULL no_of_roots_star <- NULL temp <- NULL Zq <- NULL Zstarq <- NULL xstar <- NULL xdstar <- NULL s <- NULL cond_alphas <- NULL cond_ord_dep <- NULL cond_ord_pairs <- NULL vdep <- NULL z <- list() Pl <- silly X <- vector('list',length(listr)) Y <- vector('list',length(listr)) Z <- vector('list',length(listr)) Zstar <- vector('list',length(listr)) index_alpha <- seq(1,((2*(length(listr)) ) -1),by=2) index_beta <- seq(2,((2*(length(listr)) ) ),by=2) alpha <- par[index_alpha] beta <- par[index_beta] xstar <- rep(v,(length(listr)-1)) xdstar <- rep(v,(length(listr)-1)) xdepstar <- rep(vdep,length(listr)) for(i in 1:length(listr)) { cond_alphas[i] <- ((alpha[i]) <= 1) temp <- as.matrix(listr[[i]]) X[[i]] <- temp[,1][temp[,1]>x] vdep[i] <- max(X[[i]]) n[i] <- length(X[[i]]) Y[[i]] <- temp[,2][temp[,1]>x] Z[[i]] <- (Y[[i]] - alpha[i]*X[[i]])/(X[[i]]^beta[i]) Zstar[[i]] <- (Y[[i]] + X[[i]]) Zq[i] <- Zestfun(Z[[i]],...) Zstarq[i] <- Zestfun(Zstar[[i]],...) sig[i] <- (1/n[i]) * sum ((Z[[i]]-mean(Z[[i]]))^2) sumX[i] <- sum(beta[i]*log(X[[i]])) } if(all(cond_alphas==TRUE)) { for(i in 1:length(listr)) { temp_roots_star <- roots(lev=v,a=alpha[i],c=-1, b=beta[i],d=0,Zj=Zq[i], Zk=Zstarq[i]) xdepstar[i] <- temp_roots_star$xstar } } if(all(alpha <= 1) & all(alpha >= -1) & all(beta <= 1) & all(cond_alphas==TRUE)) { for(j in 1:length(listr)) { cond_ord_dep[j] <- ( -1 <= (min(alpha[j],(Dcond(v,alpha[j],beta[j],-1,0, Zq[j],Zstarq[j])), (Dcond(xdepstar[j],alpha[j],beta[j],-1, 0,Zq[j], Zstarq[j]))))) } condition <- (all(cond_ord_dep==TRUE)) if( condition == TRUE ) { Pl <- sum(((-(n/2)*log (2*pi*sig)) - sumX - (n/2))) } if(condition == FALSE) { Pl <- silly } } if((all(alpha <= 1) ==FALSE) || (all(alpha >= -1)==FALSE) || (all(beta < 1)==FALSE) || (all(cond_alphas==TRUE)==FALSE)) { Pl <- silly } z$Pl <- Pl return(z$Pl) }
[ { "title": "Putting together multinomial discrete regressions by combining simple logits", "href": "http://andrewgelman.com/2011/06/29/putting_togethe/" }, { "title": "R in the insurance industry", "href": "http://www.magesblog.com/2011/09/r-and-insurance.html" }, { "title": "Strategy Diversification in R – follow up", "href": "https://rbresearch.wordpress.com/2012/06/25/strategy-diversification-in-r-follow-up/" }, { "title": "R 3.0.0 and Raring Ringtail (Ubuntu 13.04)", "href": "http://www.personal.psu.edu/mar36/blogs/the_ubuntu_r_blog/2013/04/r-300-and-raring-ringtail-ubuntu-1304.html" }, { "title": "Videos from ‘Tiki + PluginR’ workshop available", "href": "http://ueb.vhir.org/blogpost3-Videos-from-27Tiki-2B-PluginR-27-workshop-available" }, { "title": "Set up R/Stan on Amazon EC2", "href": "http://christophergandrud.blogspot.com/2014/12/set-up-rstan-on-amazon-ec2.html" }, { "title": "Temporal networks with igraph and R (with 20 lines of code!)", "href": "http://estebanmoro.org/2012/11/temporal-networks-with-igraph-and-r-with-20-lines-of-code/" }, { "title": "(Another) introduction to R", "href": "https://beckmw.wordpress.com/2013/05/27/another-introduction-to-r/" }, { "title": "Improved filtfilt() for R", "href": "http://dankelley.github.io//r/2014/02/19/filtfilt.html" }, { "title": "A quick look at RStudio’s R notebooks", "href": "http://www.win-vector.com/blog/2016/10/a-quick-look-at-rstudios-r-notebooks/" }, { "title": "Unit root versus breaking trend: Perron’s criticism", "href": "http://programming-r-pro-bro.blogspot.com/2011/11/unit-root-versus-breaking-trend-perrons.html" }, { "title": "The shadows and light of models", "href": "https://feedproxy.google.com/~r/PortfolioProbeRLanguage/~3/1s4TcXWKKSg/" }, { "title": "Banging on the JGBs", "href": "http://timelyportfolio.blogspot.com/2013/04/banging-on-jgbs.html" }, { "title": "Contribute to The R Journal with LyX/knitr", "href": "http://yihui.name/en/2013/02/contribute-to-the-r-journal-with-lyx-knitr/" }, { "title": "New shinyjs version: Useful tools for any Shiny app developer + easily call JavaScript functions as R code", "href": "http://deanattali.com/2015/05/31/shinyjs-extend/" }, { "title": "Rcpp11 3.1.2.0", "href": "https://web.archive.org/web/http://blog.r-enthusiasts.com/2014/11/10/rcpp11-3-1-2-0/" }, { "title": "How to save high frequency data in mongodb", "href": "https://web.archive.org/web/http://www.investuotojas.eu/2012/02/24/save-high-frequency-data-in-mongodb/" }, { "title": "use simplify to remove redundancy of enriched GO terms", "href": "https://web.archive.org/web/http://ygc.name/2015/10/21/use-simplify-to-remove-redundancy-of-enriched-go-terms/" }, { "title": "R activity around the world", "href": "http://blog.rapporter.net/2014/04/r-activity-around-world.html?utm_source=feedburner&utm_medium=feed&utm_campaign=Feed%3A+rapporter-r+%28R+stories+by+Rapporter%29" }, { "title": "analyze the basic stand alone medicare claims public use files (bsapufs) with r and monetdb", "href": "http://www.asdfree.com/2012/12/analyze-basic-stand-alone-medicare.html" }, { "title": "Naive Bayes: A Generative Model and Big Data Classifier", "href": "https://www.rstudio.com/rviews/2016/11/02/naive-bayes-a-generative-model-and-big-data-classifier/" }, { "title": "Timeline graph with ggplot2", "href": "https://web.archive.org/web/http://fishyoperations.com/r/timeline-graph-with-ggplot2/?utm_source=rss&utm_medium=rss&utm_campaign=timeline-graph-with-ggplot2" }, { "title": "Maximum likelihood", "href": 
"http://www.quantumforest.com/2011/10/maximum-likelihood/" }, { "title": "Loops in R: Think different", "href": "http://blog.revolutionanalytics.com/2010/11/loops-in-r.html" }, { "title": "Animated GIF Annual Correlation of 48 Industries for 50 Years", "href": "http://timelyportfolio.blogspot.com/2012/08/animated-gif-annual-correlation-of-48.html" }, { "title": "Ack! Duplicates in the Data!", "href": "https://rforwork.info/2012/05/04/ack-duplicates/" }, { "title": "Look Familiar? Mapping in R", "href": "http://0utlier.blogspot.com/2013/05/look-familiar-mapping-in-r.html" }, { "title": "Reduce Memory Use for Large Datasets", "href": "http://www.rtexttools.com/blog/reduce-memory-use-for-large-datasets" }, { "title": "Horizon plots with ggplot (not)", "href": "http://stevepowell.blot.im/horizon-plots-with-ggplot-not/" }, { "title": "R Style Guide", "href": "https://csgillespie.wordpress.com/2010/11/23/r-style-guide/" }, { "title": "Flying: Boredom and Terror", "href": "https://feedproxy.google.com/~r/graphoftheweek/fzVA/~3/KpRN-JToFiU/flying-boredom-and-terror.html" }, { "title": "Preferential attachment applied to frequency of accessing a variable", "href": "http://shape-of-code.coding-guidelines.com/2013/05/17/preferential-attachment-applied-to-frequency-of-accessing-a-variable/" }, { "title": "R and RStudio incompatibility with Yosemite Mac OS X 10.10", "href": "http://www.compmath.com/blog/2014/10/r-and-rstudio-incompatibility-with-yosemite-mac-os-x-10-10/" }, { "title": "analyze the progress in international reading literacy study (pirls) with r", "href": "http://www.asdfree.com/2015/06/analyze-progress-in-international.html" }, { "title": "BayesFactor version 0.9.10 released to CRAN", "href": "http://bayesfactor.blogspot.com/2015/02/bayesfactor-version-0910-released-to.html" }, { "title": "Cleaning up oversized github repositories for R and beyond", "href": "http://robinlovelace.net/r/2014/06/25/pruning-a-giant-gh-repo.html" }, { "title": "Simple Pharmacokinetics with Jags", "href": "http://wiekvoet.blogspot.com/2014/03/simple-pharmacokinetics-with-jags.html" }, { "title": "Shiny 0.12: Interactive Plots with ggplot2", "href": "https://blog.rstudio.org/2015/06/16/shiny-0-12-interactive-plots-with-ggplot2/" }, { "title": "Another release day: ggRandomForests V1.1.3", "href": "https://jehrlinger.wordpress.com/2015/01/08/another-release-day-ggrandomforests-v1-1-3/" }, { "title": "More Recursion in R", "href": "http://www.stat.tamu.edu/site-directory/?q=node%2F48" }, { "title": "Using Sweave", "href": "https://web.archive.org/web/http://sharpstatistics.co.uk/r/using-sweave/" }, { "title": "A Data Scientist’s Perspective on Microsoft R", "href": "http://blog.revolutionanalytics.com/2016/04/data-scientist-perspective.html" }, { "title": "Yet another post on google scholar data analysis", "href": "http://tuxette.nathalievilla.org/?p=1682" }, { "title": "Using R for Introductory Statistics, Chapter 5, hypergeometric distribution", "href": "http://digitheadslabnotebook.blogspot.com/2011/02/using-r-for-introductory-statistics_21.html" }, { "title": "How to Add WAR Metrics to your Lahman Database", "href": "https://www.datascienceriot.com/how-to-add-war-metrics-to-your-lahman-database/kris/" }, { "title": "Bertrand or (The Importance of Defining Problems Properly)", "href": "https://aschinchon.wordpress.com/2015/05/13/bertrand-or-the-importance-of-defining-problems-properly/" }, { "title": "I’m Hiring!", "href": "http://www.gettinggeneticsdone.com/2012/02/im-hiring.html" }, { "title": "Secure HTTPS 
Connections for R", "href": "https://blog.rstudio.org/2015/08/17/secure-https-connections-for-r/" }, { "title": "Colouring a 3D plot according to z-values", "href": "https://rtricks.wordpress.com/2009/05/03/colouring-a-3d-plot-according-to-z-values/" }, { "title": "”How to draw the line” with ggplot2", "href": "https://martinsbioblogg.wordpress.com/2013/05/31/how-to-draw-the-line-with-ggplot2/" } ]
ggm_compare <- function(Yg1, Yg2, method = "spearman", alpha = 0.05){
  # Fit the GGM in each group and take the difference of the bootstrap samples
  fit1 <- ggm_inference(Yg1, method = method)
  fit2 <- ggm_inference(Yg2, method = method)
  diff <- fit1$boot_samps - fit2$boot_samps
  ci_lower <- alpha / 2
  ci_upper <- 1 - ci_lower
  p <- ncol(Yg1)
  adj <- wadj <- matrix(0, nrow = p, ncol = p)
  # Bootstrap confidence interval for each edge difference (columns of diff)
  cis <- t(apply(diff, 2, quantile, probs = c(ci_lower, ci_upper)))
  # An edge differs between groups when its interval excludes zero
  adj[upper.tri(adj)] <- ifelse(cis[, 1] < 0 & cis[, 2] > 0, 0, 1)
  adj <- symm_mat(adj)
  # Weighted adjacency: mean bootstrap difference, kept only for differing edges
  wadj[upper.tri(wadj)] <- colMeans(diff)
  wadj <- symm_mat(wadj) * adj
  returned_object <- list(adj = adj, wadj = wadj, cis = cis)
  class(returned_object) <- c("ggmnonreg", "ggm_compare")
  return(returned_object)
}
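## Hedged usage sketch for ggm_compare(), assuming ggm_inference() and symm_mat()
## from the same package are available; the data below are simulated purely for
## illustration and are not part of the original source.
# set.seed(1)
# Yg1 <- matrix(rnorm(200 * 5), ncol = 5)
# Yg2 <- matrix(rnorm(200 * 5), ncol = 5)
# comp <- ggm_compare(Yg1, Yg2, method = "spearman", alpha = 0.05)
# comp$adj   # 1 = edge whose group-difference CI excludes zero
# comp$wadj  # mean bootstrap difference retained for those edges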
scale_point_colour_discrete = function(..., aesthetics = "point_colour") scale_colour_hue(..., aesthetics = aesthetics) scale_point_color_discrete = scale_point_colour_discrete scale_point_colour_continuous = function(..., aesthetics = "point_colour", guide = "colourbar2") { scale_colour_continuous(..., aesthetics = aesthetics, guide = guide) } scale_point_color_continuous = scale_point_colour_continuous scale_point_fill_discrete = function(..., aesthetics = "point_fill") scale_colour_hue(..., aesthetics = aesthetics) scale_point_fill_continuous = function(..., aesthetics = "point_fill", guide = "colourbar2") { scale_colour_continuous(..., aesthetics = aesthetics, guide = guide) } scale_point_alpha_continuous = function(..., range = c(0.1, 1)) { continuous_scale("point_alpha", "point_alpha_c", rescale_pal(range), ...) } scale_point_alpha_discrete = function(..., range = c(0.1, 1)) { discrete_scale("point_alpha", "point_alpha_d", function(n) seq(range[1], range[2], length.out = n), ...) } scale_point_size_continuous = function(..., range = c(1, 6)) continuous_scale("point_size", "point_size_c", area_pal(range), ...) scale_point_size_discrete = function(..., range = c(1, 6), na.translate = FALSE) { force(range) discrete_scale("point_size", "point_size_d", function(n) seq(range[1], range[2], length.out = n), na.translate = na.translate, ...) } scale_interval_colour_discrete = function(..., aesthetics = "interval_colour") scale_colour_hue(..., aesthetics = aesthetics) scale_interval_color_discrete = scale_interval_colour_discrete scale_interval_colour_continuous = function(..., aesthetics = "interval_colour", guide = "colourbar2") { scale_colour_continuous(..., aesthetics = aesthetics, guide = guide) } scale_interval_color_continuous = scale_interval_colour_continuous scale_interval_alpha_continuous = function(..., range = c(0.1, 1)) { continuous_scale("interval_alpha", "interval_alpha_c", rescale_pal(range), ...) } scale_interval_alpha_discrete = function(..., range = c(0.1, 1)) { discrete_scale("interval_alpha", "interval_alpha_d", function(n) seq(range[1], range[2], length.out = n), ...) } scale_interval_size_continuous = function(..., range = c(1, 6)) continuous_scale("interval_size", "interval_size_c", area_pal(range), ...) scale_interval_size_discrete = function(..., range = c(1, 6), na.translate = FALSE) { force(range) discrete_scale("interval_size", "interval_size_d", function(n) seq(range[1], range[2], length.out = n), na.translate = na.translate, ...) } scale_interval_linetype_discrete = function(..., na.value = "blank") { discrete_scale("interval_linetype", "interval_linetype_d", linetype_pal(), na.value = na.value, ...) } scale_interval_linetype_continuous = function(...) 
{ stop0("A continuous variable cannot be mapped to linetype") } scale_slab_colour_discrete = function(..., aesthetics = "slab_colour") scale_colour_hue(..., aesthetics = aesthetics) scale_slab_color_discrete = scale_slab_colour_discrete scale_slab_colour_continuous = function(..., aesthetics = "slab_colour", guide = "colourbar2") { scale_colour_continuous(..., aesthetics = aesthetics, guide = guide) } scale_slab_color_continuous = scale_slab_colour_continuous scale_slab_fill_discrete = function(..., aesthetics = "slab_fill") scale_colour_hue(..., aesthetics = aesthetics) scale_slab_fill_continuous = function(..., aesthetics = "slab_fill", guide = "colourbar2") { scale_colour_continuous(..., aesthetics = aesthetics, guide = guide) } scale_slab_alpha_continuous = function(..., limits = function(l) c(min(0, l[[1]]), l[[2]]), range = c(0, 1)) { continuous_scale("slab_alpha", "slab_alpha_c", rescale_pal(range), limits = limits, ...) } scale_slab_alpha_discrete = function(..., range = c(0.1, 1)) { discrete_scale("slab_alpha", "slab_alpha_d", function(n) seq(range[1], range[2], length.out = n), ...) } scale_slab_size_continuous = function(..., range = c(1, 6)) continuous_scale("slab_size", "slab_size_c", area_pal(range), ...) scale_slab_size_discrete = function(..., range = c(1, 6), na.translate = FALSE) { force(range) discrete_scale("slab_size", "slab_size_d", function(n) seq(range[1], range[2], length.out = n), na.translate = na.translate, ...) } scale_slab_linetype_discrete = function(..., na.value = "blank") { discrete_scale("slab_linetype", "slab_linetype_d", linetype_pal(), na.value = na.value, ...) } scale_slab_linetype_continuous = function(...) { stop0("A continuous variable cannot be mapped to linetype") } scale_slab_shape_discrete = function(..., solid = TRUE) { discrete_scale("slab_shape", "slab_shape_d", shape_pal(solid), ...) } scale_slab_shape_continuous = function (...) { stop0("A continuous variable cannot be mapped to shape") } guide_colourbar2 = function(...) { guide_colourbar(available_aes = union(guide_colourbar()$available_aes, c( "point_colour", "point_fill", "interval_colour", "slab_colour", "slab_fill" ))) } guide_colorbar2 = guide_colourbar2
setClass("FLXMRnlm", representation(start = "list", family = "character", refit = "function"), contains = "FLXMR") utils::globalVariables(c("w")) FLXMRnlm <- function(formula = .~., family = c("gaussian", "Gamma"), start = list(), offset = NULL) { formula <- as.formula(formula) family <- match.arg(family) z <- new("FLXMRnlm", weighted = TRUE, formula = formula, start = start, name = paste("FLXMRnlm", family, sep=":"), offset = offset, family = family, refit = refit) if(family=="gaussian"){ z@defineComponent <- function(para){ predict <- function(x, ...){ data0 <- data.frame(x) startEnv <- new.env(hash = FALSE, parent = environment(formula)) for (i in names(para$start)) assign(i, para$coef[[i]], envir = startEnv) p <- eval(formula[[3L]], data0, startEnv) p } logLik <- function(x, y, ...) dnorm(y, mean = predict(x, ...), sd = para$sigma, log = TRUE) new("FLXcomponent", parameters=list(coef = para$coef, sigma = para$sigma), logLik = logLik, predict = predict, df = para$df) } z@fit <- function(formula, start, x, y, w) { fit <- nls.wfit(formula = formula, start = start, data = data.frame(x,y,w)) z@defineComponent(para = list(coef = coef(fit), start = as.list(fit$start), df = length(fit$start)+1, sigma = sqrt(sum(fit$weights * fit$residuals^2 / mean(fit$weights))/ (fit$df.residuals)))) } }else if(family=="Gamma"){ z@defineComponent <- function(para){ predict <- function(x, ...){ dotarg <- list(...) if("offset" %in% names(dotarg)) offset <- dotarg$offset p <- sapply(seq_len(nrow(x)), function(i) { eval(parse(text = as.formula(formula[[3L]][[3L]])$term( unlist(para$coef),x[i,]))) }) p <- as.matrix(p) } logLik <- function(x, y, ...) { dgamma(y, shape = para$shape, scale = predict(x, ...)/para$shape, log = TRUE)} new("FLXcomponent", parameters = list(coef = para$coef, shape = para$shape), predict = predict, logLik = logLik, df = para$df) } z@fit <- function(formula, start, x, y, w) { fit <- gnm.wfit(formula = formula, start = start, data= data.frame(x,y,w), family = Gamma(link="identity")) z@defineComponent(para = list(fit = fit, coef = fit$coefficients, df = length(start)+1, shape = sum(fit$prior.weights)/ fit$deviance)) } } else stop(paste("Unknown family", family)) z } setMethod("FLXgetModelmatrix", signature(model = "FLXMRnlm"), function(model, data, formula, start = list(),...) { if(is.null(model@formula)) model@formula <- formula model@fullformula <- update.formula(formula, model@formula) mt <- terms(formula, data = data) varNamesRHS <- all.vars(formula[[3L]]) prednames <- varNamesRHS[varNamesRHS %in% names(data)] model@x <- as.matrix(data[prednames]) response <- all.vars(update(formula, . ~ 1)) model@y <- as.matrix(data[response]) model }) setMethod("FLXmstep", signature(model = "FLXMRnlm"), function(model, weights, components,...) { sapply(seq_len(ncol(weights)), function(k) { if(length(names(components[[k]]@parameters))==0) model@fit(model@formula, model@start[[k]], model@x, model@y, weights[,k]) else model@fit(model@formula, as.list(components[[k]]@parameters$coef), model@x, model@y, weights[,k]) }) }) setMethod("FLXdeterminePostunscaled", signature(model = "FLXMRnlm"), function(model, components, ...) 
{ sapply(components, function(x) x@logLik(model@x, model@y)) }) nls.wfit <- function(formula, start, data = list()) { w <- data$w fit <- nls(formula = formula, start = start, data = data, weights = as.vector(w)) startEnv <- new.env(hash = FALSE, parent = environment(formula)) for (i in names(start)) assign(i, coef(fit)[[i]], envir = startEnv) fit$fitted.values <- eval(formula[[3L]], data, startEnv) response <- all.vars(update(formula, . ~ 1)) fit$residuals <- as.vector(residuals(fit)) fit$df.residuals <- df.residual(fit) fit$weights <- weights(fit) fit$formula <- formula fit$start <- coef(fit) fit } gnm.wfit <- function(formula, start, data = list(), family = list()) { w <- data$w fit <- gnm(formula = formula, family = Gamma(link = "identity"), data = data, start = unlist(start), weights = as.vector(w), verbose = FALSE, trace = FALSE, checkLinear = TRUE) fit$df.residuals <- df.residual(fit) fit$coefficients <- coef(fit) fit$start <- as.list(unlist(coef(fit))) fit$rank <- fit$rank[1] fit }
print.Pred <- function(obj){ cat("\nPredication Results\n\n") cat("Input: Predicate ", obj$P," Argument: ", obj$A, "\n\n") cat("Predication Vector ($PA):\n ",(obj$PA), "\n\n") cat("Predicate Vector without Argument ($P.Pred):\n ",(obj$P.Pred), "\n\n") cat("Used neighborhood words ($neighbors):\n", obj$neighbors, "\n") invisible(obj) } Predication <- function(P,A,m,k,tvectors=tvectors,breakdown=FALSE,norm="none"){ if(is.data.frame(tvectors)){ tvectors <- as.matrix(tvectors) }else if("textmatrix" %in% class(tvectors)){ tvectors <- matrix(tvectors, nrow=nrow(tvectors),ncol=ncol(tvectors), dimnames=list(rownames(tvectors),colnames(tvectors))) } if(is.matrix(tvectors)){ if(breakdown==TRUE){ if(class(P) != "character"){ P <- as.character(P) message("Note: P converted to character") } if(class(A) != "character"){ A <- as.character(A) message("Note: A converted to character") } P <- breakdown(P) A <- breakdown(A) } tvectors_P <- tvectors[P,] tvectors_A <- tvectors[A,] nrow <- nrow(tvectors) near.P <- neighbors(P,(m+1),tvectors=tvectors, breakdown=breakdown)[2:(m+1)] near.PA <- multicos(A,names(near.P),tvectors=tvectors, breakdown=breakdown)[1,] near.PA <- sort(near.PA,decreasing=T)[1:k] neighbors <- names(near.PA) tvectors_PA <- tvectors[names(near.PA),] if(k==1){ if(norm=="none"){ PA <- tvectors[P,]+tvectors[A,]+tvectors[names(near.PA),] P.Pred <- tvectors[P,]+tvectors[names(near.PA),] } if(norm=="all"){ PA <- normalize(tvectors[P,]) + normalize(tvectors[A,]) + normalize(tvectors[names(near.PA),]) P.Pred <- normalize(tvectors[P,]) + normalize(tvectors[names(near.PA),]) } if(norm=="block"){ PA <- normalize(tvectors[A,]) + normalize(tvectors[P,] + tvectors[names(near.PA),]) P.Pred <- normalize(tvectors[P,] + tvectors[names(near.PA),]) } } if(k >1){ if(norm=="none"){ PA <- tvectors[A,] + tvectors[P,] + colSums(tvectors[names(near.PA),]) P.Pred <- tvectors[P,] + colSums(tvectors[names(near.PA),]) } if(norm=="all"){ normPA <- tvectors[names(near.PA),] normPA <- t(apply(normPA,1,normalize)) PA <- normalize(tvectors[A,]) + normalize(tvectors[P,]) + colSums(normPA) P.Pred <- normalize(tvectors[P,]) + colSums(normPA) } if(norm=="block"){ PA <- normalize(tvectors[A,]) + normalize(tvectors[P,] + colSums(tvectors[names(near.PA),])) P.Pred <- normalize(tvectors[P,] + colSums(tvectors[names(near.PA),])) } } out <- list(PA=PA,P.Pred=P.Pred,neighbors=neighbors,P=P,A=A) class(out) <- "Pred" return(out) }else{ stop("tvectors must be a matrix!") } }
hulfun <- function(x) {
  # Huber-style piecewise loss evaluated elementwise, with the threshold set to
  # the median of x: quadratic (x^2 / 2) up to the threshold, linear above it.
  n <- length(x)
  a <- median(x)
  y <- rep(0, n)
  for (i in 1:n) {
    if (x[i] <= a) {
      y[i] <- x[i]^2 / 2
    } else {
      y[i] <- a * x[i] - a^2 / 2
    }
  }
  return(y)
}
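## Minimal base-R check of hulfun(); input values are illustrative only.
hulfun(c(-1, 0, 1, 2, 4))  # threshold a = median = 1; returns 0.5 0.0 0.5 1.5 3.5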
parameterFrequency <- function(configurations, parameters, rows = 4, cols = 3, filename = NULL, pdf.width = 12, col = "gray") { xlab <- "values" ylab.cat <- "Frequency" ylab.num <- "Probability density" def.par <- par(no.readonly = TRUE) on.exit(par(def.par), add = TRUE) configurations <- removeConfigurationsMetaData(configurations) param.names <- as.character(parameters$names) nparams <- parameters$nbVariable nlines <- ceiling(nparams / cols) if (nlines < rows) rows <- nlines nplot <- 1 cplot <- 1 if (!is.null(filename)) { filename <- sub(".pdf", "", filename, fixed = TRUE) pdf(file = paste0(filename, "-", cplot, ".pdf"), onefile = TRUE, width = pdf.width) on.exit(dev.off(), add = TRUE) } par(mfrow=c(rows,cols), mar=0.1 + c(4,3,3,1)) for (param.name in param.names) { if (parameters$isFixed[[param.name]]){ cat("Skipping fixed parameter:", param.name, "\n") next } else { cat("Plotting:", param.name, "\n") } if (nplot > rows * cols) { cat("Make new plot\n") cplot <- cplot + 1 if (!is.null(filename)) { dev.off() pdf(file = paste0(filename, "-", cplot, ".pdf"), onefile = TRUE, width = pdf.width) } else { dev.new() } par(mfrow=c(rows, cols)) nplot <- 1 } data <- configurations[, param.name] type <- parameters$types[[param.name]] domain <- parameters$domain[[param.name]] if (type %in% c("c", "o")) { data <- factor(data, domain) data <- addNA(data, ifany = TRUE) levels(data)[is.na(levels(data))] <- "<NA>" data <- table(data) barplot(data, main = param.name, xlab = xlab, ylab = ylab.cat, col = col) } else if (type %in% c("i", "r")) { data <- data[!is.na(data)] if (length(data) == 0) { cat("All values are NA for: ", param.name, ", skipping plot\n") next } else { hist(data, xlim = domain, prob = TRUE, main = param.name, xlab = xlab, ylab = ylab.num, col = col) if (length(data) > 1) { lines(density(data), col = "blue", lwd = 2) } else { abline(v = data[1], col = "blue", lwd = 2) } } } nplot <- nplot + 1 } } parcoordlabel <- function (configurations, parameters, col = "green", lty = 1, lblcol="blue", title="Parameters parallel coordinates", ...) { replace.cat <- function(y, vals){ x <- rep(NA, length(y)) for(i in 1:length(vals)) x[y %in% vals[i]] <- i return(x) } replace.na <- function(x,r) { x <- unlist(x) x <- (x-r[1])/(r[2]-r[1]) x[is.na(x)] <- 1 return(x) } add.level <- function(x, bound, type){ if (type == "i" || type == "r"){ x <- c(x, x[length(x)] + (x[2]-x[1])) } else { x <- c(x, x[length(x)] + x[length(x)]/length(bound)) } return(x) } param.names <- colnames(configurations) bound.num <- parameters$domain for (i in 1:ncol(configurations)) { pname <- param.names[i] if (parameters$types[[pname]] %in% c("c","o")) { configurations[,i]<- as.numeric(replace.cat(configurations[,i], bound.num[[pname]])) bound.num[[pname]] <- seq(1,length(bound.num[[pname]])) } } pr <- lapply(bound.num[param.names], pretty) for(param in param.names) pr[[param]] <- add.level( pr[[param]], bound.num[[param]], parameters$types[[param]]) rx <- lapply(pr, range, na.rm = TRUE) configurations <- mapply(replace.na, as.data.frame(configurations), rx) matplot(1:ncol(configurations), t(configurations), type = "l", col = col, lty = lty, xlab = "", ylab = "", ylim=c(0,1), axes = FALSE, main=title, ...) 
axis(1, at = 1:ncol(configurations), labels = colnames(configurations), las=2) for (i in 1:ncol(configurations)) { pnames <- param.names[i] lines(c(i, i), c(0, 1), col = "grey70") if(parameters$types[[param.names[i]]]=="c"){ labels <- c(parameters$domain[[param.names[i]]], "NA") }else{ labels <- pr[[pnames]] labels[length(labels)] <- "<NA>" } text(c(i, i), seq(0,1,length.out=length(labels)), labels = labels, xpd = NA, col=lblcol) } invisible() } parallelCoordinatesPlot <- function(configurations, parameters, param_names=parameters$names, hierarchy = TRUE, filename = NULL, pdf.width = 14 , mar = c(8,1,4,1)) { getDependency <- function() { sdep <- list() independent <- c() for (param in param_names) { constraint <- all.vars(parameters$conditions[[param]]) if (length(constraint) < 1) { independent <- unique(c(independent, param)) next } for(cc in constraint) { if(is.null(sdep[[cc]]) || is.na(sdep[[cc]])) sdep[[cc]] <- param else sdep[[cc]] <- c(sdep[[cc]], param) } } return(list(independent=independent, dependencies=sdep)) } configurations <- configurations[, grep("^\\.", colnames(configurations), invert=TRUE), drop = FALSE] configurations <- configurations[, param_names] if(hierarchy){ aux <- getDependency() if(length(aux$independent) >= 2){ indconfigurations <- configurations[, aux$independent] if(!is.null(filename)) pdf(file = paste0(filename,"_independent.pdf"), width=pdf.width) else dev.new() par(mar=mar) parcoordlabel(indconfigurations, parameters, title="Independent parameter parallel coordinates") if(!is.null(filename)) dev.off() }else{ cat("Skipping independent parameters",aux$independent,"\n") } dep <- aux$dependencies dnames <- names(dep) for(i in seq_along(dep)){ if(length(dep[[i]]) < 2){ cat("Skipping parameters",dnames[i],"\n") next } depconfigurations <- configurations[, dep[[i]]] depconfigurations <- depconfigurations[!apply(apply(depconfigurations, 1, is.na), 2, all),] if(nrow(depconfigurations) < 2 ){ cat("Skipping parameter",dnames[i],"\n") next } if(!is.null(filename)) pdf(file = paste0(filename,"_",dnames[i],".pdf"), width=pdf.width) else dev.new() par(mar=mar) parcoordlabel(depconfigurations, parameters, title = paste0("Parameters of dependent of ", dnames[i], " parallel coordinates")) if (!is.null(filename)) dev.off() } }else{ if(!is.null(filename)) pdf(file = paste0(filename,".pdf"), width=pdf.width) par(mar=mar) parcoordlabel(configurations, parameters) if(!is.null(filename)) dev.off() } } getFinalElites <- function(iraceResults = NULL, logFile = NULL, n = 0, drop.metadata = FALSE) { if (is.null(iraceResults)) { if (is.null(logFile)) stop("You must supply either 'iraceResults' or 'logFile' argument.\n") else load(logFile) } last.elites <- iraceResults$allElites[[length(iraceResults$allElites)]] if (n == 0) n <- length(last.elites) if (length(last.elites) < n) { cat("Only", length(last.elites), "configurations available, reducing n,\n") n <- length(last.elites) } last.elites <- last.elites[1:n] configurations <- subset(iraceResults$allConfigurations, get(".ID.") %in% as.character(last.elites), drop = FALSE) if (drop.metadata) configurations <- removeConfigurationsMetaData(configurations) return(configurations) } getConfigurationById <- function(iraceResults = NULL, logFile = NULL, ids, drop.metadata = FALSE) { if (is.null(iraceResults)) { if (is.null(logFile)) stop("You must supply either iraceResults or iraceLog argument.\n") else load(logFile) } if (length(ids) < 1) stop("You must provide at least one configuration id.\n") selection <- 
iraceResults$allConfigurations[,".ID."] %in% ids if (length(selection) < 1) stop("No configuration found with id", ids,".\n") configurations <-iraceResults$allConfigurations[selection, , drop = FALSE] if (drop.metadata) configurations <- removeConfigurationsMetaData(configurations) return(configurations) } getConfigurationByIteration <- function(iraceResults = NULL, logFile = NULL, iterations, drop.metadata = FALSE) { if (is.null(iraceResults)) { if (is.null(logFile)) stop("You must supply either iraceResults or iraceLog argument.\n") else load(logFile) } if (length(iterations) < 1) stop("You must provide at least one configuration id.\n") iteration <- NULL ids <- unique(subset(as.data.frame(iraceResults$experimentLog), iteration %in% iterations, select=c("configuration"), drop=TRUE)) selection <- iraceResults$allConfigurations[,".ID."] %in% ids if (length(selection) < 1) stop("No configuration found with id", ids,".\n") configurations <- iraceResults$allConfigurations[selection, , drop=FALSE] if (drop.metadata) configurations <- removeConfigurationsMetaData(configurations) return(configurations) } configurationsBoxplot <- function(experiments, title = NULL, xlabel = "Configuration ID", ylabel = "Configuration cost", filename = NULL) { plot.jitter.points <- function(x, y, factor = 10 / x, pch = 20, ...) points(jitter(rep(x, length(y)), factor = factor), y, pch = pch, col = rgb(0,0,0,.2), ...) if (any(colSums(is.na(experiments)) > 0)) cat("Warning: There are NA values in the experiment results provided.\n") data.labels <- colnames(experiments) if (is.null(data.labels)) data.labels <- 1:ncol(experiments) if (!is.null(filename)) { filename <- paste0(filename, ".pdf.") cat("Creating file", filename,"\n") cairo_pdf(filename = filename, width=20, height=8) on.exit(dev.off(), add = TRUE) plot.mar <- c(7,11,4,1) plot.lwd <- 5 cex.axis <- 3 cex.main <- 3 x.add <- 2 } else { plot.mar <- c(2.5,9,4,1) plot.lwd <- 2 cex.axis <- 1 cex.main <- 1 x.add <- 0 } if (is.null(title)) plot.mar[3] <- 1 old.par <- par(las=1, mar=plot.mar, cex.axis=cex.axis, cex.main=cex.main, lwd=plot.lwd) on.exit(old.par, add = TRUE) boxplot(experiments, main = title, xaxt = "n", outline = TRUE) for (i in 1:ncol(experiments)) { plot.jitter.points (i, experiments[,i], cex = 1.5 * cex.axis) } axis(1, at=c(1:length(data.labels)), labels=data.labels, line = -0.5 + x.add, tick=FALSE, las=1, cex.axis=cex.axis) mtext(xlabel, side=1, line=1.5 + 2*x.add, cex=cex.axis, las=0) mtext(ylabel, side=2, line=5+1.8*x.add, cex=cex.axis, las=0) }
localMaxima <- function(x) {
  # TRUE where the series is increasing; prepending -integer.max makes the first
  # run an "increasing" run, so the odd-numbered run ends are the local maxima.
  y <- diff(c(-.Machine$integer.max, x)) > 0L
  y <- cumsum(rle(y)$lengths)
  y <- y[seq.int(1L, length(y), 2L)]
  if (x[[1]] == x[[2]]) {
    y <- y[-1]
  }
  y
}
localMinima <- function(x) {
  # Prepending +integer.max makes the first run a "non-increasing" run, so the
  # odd-numbered run ends now mark the local minima instead.
  y <- diff(c(.Machine$integer.max, x)) > 0L
  y <- cumsum(rle(y)$lengths)
  y <- y[seq.int(1L, length(y), 2L)]
  if (x[[1]] == x[[2]]) {
    y <- y[-1]
  }
  y
}
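## Quick base-R check of the two helpers above; expected outputs shown as comments.
localMaxima(c(1, 3, 2, 5, 4))  # 2 4  (positions of the peaks 3 and 5)
localMinima(c(1, 3, 2, 5, 4))  # 1 3 5  (endpoints count as minima here)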
HSImin <- function(x){
  # Minimum habitat suitability index across the inputs, ignoring NAs;
  # returns an explanatory message instead when the result falls outside [0, 1].
  HSI <- min(x, na.rm = TRUE)
  if (HSI < 0 || HSI > 1) {
    HSIout <- "Habitat suitability index not within 0 to 1 range."
  } else {
    HSIout <- HSI
  }
  return(HSIout)
}
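## Small base-R illustration of HSImin(); the values are made up for the example.
HSImin(c(0.8, 0.35, NA, 0.6))  # 0.35
HSImin(c(-0.2, 0.5))           # returns the out-of-range message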
NULL optimCrit.maximizeInnerR2 <- function(matrixpls.res){ -sum(r2(matrixpls.res)) } optimCrit.maximizeIndicatorR2 <- function(matrixpls.res){ matrixpls.res <- standardize(matrixpls.res) lambda <- loadings(matrixpls.res) IC <- attr(matrixpls.res,"IC") -sum(diag(lambda %*% IC)) } optimCrit.maximizeFullR2 <- function(matrixpls.res){ optimCrit.maximizeIndicatorR2(matrixpls.res) + optimCrit.maximizeInnerR2(matrixpls.res) } optimCrit.gsca <- function(matrixpls.res){ C <- attr(matrixpls.res,"C") IC <- attr(matrixpls.res,"IC") nativeModel <- attr(matrixpls.res,"model") reflective <- nativeModel$reflective reflective[which(reflective==1)] <- matrixpls.res[grep("=~",names(matrixpls.res))] r <- apply(nativeModel$reflective != 0,1,any) endo <- apply(nativeModel$inner != 0,1,any) inner_resid <- (1 - r2(matrixpls.res)[endo]) refl_resid <- (1 - rowSums(t(IC[,r]) * reflective[r,])) sum(inner_resid, refl_resid) }
clean_eval <- function( occ.cl, geo.space, env.space = NULL, level.filter = c("1_det_by_spec"), r, species = "species", decimal.longitude = "decimalLongitude", decimal.latitude = "decimalLatitude", scientific.name, longitude, latitude ){ if (!missing(scientific.name)) { warning("argument 'scientific.name' is deprecated; please use 'species' instead.", call. = FALSE) species <- scientific.name } if (!missing(latitude)) { warning("argument 'latitude' is deprecated; please use 'decimal.latitude' instead.", call. = FALSE) decimal.latitude <- latitude } if (!missing(longitude)) { warning("argument 'longitude' is deprecated; please use 'decimal.longitude' instead.", call. = FALSE) decimal.longitude <- longitude } natList_column <- "naturaList_levels" %in% colnames(occ.cl) if(!natList_column){ stop("'occ.cl' must be classified by 'classify_occ' function") } if(is(geo.space, "SpatialPolygons")) { geo.space <- sf::st_as_sf(geo.space) } if(!is(geo.space, "sf")) errorCondition("geo.space must be of class: sf or SpatialPolygons*") geo.space <- sf::st_geometry(geo.space) if(!is.null(env.space)){ if(is(env.space, "SpatialPolygons")) { env.space <- sf::st_as_sf(env.space) } if(!is(env.space, "sf")) errorCondition("env.space must be of class: sf or SpatialPolygons*") if(raster::nlayers(r) != 2) errorCondition("raster objetct must have two layers") } occ.full <- occ.cl %>% dplyr::rename("species" = species , "decimalLongitude" = decimal.longitude, "decimalLatitude" = decimal.latitude) %>% dplyr::select(.data$decimalLongitude, .data$decimalLatitude, .data$species) %>% dplyr::arrange(species) occ.cleaned <- occ.cl %>% dplyr::filter(.data$naturaList_levels %in% level.filter) %>% dplyr::select(.data$decimalLongitude, .data$decimalLatitude, .data$species) %>% dplyr::arrange(species) occ.list <- list(occ.full = occ.full, occ.cleaned = occ.cleaned) names.sp.full <- as.character(unique(occ.full$species)) v <- ifelse(is.na(raster::values(r[[1]])), NA, 0) sitexsp <- matrix(rep(v, length(names.sp.full)), nrow = raster::ncell(r), ncol = length(names.sp.full)) colnames(sitexsp) <- names.sp.full msg <- c("Calculating metrics before cleaning", "Calculating metrics after cleaning") res.list <- vector("list", 2) for(i in seq_along(occ.list)){ occ <- occ.list[[i]] message(msg[i]) geo.area <- rep(0, length(names.sp.full)) names(geo.area) <- names.sp.full env.area <- rep(0, length(names.sp.full)) names(env.area) <- names.sp.full names.current <- as.character(unique(occ$species)) names.sp <- names.sp.full %in% names.current message("..Step 1 - Geographical space") geo.polygon <- lapply(unique(occ$species), function(i){ x <- dplyr::filter(occ, .data$species == i) pt <- sf::st_multipoint(as.matrix(x[,1:2])) if(nrow(x) <=3){ sp.pol <- sf::st_buffer(pt, 0.5) } if(nrow(x) > 3){ sp.pol <- sf::st_convex_hull(pt) sp.pol <- sf::st_buffer(sp.pol, 0.5) } geo <- sf::st_geometry(sp.pol) sf::st_crs(geo) <- 4326 suppressMessages(sf::st_intersection(geo, geo.space)) }) res.geo.area <- sapply(geo.polygon, function(x) sum(sf::st_area(x))) geo.area[names.sp] <- res.geo.area geo.raster <- lapply(geo.polygon, function(x){ x <- sf::st_cast(x, "MULTIPOLYGON") fasterize::fasterize(sf::st_sf(a = 1, x), r[[1]], background = 0) }) stk <- raster::stack(geo.raster) msk <- fasterize::fasterize(sf::st_sf(a = 1, geo.space), r[[1]]) stk <- raster::mask(stk, msk) sitexsp[,names.sp] <- raster::values(stk) if(!is.null(env.space)){ message("..Step 2 - Enviromental space") env.std <- vegan::decostand(raster::as.data.frame(r), "range", na.rm = T) env.polygon <- 
lapply(unique(occ$species), function(i){ x <- dplyr::filter(occ, .data$species == i) sp.cell <- unique(raster::cellFromXY(r[[1]], x[, 1:2])) env.row <- row.names(env.std) %in% sp.cell env.xy <- env.std[sp.cell,] if(any(is.na(env.xy))){ warningCondition("There are occurrence points in raster cells without values (NA)") } pt <- sf::st_multipoint(na.omit(as.matrix(env.xy))) if(nrow(x) <=3){ sp.pol <- sf::st_buffer(pt, 0.025) } if(nrow(x) > 3){ sp.pol <- sf::st_convex_hull(pt) sp.pol <- sf::st_buffer(sp.pol, 0.025) } suppressMessages(sf::st_intersection(sf::st_geometry(sp.pol), env.space)) }) res.env.area <- sapply(env.polygon, function(x) sum(sf::st_area(x))) env.area[names.sp] <- res.env.area res.list[[i]] <- list(geo.area = geo.area, env.area = env.area, sitexsp = sitexsp) } if(is.null(env.space)) { res.list[[i]] <- list(geo.area = geo.area, sitexsp = sitexsp) } } message("Preparing outputs") site.coords <- raster::coordinates(r) remmain.geo.area <- round(res.list[[2]]$geo.area/res.list[[1]]$geo.area, 2) if(!is.null(env.space)) { remmain.env.area <- round(res.list[[2]]$env.area/res.list[[1]]$env.area, 2) area <- data.frame(r.geo.area = remmain.geo.area, r.env.area = remmain.env.area) } if(is.null(env.space)) { area <- data.frame(r.geo.area = remmain.geo.area) } comp <- list(comp.BC = res.list[[1]]$sitexsp, comp.AC = res.list[[2]]$sitexsp) rich <- data.frame(rich.BC = rowSums(res.list[[1]]$sitexsp), rich.AC = rowSums(res.list[[2]]$sitexsp)) results <- list(area = area, comp = comp, rich = rich, site.coords = site.coords) message("DONE!") return(results) }
test_that("le_week() | scalar test", { expect_equal(le_week(lubridate::dhours(2.3), lubridate::dhours(4.5), 5), lubridate::as.duration( stats::weighted.mean(c(lubridate::dhours(2.3), lubridate::dhours(4.5)), c(5, 2)))) expect_equal(le_week(lubridate::dhours(5.25), lubridate::dhours(1.25), 3), lubridate::as.duration( stats::weighted.mean(c(lubridate::dhours(5.25), lubridate::dhours(1.25)), c(3, 4)))) expect_equal(le_week(lubridate::as.duration(NA), lubridate::dhours(2.35), 2), lubridate::as.duration(NA)) }) test_that("le_week() | vector test", { expect_equal(le_week(c(lubridate::dhours(2.4), lubridate::dhours(0.5)), c(lubridate::dhours(2.4), lubridate::dhours(NA)), c(3, 7)), c(lubridate::duration( stats::weighted.mean(c(lubridate::dhours(2.4), lubridate::dhours(2.4)), c(3, 4))), lubridate::as.duration(NA))) expect_equal(le_week(c(lubridate::dhours(1.8), lubridate::dhours(5.4)), c(lubridate::dhours(6.7), lubridate::dhours(1.2)), c(5, 6)), c(lubridate::duration( stats::weighted.mean(c(lubridate::dhours(1.8), lubridate::dhours(6.7)), c(5, 2))), lubridate::duration( stats::weighted.mean(c(lubridate::dhours(5.4), lubridate::dhours(1.2)), c(6, 1))))) }) test_that("le_week() | error test", { expect_error(le_week(1, lubridate::duration(1), 1), "Assertion on 'le_w' failed") expect_error(le_week(lubridate::duration(1), 1, 1), "Assertion on 'le_f' failed") expect_error(le_week(lubridate::duration(1), lubridate::duration(1), "a"), "Assertion on 'wd' failed") expect_error(le_week(lubridate::duration(1), lubridate::duration(1), 1.5), "Assertion on 'wd' failed") expect_error(le_week(lubridate::duration(1), lubridate::duration(1), -1), "Assertion on 'wd' failed") expect_error(le_week(lubridate::duration(1), lubridate::duration(1), 8), "Assertion on 'wd' failed") expect_error(le_week(lubridate::duration(1), lubridate::duration(1), c(1, 1))) })
context("Presence of ADMIXTOOLS on the system") test_that("ADMIXTOOLS is present", { skip_on_cran() skip_on_os("windows") expect_true(admixtools_present()) }) test_that("ADMIXTOOLS data is present", { skip_on_cran() skip_on_os("windows") expect_true(dir.exists(file.path(admixtools_path(), "data"))) })
lm_cluster_compute_vcov <- function(mod, cluster, data) {
  require_namespace("sandwich")
  # 'cluster' can be either a vector of cluster IDs or the name of a column in 'data'
  if (length(cluster) > 1) {
    v1 <- cluster
  } else {
    v1 <- data[, cluster, drop = TRUE]
  }
  dfr <- data.frame(cluster = v1)
  # Cluster-robust covariance matrix of the model coefficients
  vcov2 <- sandwich::vcovCL(x = mod, cluster = dfr$cluster)
  return(vcov2)
}
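## Hedged usage sketch: assumes the package's require_namespace() helper is defined
## and the 'sandwich' package is installed; mtcars is used only for illustration.
# mod <- stats::lm(mpg ~ wt + hp, data = mtcars)
# lm_cluster_compute_vcov(mod, cluster = "cyl", data = mtcars)  # cluster-robust vcov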
context ("Creating a coefficient matrix") cm_de <- coefficient_matrix_create ( data_table = iotable_get ( source = "germany_1990"), total = "output", digits = 4 ) ncol(coefficient_matrix_create ( data_table = iotable_get ( source = "germany_1990"), total = "output", digits = 4 )) test_that("correct data is returned", { expect_equal(as.numeric(unlist (cm_de[1, 2] )), c(0.0258), tolerance=1e-3) expect_equal(nrow(coefficient_matrix_create( iotable_get(), return = "primary_inputs" )), 13 ) }) test_that("households are treated correctly", { expect_equal(ncol(coefficient_matrix_create ( data_table = iotable_get ( source = "germany_1990"), total = "output", households = TRUE, digits = 4 )), 8) expect_equal(ncol(coefficient_matrix_create ( data_table = iotable_get ( source = "germany_1990"), total = "output", digits = 4 )), 7) })
result2tibble <- function(...) { input <- list(...)[[1]][[1]] if(length(list(...)) == 2) { cols <- list(...)[[2]] ma <- matrix(input, ncol=cols, byrow=TRUE) colnames(ma) <- colnames(ma, do.NULL = FALSE, prefix = "X") res_frame <- as.data.frame(ma, stringsAsFactors = FALSE) res_frame <- suppressWarnings(adj_coltype(res_frame, input[1:cols])) return(as_tibble(res_frame)) } else { template <- input[[1]] %>% str_replace_all("[\\[\\]]", "") %>% str_replace_all(", ", ",") %>% str_replace_all("\"", "'") %>% strsplit(",") %>% .[[1]] input <- input %>% str_replace_all("[\\[\\]]", "") %>% str_replace_all(", ", ",") %>% strsplit(",") num_cols <- length(template) num_rows <- length(input) res_frame <- data.frame(matrix(ncol = num_cols, nrow = 0)) for (i in 1:num_rows) { res_frame <- rbindlist(list(res_frame, as.list(input[[i]]))) } res_frame %<>% as.data.frame() res_frame <- suppressWarnings(adj_coltype(res_frame, template)) return(as_tibble(res_frame)) } }
select_parameters <- function(model, ...) { UseMethod("select_parameters") } select_parameters.lm <- function(model, direction = "both", steps = 1000, k = 2, ...) { junk <- utils::capture.output(best <- stats::step(model, trace = 0, direction = direction, steps = steps, k = k, ... )) best } select_parameters.merMod <- function(model, direction = "backward", steps = 1000, ...) { insight::check_if_installed("cAIC4") factors <- unique(c( insight::find_random(model, split_nested = FALSE, flatten = TRUE), insight::find_random(model, split_nested = TRUE, flatten = TRUE) )) factors <- gsub(":", "/", factors, fixed = TRUE) best <- suppressMessages(suppressWarnings(cAIC4::stepcAIC(model, groupCandidates = factors, direction = direction, steps = steps, allowUseAcross = TRUE )$finalModel)) best }
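## Minimal sketch of the lm method above: stepwise selection on a base dataset.
## (The merMod method additionally requires the 'cAIC4' and 'insight' packages.)
m <- stats::lm(mpg ~ ., data = mtcars)
select_parameters(m)  # returns the model retained by stats::step()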
print.lrpower <- function(x, ...) { s = x$byStageResults; t = x$overallResults; k = length(s$informationRates) if (k>1) { df = t(data.frame(s$informationRates, s$efficacyBounds, s$futilityBounds, s$cumulativeRejection, s$cumulativeFutility, s$cumulativeAlphaSpent, s$numberOfEvents, s$numberOfSubjects, s$analysisTime, s$efficacyHR, s$futilityHR, s$efficacyP, s$futilityP, s$information, s$logRankHR, t$overallReject, t$alpha, t$numberOfEvents, t$expectedNumberOfEvents, t$numberOfSubjects, t$expectedNumberOfSubjects, t$studyDuration, t$expectedStudyDuration, t$accrualDuration, t$followupTime, t$fixedFollowup, t$rho1, t$rho2)) df[seq(16,28), -1] <- NA colnames(df) <- paste("stage", seq_len(ncol(df)), sep=" ") } else { df = t(data.frame(t$overallReject, t$alpha, t$numberOfEvents, t$numberOfSubjects, t$studyDuration, t$accrualDuration, t$followupTime, t$fixedFollowup, t$rho1, t$rho2, s$efficacyBounds, s$efficacyHR, s$efficacyP, s$information, s$logRankHR )) colnames(df) <- NA } rownames(df) <- sub("^[[:alpha:]][[:alnum:]]*.", "", rownames(df)) print( round(df,3), ..., na.print = "" , quote = FALSE ) invisible(x) }
library(network) test<-network.initialize(3) set.vertex.attribute(test,c('a','b'),c(1,2)) if(!all(test%v%'a'==c(1,1,1) & test%v%'b'==c(2,2,2))){ stop('setting multiple attribute values with set.vertex.attribute failed') } test<-network.initialize(3) set.vertex.attribute(test,list('a','b'),c(1,2)) if(!all(test%v%'a'==c(1,1,1) & test%v%'b'==c(2,2,2))){ stop('setting multiple attribute values with set.vertex.attribute failed') } test<-network.initialize(3) set.vertex.attribute(test,c('a','b'),list(c(1,2,3),c(4,5,6))) if(!all(test%v%'a'==c(1,2,3) & test%v%'b'==c(4,5,6))){ stop('setting multiple attribute values with set.vertex.attribute failed') } test<-network.initialize(3) set.vertex.attribute(test,c('a','b'),list(list(1,2,3),list(4,5,6))) if(!all(test%v%'a'==c(1,2,3) & test%v%'b'==c(4,5,6))){ stop('setting multiple attribute values with set.vertex.attribute failed') } test<-network.initialize(3) obj<-list(one='a complex object',two=c('with muliple','parts')) set.vertex.attribute(test,c('a','b'),list(list(as.list(obj)),list(as.list(obj)))) if(!all(all.equal(get.vertex.attribute(test,'a',unlist=FALSE)[[1]],obj) & all.equal(get.vertex.attribute(test,'b',unlist=FALSE)[[1]],obj))){ stop('setting multiple attribute values with list values in set.vertex.attribute failed') } net <- network.initialize(2) netlist <- list(net) set.network.attribute(netlist[[1]],"test","a value") if (!"test" %in% list.network.attributes(netlist[[1]])) stop('assignment to list of networks failed') test<-network.initialize(3) set.network.attribute(test,c("a","b"),1:2) if (!all(test%n%'a'==1,test%n%'b'==2)){ stop('mulltiple attribute assignment failed for set.network.attribute') } test<-network.initialize(3) set.network.attribute(test,list("a","b"),as.list(1:2)) if (!all(test%n%'a'==1,test%n%'b'==2)){ stop('mulltiple attribute assignment failed for set.network.attribute') } test<-network.initialize(3) add.edges(test,tail=1:3,head=c(2,3,1)) net<-test set.edge.attribute(net,c("a","b"),1:2) if (!all(net%n%'a'==1,net%n%'b'==2)){ stop('mulltiple attribute assignment failed for set.edge.attribute') } net<-test set.edge.attribute(net,c('a','b'),list(c(1,2,3),c(4,5,6))) if(!all(net%e%'a'==c(1,2,3) & net%e%'b'==c(4,5,6))){ stop('setting multiple attribute values with set.edge.attribute failed') } net<-test set.edge.attribute(net,c('a','b'),list(list(1,2,3),list(4,5,6))) if(!all(net%e%'a'==c(1,2,3) & net%e%'b'==c(4,5,6))){ stop('setting multiple attribute values with set.edge.attribute failed') } net<-test obj<-list(one='a complex object',two=c('with muliple','parts')) set.edge.attribute(net,c('a','b'),list(list(as.list(obj)),list(as.list(obj)))) if(!all(all.equal(get.edge.attribute(net,'a',unlist=FALSE)[[1]],obj) & all.equal(get.edge.attribute(net,'b',unlist=FALSE)[[1]],obj))){ stop('setting multiple attribute values with list values in set.edge.attribute failed') } net<-network.initialize(3) add.edges(net,c(1,2,3),c(2,3,1)) set.edge.attribute(net,'test',"a") if(!all(get.edge.attribute(net,'test')==c("a","a","a"))){stop("overloading of get.edge.attribute to get.edge.value not working correctly ")} delete.edges(net,2) set.edge.attribute(net,'foo','bar',1) if(!identical(list('bar',NULL,NULL),get.edge.attribute(net,'foo',unlist=FALSE, deleted.edges.omit = FALSE))){ stop("deleted.edges.omit argument causing bad return values in get.edge.attribute ") } if(!identical(list('bar',NULL),get.edge.attribute(net,'foo',unlist=FALSE, deleted.edges.omit = TRUE))){ stop("deleted.edges.omit argument causing bad return values in get.edge.attribute 
") } if(!identical(c('bar'),get.edge.attribute(net,'foo',unlist=TRUE,deleted.edges.omit=TRUE))){ stop("omission argument causing bad return values in get.edge.attribute") } if(!identical(c('bar'),get.edge.attribute(net,'foo',unlist=TRUE,deleted.edges.omit=TRUE))){ stop("omission arguments causing bad return values in get.edge.attribute") } if(!identical(c('bar'),get.edge.attribute(net,'foo',unlist=TRUE,null.na=FALSE))){ stop("null.na arguments causing bad return values in get.edge.attribute") } if(!identical(c('bar',NA),get.edge.attribute(net,'foo',unlist=TRUE,null.na=TRUE))){ stop("null.na arguments causing bad return values in get.edge.attribute") } if(!identical(list('bar',NULL,NULL),get.edge.attribute(net,'foo',unlist=FALSE,null.na=FALSE))){ stop("null.na arguments causing bad return values in get.edge.attribute") } if(!identical(list('bar',NULL,NA),get.edge.attribute(net,'foo',unlist=FALSE,null.na=TRUE))){ stop("null.na arguments causing bad return values in get.edge.attribute") } set.edge.attribute(net,'na',TRUE,e=1) if(!identical(list('bar',NULL,NULL),get.edge.attribute(net,'foo',unlist=FALSE,na.omit=FALSE))){ stop("na.omit argument causing bad return values in get.edge.attribute") } if(!identical(list(NULL,NULL),get.edge.attribute(net,'foo',unlist=FALSE,na.omit=TRUE))){ stop("na.omit argument causing bad return values in get.edge.attribute") } if(!identical(c('bar'),get.edge.attribute(net,'foo',unlist=TRUE,na.omit=FALSE))){ stop("na.omit argument causing bad return values in get.edge.attribute") } if(!identical(NULL,get.edge.attribute(net,'foo',unlist=TRUE,na.omit=TRUE))){ stop("na.omit argument causing bad return values in get.edge.attribute") } if(!identical(c(TRUE,FALSE),get.edge.attribute(net,'na',na.omit=FALSE))){ stop("get.edge.attribute did not return correct values for 'na' attribute with na.omit=FALSE") } if(!identical(c(FALSE),get.edge.attribute(net,'na',na.omit=TRUE))){ stop("get.edge.attribute did not return correct values for 'na' attribute with na.omit=TRUE") } if(!identical(list(),get.edge.attribute(network.initialize(3),'foo',unlist=FALSE))){ stop("get.edge.attribute did not return correct values network with no edges") } if(!identical(NULL,get.edge.attribute(network.initialize(3),'foo',unlist=TRUE))){ stop("get.edge.attribute did not return correct values network with no edges") } if(!identical(NULL,get.edge.attribute(net,'bar'))){ stop("get.edge.attribute did not return correct values for attribute that does not exist") } net<-network.initialize(3) net[1,2]<-1 net[1,3]<-1 set.edge.attribute(net,'nullval',list(NULL)) if(!identical(list(NULL,NULL),get.edge.attribute(net,'nullval',unlist=FALSE,null.na=FALSE))){ stop("get.edge.attribute not returning NULL values stored as edge attribute correctly") } if(!identical(NULL,get.edge.attribute(net,'nullval',null.na=FALSE))){ stop("get.edge.attribute not returning NULL values stored as edge attribute correctly") } if(!identical(NULL,get.edge.attribute(net,'nullval',null.na=TRUE))){ stop("get.edge.attribute not returning NULL values stored as edge attribute correctly") }
knitr::opts_chunk$set( collapse = TRUE, comment = " ) library(stoRy) library(stoRy) help(package = "stoRy") ?get_similar_stories citation("stoRy") which_lto() set_lto(version = "demo") print_lto() ?`lto-demo` demo_metadata_tbl <- clone_active_metadata_tbl() demo_themes_tbl <- clone_active_themes_tbl() demo_stories_tbl <- clone_active_stories_tbl() demo_collections_tbl <- clone_active_collections_tbl() theme <- Theme$new(theme_name = "mass hysteria") theme theme$print(canonical = TRUE) theme$annotations() suppressMessages(library(dplyr)) library(stringr) demo_themes_tbl <- clone_active_themes_tbl() demo_themes_tbl %>% filter(str_detect(theme_name, "mass")) story <- Story$new(story_id = "tz1959e1x22") story story$print(canonical = TRUE) themes <- story$themes() themes title <- "The Monsters Are Due on Maple Street" demo_stories_tbl <- clone_active_stories_tbl() story_id <- demo_stories_tbl %>% filter(title == !!title) %>% pull(story_id) story_id story$collections() collection <- Collection$new(collection_id = "Collection: tvseries: The Twilight Zone (1959)") collection collection$print(canonical = TRUE) demo_collections_tbl <- clone_active_collections_tbl() demo_collections_tbl collection <- Collection$new(collection_id = "Collection: tvseries: The Twilight Zone (1959)") result_tbl <- get_featured_themes(collection) result_tbl result_tbl <- get_featured_themes() result_tbl test_collection <- Collection$new(collection_id = "Collection: tvseries: The Twilight Zone (1959)") result_tbl <- get_enriched_themes(test_collection) result_tbl result_tbl <- get_enriched_themes(test_collection, weights = list(choice = 1, major = 1, minor = 0)) result_tbl query_story <- Story$new(story_id = "tz1959e1x22") result_tbl <- get_similar_stories(query_story) result_tbl
test_that("errors on invalid input", { expect_error(select_args_text(sum, "-xlab:"), "Failed to parse") expect_error(select_args_text(sum, '"a"'), "numbers") f <- function(x, y, z) {} expect_error(select_args_text(f, "-x:z"), "numbers") }) test_that("positive initial values starts from nothing", { f <- function(x, y, z) {} expect_equal(select_args_text(f, "x y"), c("x", "y")) }) test_that("negative initial starts from everything", { f <- function(x, y, z) {} expect_equal(select_args_text(f, "-z"), c("x", "y")) }) test_that("can alternative exclusion and inclusion", { f <- function(x, y, z) {} expect_equal(select_args_text(f, "-z z"), c("x", "y", "z")) expect_equal(select_args_text(f, "z -z"), character()) })
check_n_N_class <- function(x) {
  # Classify the n/N specification: a "select" object, a single multiplier,
  # a plain numeric vector, or a list (with or without "range" elements).
  if (inherits(x, "select")) {
    class_x <- "select"
  } else {
    if (length(x) == 1) {
      class_x <- "multiplier"
    } else if (mode(x) == "numeric") {
      class_x <- "vector"
    } else {
      class_x <- "list"
      if (any(sapply(x, inherits, "range"))) {
        class_x <- paste(class_x, "with ranges")
      } else {
        class_x <- paste(class_x, "without ranges")
      }
    }
  }
  return(class_x)
}
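## Base-R illustration of check_n_N_class(); "select"/"range" classed inputs are
## package-specific, so only the plain cases are shown here.
check_n_N_class(2)              # "multiplier"
check_n_N_class(c(10, 20, 30))  # "vector"
check_n_N_class(list(10, 20))   # "list without ranges"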
arxiv_open <- function(search_results, limit=20) { stopifnot(limit >= 1) if(nrow(search_results) == 0) return(invisible(NULL)) links <- search_results$link_abstract links <- links[links != ""] if(length(links) > limit) { warning("More abstracts (", length(links), ") than maximum to be opened (", limit, ").") links <- links[1:limit] } for(link in links) { delay_if_necessary() utils::browseURL(link) } invisible(links) }
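## Hedged usage sketch: arxiv_open() expects a data frame like the one returned by
## aRxiv::arxiv_search() (with a link_abstract column) and relies on the package's
## internal delay_if_necessary(); it opens browser tabs, so it is left commented out.
# res <- aRxiv::arxiv_search('abs:"random forest"', limit = 5)
# arxiv_open(res, limit = 3)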
estim.R2<-function(dataX,y, subset, logistic, any.cat, max.iter){ if(!logistic){ if(!any.cat){ X_<-as.matrix(dataX[,as.vector(subset)]) X_<-cbind(rep(1, nrow(X_)), X_) estim.beta<-solve(t(X_)%*%X_)%*%t(X_)%*%y R2<-1-(var(y-X_%*%estim.beta)/var(y)) }else{ X_<-dataX[,as.vector(subset)] dat<-data.frame(y,X_) R2<-summary(lm(y~., data=dat))$r.squared } }else{ fit.control=glm.control(maxit=max.iter) X_<-dataX[,as.vector(subset)] dat<-data.frame(y,X_) sum_glm<-summary(glm(y~., data=dat, family="binomial", control=fit.control)) R2<-1-(sum_glm$deviance/sum_glm$null.deviance) } return(R2) } calc.lmg<-function(X, y, d, logistic, indices, comb_weights, any.cat, parl,clus, boot,max.iter, i=1:nrow(X)){ X<-X[i,] y<-y[i] R2s<-rep(list(0), d+1) for(j in 1:d){ if(!is.null(parl)){ R2s[[j+1]]<-parallel::parApply(clus,indices[[j+1]], 2,estim.R2, dataX=X, y=y, logistic=logistic, any.cat=any.cat, max.iter=max.iter) }else{ R2s[[j+1]]<-apply(indices[[j+1]], 2,estim.R2, dataX=X, y=y, logistic=logistic, any.cat=any.cat, max.iter=max.iter) } } if(!is.null(parl)){ res_lmg <- foreach::foreach(i=1:d, .combine=cbind)%dopar%{ res=0 for(ord in 1:d){ if(ord==1){ idx<-which(indices[[ord+1]]==i) res=res+comb_weights[ord]*R2s[[ord+1]][as.vector(idx)] }else{ idx_j<-which(indices[[ord+1]]==i, arr.ind=T)[,2] idx_woj<-which(apply(indices[[ord]]!=i, 2, all)) tot_incr<-sum(R2s[[ord+1]][as.vector(idx_j)]) - sum(R2s[[ord]][as.vector(idx_woj)]) res=res+comb_weights[ord]*tot_incr } } res } }else{ res_lmg<-rep(0,d) for(var_j in 1:d){ for(ord in 1:d){ if(ord==1){ idx<-which(indices[[ord+1]]==var_j) res_lmg[var_j]=res_lmg[var_j]+comb_weights[ord]*R2s[[ord+1]][as.vector(idx)] }else{ idx_j<-which(indices[[ord+1]]==var_j, arr.ind=T)[,2] idx_woj<-which(apply(indices[[ord]]!=var_j, 2, all)) tot_incr<-sum(R2s[[ord+1]][as.vector(idx_j)]) - sum(R2s[[ord]][as.vector(idx_woj)]) res_lmg[var_j]=res_lmg[var_j]+comb_weights[ord]*tot_incr } } } } res_lmg=res_lmg/d if(boot){ return(res_lmg) }else{ return(list("R2"=R2s, "lmg"=res_lmg)) } } lmg<-function(X, y, logistic = FALSE, rank=FALSE, nboot = 0, conf=0.95, max.iter=1000, parl=NULL){ d<-ncol(X) any.cat<-any(sapply(X,class)=="factor") if(!(class(X)[1]=="matrix" | class(X)[1]=="data.frame")){ stop("X must be either a matrix of a data frame.") } if (logistic==F & !is.numeric(y)){ stop(paste("y must be a numeric vector of length ",nrow(X)," or a matrix with ", nrow(X), " rows and one column.", sep="")) }else if(logistic==T & (all(!is.logical(y), !is.factor(y), !is.numeric(y))) ){ stop("y must be a logical, factor or numeric vector.") }else if (length(y)!=nrow(X)){ stop(paste("y must be a vector of length ",nrow(X)," or a matrix with ", nrow(X), " rows and one column.", sep="")) } if(!is.logical(logistic)){ stop("The 'logistic' argument must be logical, either TRUE or FALSE.") } if(!is.logical(rank)){ stop("The 'rank' argument must be logical, either TRUE or FALSE.") }else if (logistic & rank){ rank=F warning("Impossible to perform a logistic regression with a rank transformation. Defaulted to rank=FALSE.") }else if (any.cat & rank){ rank=F warning("Impossible to perform a rank transformation with categorical inputs. 
Defaulted to rank=FALSE.") } if(!is.numeric(nboot)){ stop("The 'nboot' argument must be a positive integer.") }else if (nboot%%1!=0 | nboot<0 | length(nboot)!=1){ stop("The 'nboot' argument must be a positive integer.") }else if(nboot==0){ boot=F }else{ boot=T } if(!is.numeric(parl)){ if(!is.null(parl)){ stop("The 'parl' argument must be NULL or a positive integer.") } }else if(parl%%1!=0 | parl<0 | length(parl)!=1){ stop("The 'parl' argument must be NULL or a positive integer.") }else if (parl>parallel::detectCores()){ parl=parallel::detectCores()-1 warning(paste("Too many cores specified. Defaulted to ", parl, " cores.", sep="")) } if(rank){ X<-apply(X, 2, rank) } indices<-rep(list(0), d+1) comb_weights<-rep(0,d) for(j in 1:d){ indices[[j+1]]<-t(gtools::combinations(n=d, r=j)) comb_weights[j]<-1/choose(n=(d-1), j-1) } if(!is.null(parl)){ cl=parallel::makeCluster(parl) doParallel::registerDoParallel(cl) }else{ cl=NULL } res_lmg<-calc.lmg(X=X, y=y, d=d, logistic=logistic, indices=indices, comb_weights=comb_weights, any.cat=any.cat, parl=parl, clus=cl, max.iter=max.iter, boot=F) res_lmg$lmg<-matrix(res_lmg$lmg, ncol=1) colnames(res_lmg$lmg)<-"original" rownames(res_lmg$lmg)<-colnames(X) out<-list("call"=match.call(), "lmg"=res_lmg$lmg, "R2s"=res_lmg$R2, "indices"=indices, "w"=comb_weights, "conf_int"=NULL, "X"=X, "y"=y, "logistic"=logistic, "boot"=boot, "nboot"=nboot, "rank"=rank, "parl"=parl, "conf"=conf ) if(boot){ if(is.null(parl)){ boot_lmg<-boot(data=cbind(X), statistic=calc.lmg, R=nboot, d=d, y=y, logistic=logistic, indices=indices, comb_weights=comb_weights, any.cat=any.cat, parl=NULL, clus=NULL, boot=boot, max.iter=max.iter, stype="i") }else{ boot_lmg<-boot(data=cbind(X), statistic=calc.lmg, R=nboot, d=d, y=y, logistic=logistic, indices=indices, comb_weights=comb_weights, any.cat=any.cat, parl=NULL, clus=NULL, boot=boot, stype="i", parallel="snow", ncpus=parl, max.iter=max.iter, cl=cl) } CI_lmg<-bootstats(boot_lmg, conf, "basic") rownames(CI_lmg)=colnames(X) out$conf_int<-CI_lmg } if(!is.null(parl)){parallel::stopCluster(cl)} class(out)<-"lmg" return(out) } print.lmg<-function(x, ...){ cat("\nCall:\n", deparse(x$call), "\n", sep = "") if(x$logistic){ cat("\nLMG decomposition of R2 for logistic model\n") }else{ cat("\nLMG decomposition of R2 for linear model\n") } if(x$boot){ print(x$conf_int) }else{ print(x$lmg) } } plot.lmg<-function(x, ylim=c(0,1), ...){ if(x$logistic){ plot_title="LMG decomposition of R2 for logistic model" }else{ plot_title="LMG decomposition of R2 for linear model" } plot(x$lmg, ylim=ylim, axes=F, xlab="Inputs", ylab="LMG", main=plot_title, ...) axis(2) axis(1, at=seq_along(x$lmg), labels=colnames(x$X)) box() graphics::grid() if(x$boot){ segments(x0=1:ncol(x$X), y0=x$conf_int$`min. c.i.`, x1=1:ncol(x$X), y1=x$conf_int$`max. c.i.`) legend("topright", pch=c(1,NA), lty=c(NA,1), legend=c("LMG values", paste(x$conf*100, "% Confidence interval", sep="")), bg="white") }else{ legend("topright", pch=1, legend=c("LMG Values"), bg="white") } }
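# Minimal usage sketch for the lmg() interface defined above (added for
# illustration; not from the original source). A small simulated linear model
# is used; the gtools package is required by the subset enumeration.
if (requireNamespace("gtools", quietly = TRUE)) {
  set.seed(1)
  X_demo <- data.frame(x1 = rnorm(100), x2 = rnorm(100), x3 = rnorm(100))
  y_demo <- 2 * X_demo$x1 + X_demo$x2 + rnorm(100)
  fit_lmg <- lmg(X_demo, y_demo)
  print(fit_lmg)  # LMG shares of R2 attributed to x1, x2 and x3
  plot(fit_lmg)
}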
varranges <- function(E=NULL, F=NULL, G=NULL, H=NULL, EqA, EqB=NULL, ispos=FALSE, tol=1e-8) { if (! is.matrix(E) & ! is.null(E)) E <- t(as.matrix(E)) if (! is.matrix(G) & ! is.null(G)) G <- t(as.matrix(G)) if (! is.matrix(EqA) & ! is.null(EqA)) EqA <- t(as.matrix(EqA)) Neq <- nrow(E) Nx <- ncol(E) Nineq <- nrow(G) if (is.null(Nineq)) Nineq <- 0 if (is.null(Neq)) Neq <- 0 NVar <- nrow(EqA) con <- E rhs <- F dir <- rep("==",Neq) if (Nineq > 0) { con <- rbind(con,G) rhs <- c(rhs,H) dir <- c(dir,rep(">=",Nineq)) } Range <- matrix(ncol=2,nrow=NVar,NA) if (ispos) { obj <- vector(length = Nx) for (i in 1:NVar) { obj <- EqA[i,] lmin <- lp("min",obj,con,dir,rhs) if (lmin$status == 0) Range[i,1] <- lmin$objval else if (lmin$status == 3) Range[i,1] <- -1e30 else Range[i,1] <- NA lmax <- lp("max",obj,con,dir,rhs) if (lmax$status == 0) Range[i,2] <- lmax$objval else if (lmax$status == 3) Range[i,2] <- 1e30 else Range[i,2] <- NA } } else { Sol <- lsei(E=E,F=F,G=G,H=H) if (Sol$residualNorm > tol) { Sol <- ldei(E=E,F=F,G=G,H=H) if (Sol$residualNorm > tol) { warning (paste("cannot proceed: problem not solvable at requested tolerance",tol)) return(Range) } } con <- cbind(con,-1*con) EqA <- cbind(EqA,-1*EqA) for (i in 1:NVar) { obj <- EqA[i,] lmin <- lp("min", obj, con, dir, rhs) if(lmin$status == 0) Range[i, 1] <- lmin$objval else if(lmin$status == 3) Range[i, 1] <- -1e30 else Range[i, 1] <- NA lmax <- lp("max", obj, con, dir, rhs) if(lmax$status == 0) Range[i, 2] <- lmax$objval else if(lmax$status == 3) Range[i, 2] <- 1e30 else Range[i, 2] <- NA } } if (!is.null(EqB)) { Range[,1]<-Range[,1]-EqB Range[,2]<-Range[,2]-EqB } colnames(Range) <- c("min","max") rownames(Range) <- rownames(EqA) return(Range) }
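# Small illustrative call of varranges() (added; not part of the original
# source). Equalities: x1 + x2 + x3 = 1; inequalities: x >= 0; EqA = I so the
# ranges of the individual unknowns are returned. lp() is taken from the
# lpSolve package, which varranges() uses internally.
if (requireNamespace("lpSolve", quietly = TRUE)) {
  lp <- lpSolve::lp   # make lp() visible to varranges() in this sketch
  E_demo <- matrix(1, nrow = 1, ncol = 3)
  G_demo <- diag(3)
  print(varranges(E = E_demo, F = 1, G = G_demo, H = rep(0, 3),
                  EqA = diag(3), ispos = TRUE))
}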
NULL generate.PrecipitationOccurrenceModel <- function(x,newdata=NULL,previous=NULL,n=30,random=runif(n,min=0,max=1),exogen=NULL,monthly.factor=NULL,...) { p <- x$p if (p<1) previous <- NULL if (!is.null(exogen)) newdata <- as.data.frame(exogen) if (is.null(newdata) & is.null(monthly.factor)) { newdata <- x$predictor } else if (is.null(newdata)) { newdata <- as.data.frame(array(NA,c(length(monthly.factor),0))) } if (!is.null(monthly.factor)) newdata$month <- factor(monthly.factor) if (nrow(newdata)<n) { warning("Warning: n is reduced, insufficient numbers of predictors!") n <- nrow(newdata) } names_n <- names(newdata) newdata <- as.data.frame(newdata[1:n,]) names(newdata) <- names_n if (is.null(previous)) { previous <- rnorm(x$p)>=0 } out <- array(NA,n) for (i in 1:n) { prob <- 1-predict(x,newdata=newdata[i,],previous=previous,type="response",...) out[i] <- random[i]>=prob previous <- c(out[i],previous[-p]) } return(out) } NULL generate.CCGammaObjectListPerEachMonth <- function(x,...) { class(x) <- "list" out <- generate(x,...) return(out) } NULL generate.PrecipitationOccurrenceMultiSiteModel <- function(x,exogen,n=10,origin="1961-1-1",end="1990-1-1",previous=NULL,monthly.factor=NULL,...) { out <- NULL if (is.null(monthly.factor)) { dates <- as.Date(origin):as.Date(end) months <- adddate(as.data.frame(dates),origin=origin)$month n <- length(months) } else { months <- monthly.factor n <- length(months) } if (x$type=="wilks") { monthsf <- sprintf("month%02d",months) gen_wilks <- generate(x$ccgamma,FUN=rnorm,type="covariance",names=x$station,factor.series=monthsf) for (c in 1:ncol(gen_wilks)) { gen_wilks[,c] <- pnorm(gen_wilks[,c]) } if (is.null(exogen)) { exogen <- lapply(X=x$station,FUN=function(x){ NULL }) names(exogen) <- x$station } if (is.null(previous)) { previous <- lapply(X=x$station,FUN=function(x){ NULL }) names(previous) <- x$station } out <- as.data.frame(array(NA,dim(gen_wilks))) names(out) <- names(gen_wilks) for (it in x$station) { if (is.data.frame(exogen)) { cols <- str_detect(names(exogen),it) exogen_loc <- exogen[,cols] } else if (is.list(exogen)) { exogen_loc <- exogen[[it]] } else { exogen_loc <- exogen } if (is.data.frame(previous)) { previous_loc <- previous[,it] } else { previous_loc <- previous[[it]] } message(paste("Processing",it)) out[,it] <- generate(x[[it]],previous=previous_loc,exogen=exogen_loc,monthly.factor=factor(months),random=gen_wilks[,it],n=n) } } else if (x$type=="logit") { if (is.null(exogen)) { exogen <- as.data.frame(array(NA,c(n,0))) } if (is.null(previous)) { previous <- as.data.frame(array(rnorm(x$p*x$K)>=0,c(x$p,x$K))) names(previous) <- x$station } else { previous <- previous[,x$station] } out <- as.data.frame(array(NA,c(n,length(x$station)))) names(out) <- x$station percs <- seq(from=0,to=100,by=5) npercs <- trunc(percs/100*n) for (ncnt in 1:n) { if (ncnt %in% npercs) { valprec <- percs[npercs==ncnt] message <- paste(sprintf("Processing: %0.2f",valprec),"%",sep="") message(message) } out[ncnt,] <- unlist(lapply(X=x[x$station],FUN=generate,previous=previous,endogenous=x$station,exogen=exogen[ncnt,],monthly.factor=factor(months)[ncnt],n=1,...)) previous[-1,] <- previous[-x$p,] previous[1,] <- out[ncnt,] } } return(out) }
knitr::opts_chunk$set(
  collapse = TRUE,
  comment = "#>"
)
library(SplitKnockoff)
library(usethis) source("data-raw/01-small_table.R") source("data-raw/04-specifications.R") source("data-raw/05-game_revenue.R") source("data-raw/06-game_revenue_info.R") usethis::use_data( small_table, specifications, game_revenue, game_revenue_info, internal = FALSE, overwrite = TRUE )
figure_detectAllPoints <- function (aBinaryPlot, sensitivity = 0.2, point_shape = "circle", point_size = 5) { .metagearDependencies("EBImage") point_shape <- switch(point_shape, "circle" = "disc", "square" = "box", "diamond" = "diamond", .metagearPROBLEM("error", paste(point_shape, "is not a valid shape option")) ) pointBrush <- EBImage::makeBrush(size = point_size, shape = point_shape, step = TRUE) aPaintedFigure <- EBImage::opening(EBImage::distmap(aBinaryPlot), pointBrush) detectedPointsFigure <- EBImage::watershed(EBImage::distmap(aPaintedFigure), tolerance = sensitivity, ext = 1) return(detectedPointsFigure) }
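# Hedged usage sketch (added; not part of the original source): detect two
# synthetic blobs in a small binary image. Assumes EBImage is installed and
# that the metagear internals referenced above (.metagearDependencies,
# .metagearPROBLEM) are available in the session.
if (requireNamespace("EBImage", quietly = TRUE)) {
  m <- matrix(0, nrow = 60, ncol = 60)
  m[20:26, 20:26] <- 1   # first synthetic point
  m[40:46, 38:44] <- 1   # second synthetic point
  binaryFig <- EBImage::Image(m)
  detectedFig <- figure_detectAllPoints(binaryFig, point_size = 5)
  max(detectedFig)       # number of labelled objects found by the watershed
}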
knitr::opts_chunk$set(echo = TRUE,fig.width=8, fig.height=6) library(Fiscore) pdb_path<- system.file("extdata", "6kz5.pdb", package="Fiscore") pdb_df<-PDB_process(pdb_path) pdb_df pdb_path<-system.file("extdata", "6kz5_A.pdb", package="Fiscore") pdb_df<-PDB_prepare(pdb_path) head(pdb_df) phi_psi_plot(pdb_df) phi_psi_bar_plot(pdb_df) phi_psi_interactive(pdb_df) B_plot_normalised(pdb_df) phi_psi_3D(pdb_df) Fi_score_plot(pdb_df) Fi_score_region(pdb_df,50,70) Fiscore_secondary(pdb_df) hydrophobicity_plot(pdb_df,window = 9,weight = 25,model = "linear") hydrophobicity_plot(pdb_df,window = 9,weight = 25,model = "exponential") df<-cluster_ID(pdb_df) df<-cluster_ID(pdb_df,clusters = 5, modelNames = "VVI") density_plots(pdb_df) density_plots(pdb_df, df)
test_that("can create a basic workflow", { workflow <- workflow() expect_s3_class(workflow, "workflow") expect_s3_class(workflow$pre, "stage_pre") expect_s3_class(workflow$fit, "stage_fit") expect_s3_class(workflow$post, "stage_post") expect_equal(workflow$pre$actions, list()) expect_equal(workflow$pre$mold, NULL) expect_equal(workflow$fit$actions, list()) expect_equal(workflow$fit$fit, NULL) expect_equal(workflow$post$actions, list()) }) test_that("workflow must be the first argument when adding actions", { rec <- recipes::recipe(mpg ~ cyl, mtcars) mod <- parsnip::linear_reg() expect_error(add_formula(1, mpg ~ cyl), "must be a workflow") expect_error(add_recipe(1, rec), "must be a workflow") expect_error(add_model(1, mod), "must be a workflow") }) test_that("can add a model spec directly to a workflow", { mod <- parsnip::linear_reg() workflow <- workflow(spec = mod) expect_identical(workflow$fit$actions$model$spec, mod) }) test_that("can add a preprocessor directly to a workflow", { preprocessor <- recipes::recipe(mpg ~ cyl, mtcars) workflow <- workflow(preprocessor) expect_identical(workflow$pre$actions$recipe$recipe, preprocessor) preprocessor <- mpg ~ cyl workflow <- workflow(preprocessor) expect_identical(workflow$pre$actions$formula$formula, preprocessor) preprocessor <- workflow_variables(mpg, cyl) workflow <- workflow(preprocessor) expect_identical(workflow$pre$actions$variables$variables, preprocessor) }) test_that("model spec is validated", { expect_snapshot(error = TRUE, workflow(spec = 1)) }) test_that("preprocessor is validated", { expect_snapshot(error = TRUE, workflow(preprocessor = 1)) }) test_that("constructor validates input", { expect_error(new_workflow(pre = 1), "must be a `stage`") expect_error(new_workflow(fit = 1), "must be a `stage`") expect_error(new_workflow(post = 1), "must be a `stage`") expect_error(new_workflow(trained = 1), "must be a single logical value") }) test_that("can check if a workflow is trained", { rec <- recipes::recipe(mpg ~ cyl, mtcars) mod <- parsnip::linear_reg() mod <- parsnip::set_engine(mod, "lm") wf <- workflow() wf <- add_recipe(wf, rec) wf <- add_model(wf, mod) expect_false(is_trained_workflow(wf)) wf <- fit(wf, mtcars) expect_true(is_trained_workflow(wf)) }) test_that("input must be a workflow", { expect_snapshot_error(is_trained_workflow(1)) })
context("Full name") test_that("fullname fallback", { mockery::stub(fullname, "system", function(cmd, ...) { if (grepl("^git config", cmd)) { "Joe Jamba" } else { NULL } }) expect_equal(fullname(), "Joe Jamba") }) test_that("fullname works", { fn <- try(fullname(), silent = TRUE) if (!inherits(fn, "try-error")) { expect_equal(class(fn), "character") expect_equal(length(fn), 1) expect_match(fn, ".*") } }) test_that("FULLNAME env var", { expect_equal( withr::with_envvar(c("FULLNAME" = "Bugs Bunny"), fullname()), "Bugs Bunny") })
rpp_read <- function(filename, header=TRUE, selected=c(2, 3, 4, 0, 0, 1, 0, 0), ...) {
  if (header == TRUE) {
    dat <- read.csv(filename, na.strings = "NA", dec = ".", header = TRUE,
                    strip.white = TRUE, skipNul = TRUE, ...)
    colnames(dat) <- list()
  } else {
    dat <- read.csv(filename, na.strings = "NA", dec = ".", header = FALSE,
                    strip.white = TRUE, skipNul = TRUE, ...)
  }
  istrain <- selected[1]
  istrain_rate <- selected[2]
  istress <- selected[3]
  ielast <- selected[4]
  ivisco <- selected[5]
  iraw_time <- selected[6]
  iraw_stress <- selected[7]
  iraw_strain <- selected[8]
  if (istrain != 0) {
    strain <- dat[istrain]
  } else {
    print("strain is missing")
    strain <- data.frame(numeric(length = dim(dat)[1]))
  }
  if (istrain_rate != 0) {
    strain_rate <- dat[istrain_rate]
  } else {
    print("strain_rate is missing")
    strain_rate <- data.frame(numeric(length = dim(dat)[1]))
  }
  if (istress != 0) {
    stress <- dat[istress]
  } else {
    print("stress is missing")
    stress <- data.frame(numeric(length = dim(dat)[1]))
  }
  if (ielast != 0) {
    elast_stress <- dat[ielast]
  } else {
    print("Elastic-Stress is missing")
    elast_stress <- data.frame(numeric(length = dim(dat)[1]))
  }
  if (ivisco != 0) {
    visco_stress <- dat[ivisco]
  } else {
    print("Visco-Stress is missing")
    visco_stress <- data.frame(numeric(length = dim(dat)[1]))
  }
  if (iraw_time != 0) {
    raw_time <- dat[iraw_time]
  } else {
    print("Raw Time is missing")
    raw_time <- data.frame(numeric(length = dim(dat)[1]))
  }
  if (iraw_stress != 0) {
    raw_stress <- dat[iraw_stress]
  } else {
    print("Raw Stress is missing")
    raw_stress <- numeric(length = dim(dat)[1])
  }
  if (iraw_strain != 0) {
    raw_strain <- dat[iraw_strain]
  } else {
    print("Raw Strain is missing")
    raw_strain <- numeric(length = dim(dat)[1])
  }
  ds <- data.frame(strain, strain_rate, stress, elast_stress, visco_stress,
                   raw_time, raw_stress, raw_strain)
  colnames(ds) <- c("strain", "strain_rate", "stress", "elast_stress",
                    "visco_stress", "raw_time", "raw_stress", "raw_strain")
  return(ds)
}

rpp_read2 <- function(dat, selected=c(2, 3, 4, 0, 0, 1, 0, 0), ...) {
  istrain <- selected[1]
  istrain_rate <- selected[2]
  istress <- selected[3]
  ielast <- selected[4]
  ivisco <- selected[5]
  iraw_time <- selected[6]
  iraw_stress <- selected[7]
  iraw_strain <- selected[8]
  if (istrain != 0) {
    strain <- dat[istrain]
  } else {
    print("strain is missing")
    strain <- data.frame(numeric(length = dim(dat)[1]))
  }
  if (istrain_rate != 0) {
    strain_rate <- dat[istrain_rate]
  } else {
    print("strain_rate is missing")
    strain_rate <- data.frame(numeric(length = dim(dat)[1]))
  }
  if (istress != 0) {
    stress <- dat[istress]
  } else {
    print("stress is missing")
    stress <- data.frame(numeric(length = dim(dat)[1]))
  }
  if (ielast != 0) {
    elast_stress <- dat[ielast]
  } else {
    print("Elastic-Stress is missing")
    elast_stress <- data.frame(numeric(length = dim(dat)[1]))
  }
  if (ivisco != 0) {
    visco_stress <- dat[ivisco]
  } else {
    print("Visco-Stress is missing")
    visco_stress <- data.frame(numeric(length = dim(dat)[1]))
  }
  if (iraw_time != 0) {
    raw_time <- dat[iraw_time]
  } else {
    print("Raw Time is missing")
    raw_time <- data.frame(numeric(length = dim(dat)[1]))
  }
  if (iraw_stress != 0) {
    raw_stress <- dat[iraw_stress]
  } else {
    print("Raw Stress is missing")
    raw_stress <- numeric(length = dim(dat)[1])
  }
  if (iraw_strain != 0) {
    raw_strain <- dat[iraw_strain]
  } else {
    print("Raw Strain is missing")
    raw_strain <- numeric(length = dim(dat)[1])
  }
  ds <- data.frame(strain, strain_rate, stress, elast_stress, visco_stress,
                   raw_time, raw_stress, raw_strain)
  colnames(ds) <- c("strain", "strain_rate", "stress", "elast_stress",
                    "visco_stress", "raw_time", "raw_stress", "raw_strain")
  return(ds)
}
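# Hypothetical usage sketch for rpp_read2() (added; not from the original
# source). Builds a small in-memory table whose columns follow the default
# `selected` mapping: column 1 = raw time, 2 = strain, 3 = strain rate,
# 4 = stress.
demo_dat <- data.frame(
  time        = seq(0, 1, length.out = 11),
  strain      = sin(seq(0, 2 * pi, length.out = 11)),
  strain_rate = cos(seq(0, 2 * pi, length.out = 11)),
  stress      = 0.5 * sin(seq(0, 2 * pi, length.out = 11))
)
ds_demo <- rpp_read2(demo_dat, selected = c(2, 3, 4, 0, 0, 1, 0, 0))
head(ds_demo)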
setMethod(f = "draw_heatmap_legend", signature = "HeatmapList", definition = function(object, legend_list = list(), ...) { side = object@heatmap_legend_param$side size = object@heatmap_legend_param$size padding = object@heatmap_legend_param$padding direction = object@direction legend_grouping = object@ht_list_param$legend_grouping ColorMappingList = list() ColorMappingParamList = list() for(i in seq_along(object@ht_list)) { ht = object@ht_list[[i]] if(direction == "horizontal") { if(inherits(object@ht_list[[i]], "Heatmap")) { if(!is.null(ht@left_annotation)) { if(object@ht_list_param$merge_legends || legend_grouping == "adjusted") { ColorMappingList = c.list(ColorMappingList, list = get_color_mapping_list(ht@left_annotation)) ColorMappingParamList = c.list(ColorMappingParamList, list = get_legend_param_list(ht@left_annotation)) } } if(!is.null(ht@top_annotation)) { if(object@ht_list_param$merge_legends) { ColorMappingList = c.list(ColorMappingList, list = get_color_mapping_list(ht@top_annotation)) ColorMappingParamList = c.list(ColorMappingParamList, list = get_legend_param_list(ht@top_annotation)) } } if(object@ht_list[[i]]@heatmap_param$show_heatmap_legend) { ColorMappingList = c.list(ColorMappingList, object@ht_list[[i]]@matrix_color_mapping) ColorMappingParamList = c.list(ColorMappingParamList, object@ht_list[[i]]@matrix_legend_param) } if(!is.null(ht@bottom_annotation)) { if(object@ht_list_param$merge_legends) { ColorMappingList = c.list(ColorMappingList, list = get_color_mapping_list(ht@bottom_annotation)) ColorMappingParamList = c.list(ColorMappingParamList, list = get_legend_param_list(ht@bottom_annotation)) } } if(!is.null(ht@right_annotation)) { if(object@ht_list_param$merge_legends || legend_grouping == "adjusted") { ColorMappingList = c.list(ColorMappingList, list = get_color_mapping_list(ht@right_annotation)) ColorMappingParamList = c.list(ColorMappingParamList, list = get_legend_param_list(ht@right_annotation)) } } } else if(inherits(object@ht_list[[i]], "HeatmapAnnotation")) { if(object@ht_list_param$merge_legends || legend_grouping == "adjusted") { ColorMappingList = c.list(ColorMappingList, list = get_color_mapping_list(object@ht_list[[i]])) ColorMappingParamList = c.list(ColorMappingParamList, list = get_legend_param_list(object@ht_list[[i]])) } } } else { if(inherits(object@ht_list[[i]], "Heatmap")) { if(!is.null(ht@left_annotation)) { if(object@ht_list_param$merge_legends) { ColorMappingList = c.list(ColorMappingList, list = get_color_mapping_list(ht@left_annotation)) ColorMappingParamList = c.list(ColorMappingParamList, list = get_legend_param_list(ht@left_annotation)) } } if(!is.null(ht@top_annotation)) { if(object@ht_list_param$merge_legends || legend_grouping == "adjusted") { ColorMappingList = c.list(ColorMappingList, list = get_color_mapping_list(ht@top_annotation)) ColorMappingParamList = c.list(ColorMappingParamList, list = get_legend_param_list(ht@top_annotation)) } } if(object@ht_list[[i]]@heatmap_param$show_heatmap_legend) { ColorMappingList = c.list(ColorMappingList, object@ht_list[[i]]@matrix_color_mapping) ColorMappingParamList = c.list(ColorMappingParamList, object@ht_list[[i]]@matrix_legend_param) } if(!is.null(ht@bottom_annotation)) { if(object@ht_list_param$merge_legends || legend_grouping == "adjusted") { ColorMappingList = c.list(ColorMappingList, list = get_color_mapping_list(ht@bottom_annotation)) ColorMappingParamList = c.list(ColorMappingParamList, list = get_legend_param_list(ht@bottom_annotation)) } } if(!is.null(ht@right_annotation)) { 
if(object@ht_list_param$merge_legends) { ColorMappingList = c.list(ColorMappingList, list = get_color_mapping_list(ht@right_annotation)) ColorMappingParamList = c.list(ColorMappingParamList, list = get_legend_param_list(ht@right_annotation)) } } } else if(inherits(object@ht_list[[i]], "HeatmapAnnotation")) { if(object@ht_list_param$merge_legends || legend_grouping == "adjusted") { ColorMappingList = c.list(ColorMappingList, list = get_color_mapping_list(object@ht_list[[i]])) ColorMappingParamList = c.list(ColorMappingParamList, list = get_legend_param_list(object@ht_list[[i]])) } } } } annotation_legend_side = object@annotation_legend_param$side annotation_legend_size = object@annotation_legend_param$size align_legend = object@heatmap_legend_param$align_legend if(is.null(align_legend)) { align_legend = guess_align_legend(object, object@heatmap_legend_param$size, object@annotation_legend_param$size, object@heatmap_legend_param$side, object@annotation_legend_param$side, test_on = "heatmap_legend") } if(align_legend %in% c("global_center", "global-center")) { if(side != annotation_legend_side) { y = unit(0.5, "npc") pushViewport(viewport(name = "heatmap_legend", x = unit(0.5, "npc"), y = y, width = size[1], height = size[2], just = c("center", "center"))) } else { if(side %in% c("left", "right")) { y1 = unit(0.5, "npc") + size[2]*0.5 y2 = unit(0.5, "npc") + annotation_legend_size[2]*0.5 y = max(y1, y2) pushViewport(viewport(name = "heatmap_legend", x = unit(0.5, "npc"), y = y, width = size[1], height = size[2], just = c("center", "top"))) } else { x1 = unit(0.5, "npc") - size[1]*0.5 x2 = unit(0.5, "npc") - annotation_legend_size[1]*0.5 x = min(x1, x2) pushViewport(viewport(name = "heatmap_legend", x = x, y = unit(0.5, "npc"), width = size[1], height = size[2], just = c("left", "center"))) } } } else { if(align_legend %in% c("heatmap_top", "heatmap-top")) { if(!side %in% c("left", "right")) { stop_wrap("Heatmap legends should be put on the left or right side of the heatmaps if `align_legend` is 'heatmap_top'.") } if(object@direction == "horizontal") { top_h = sum(component_height(object@ht_list[[ which_main_ht(object) ]], 1:4)) } else { top_h = calc_before_h(object) } y = unit(1, "npc") - top_h legend_just = "top" x = unit(0.5, "npc") } else if(align_legend %in% c("heatmap_left", "heatmap-left")) { if(!side %in% c("top", "bottom")) { stop_wrap("Heatmap legends should be put on the top or bottom side of the heatmaps if `align_legend` is 'heatmap_left'.") } if(object@direction == "vertical") { left_w = sum(component_width(object@ht_list[[ which_main_ht(object) ]], 1:4)) } else { left_w = calc_before_w(object) } x = left_w legend_just = "left" y = unit(0.5, "npc") } else if(align_legend %in% c("heatmap_center", "heatmap-center")) { if(side %in% c("left", "right")) { if(object@direction == "horizontal") { bottom_h = sum(component_height(object@ht_list[[ which_main_ht(object) ]], 6:9)) top_h = sum(component_height(object@ht_list[[ which_main_ht(object) ]], 1:4)) anno_bottom_extend = do.call("unit.c", lapply(object@ht_list, function(ht) { if(inherits(ht, "HeatmapAnnotation")) { ht@extended[1] } else { if(!is.null(ht@left_annotation)) { u1 = ht@left_annotation@extended[1] } else { u1 = unit(0, "mm") } if(!is.null(ht@right_annotation)) { u2 = ht@right_annotation@extended[1] } else { u2 = unit(0, "mm") } unit.c(u1, u2) } })) bottom_h = max(unit.c(bottom_h, anno_bottom_extend)) bottom_h = convertHeight(bottom_h, "mm") } else { bottom_h = calc_after_h(object) top_h = calc_before_h(object) } ht_h = 
unit(1, "npc") - top_h - bottom_h y = bottom_h + ht_h*0.5 legend_just = "center" x = unit(0.5, "npc") } else { if(object@direction == "horizontal") { left_w = calc_before_w(object) right_w = calc_after_w(object) } else { left_w = sum(component_width(object@ht_list[[ which_main_ht(object) ]], 1:4)) right_w = sum(component_width(object@ht_list[[ which_main_ht(object) ]], 6:9)) } ht_w = unit(1, "npc") - left_w - right_w x = left_w + ht_w*0.5 legend_just = "center" y = unit(0.5, "npc") } } else { stop_wrap("Wrong value for `align_heatmap_legend`.") } if(side != annotation_legend_side) { pushViewport(viewport(name = "heatmap_legend", x = x, y = y, width = size[1], height = size[2], just = legend_just)) } else { if(side %in% c("left", "right")) { if(align_legend %in% c("heatmap_center", "heatmap-center")) { y = bottom_h + ht_h*0.5 + max(size[2]*0.5, annotation_legend_size[2]*0.5) legend_just = "top" } pushViewport(viewport(name = "heatmap_legend", x = x, y = y, width = size[1], height = size[2], just = legend_just)) } else { if(align_legend %in% c("heatmap_center", "heatmap-center")) { x = left_w + ht_w*0.5 - max(size[1]*0.5, annotation_legend_size[1]*0.5) legend_just = "left" } pushViewport(viewport(name = "heatmap_legend", x = x, y = y, width = size[1], height = size[2], just = legend_just)) } } } draw_legend(ColorMappingList, ColorMappingParamList, side = side, legend_list = legend_list, padding = padding, ...) upViewport() }) setMethod(f = "draw_annotation_legend", signature = "HeatmapList", definition = function(object, legend_list = list(), ...) { side = object@annotation_legend_param$side size = object@annotation_legend_param$size padding = object@annotation_legend_param$padding offset = object@annotation_legend_param$offset direction = object@direction legend_grouping = object@ht_list_param$legend_grouping ColorMappingList = list() ColorMappingParamList = list() for(i in seq_along(object@ht_list)) { ht = object@ht_list[[i]] if(direction == "horizontal") { if(inherits(ht, "Heatmap")) { if(!is.null(ht@left_annotation)) { if(legend_grouping == "original") { ColorMappingList = c.list(ColorMappingList, list = get_color_mapping_list(ht@left_annotation)) ColorMappingParamList = c.list(ColorMappingParamList, list = get_legend_param_list(ht@left_annotation)) } } if(!is.null(ht@top_annotation)) { ColorMappingList = c.list(ColorMappingList, list = get_color_mapping_list(ht@top_annotation)) ColorMappingParamList = c.list(ColorMappingParamList, list = get_legend_param_list(ht@top_annotation)) } if(!is.null(ht@bottom_annotation)) { ColorMappingList = c.list(ColorMappingList, list = get_color_mapping_list(ht@bottom_annotation)) ColorMappingParamList = c.list(ColorMappingParamList, list = get_legend_param_list(ht@bottom_annotation)) } if(!is.null(ht@right_annotation)) { if(legend_grouping == "original") { ColorMappingList = c.list(ColorMappingList, list = get_color_mapping_list(ht@right_annotation)) ColorMappingParamList = c.list(ColorMappingParamList, list = get_legend_param_list(ht@right_annotation)) } } } else if(inherits(ht, "HeatmapAnnotation")) { if(legend_grouping == "original") { ColorMappingList = c.list(ColorMappingList, list = get_color_mapping_list(ht)) ColorMappingParamList = c.list(ColorMappingParamList, list = get_color_mapping_list(ht)) } } } else { if(inherits(ht, "Heatmap")) { if(!is.null(ht@left_annotation)) { ColorMappingList = c.list(ColorMappingList, list = get_color_mapping_list(ht@left_annotation)) ColorMappingParamList = c.list(ColorMappingParamList, list = 
get_legend_param_list(ht@left_annotation)) } if(!is.null(ht@top_annotation)) { if(legend_grouping == "original") { ColorMappingList = c.list(ColorMappingList, list = get_color_mapping_list(ht@top_annotation)) ColorMappingParamList = c.list(ColorMappingParamList, list = get_legend_param_list(ht@top_annotation)) } } if(!is.null(ht@bottom_annotation)) { if(legend_grouping == "original") { ColorMappingList = c.list(ColorMappingList, list = get_color_mapping_list(ht@bottom_annotation)) ColorMappingParamList = c.list(ColorMappingParamList, list = get_legend_param_list(ht@bottom_annotation)) } } if(!is.null(ht@right_annotation)) { ColorMappingList = c.list(ColorMappingList, list = get_color_mapping_list(ht@right_annotation)) ColorMappingParamList = c.list(ColorMappingParamList, list = get_legend_param_list(ht@right_annotation)) } } else if(inherits(ht, "HeatmapAnnotation")) { if(legend_grouping == "original") { ColorMappingList = c.list(ColorMappingList, list = get_color_mapping_list(ht)) ColorMappingParamList = c.list(ColorMappingParamList, list = get_color_mapping_list(ht)) } } } } heatmap_legend_side = object@heatmap_legend_param$side heatmap_legend_size = object@heatmap_legend_param$size align_legend = object@annotation_legend_param$align_legend if(is.null(align_legend)) { align_legend = guess_align_legend(object, object@heatmap_legend_param$size, object@annotation_legend_param$size, object@heatmap_legend_param$side, object@annotation_legend_param$side, test_on = "annotation_legend") } if(align_legend %in% c("global_center", "global-center")) { if(side != heatmap_legend_side) { pushViewport(viewport(name = "annotation_legend", x = unit(0.5, "npc"), y = unit(0.5, "npc"), width = size[1], height = size[2], just = c("center", "center"))) } else { if(side %in% c("left", "right")) { y1 = unit(0.5, "npc") + size[2]*0.5 y2 = unit(0.5, "npc") + heatmap_legend_size[2]*0.5 y = max(y1, y2) pushViewport(viewport(name = "annotation_legend", x = unit(0.5, "npc"), y = y, width = size[1], height = size[2], just = c("center", "top"))) } else { x1 = unit(0.5, "npc") - size[1]*0.5 x2 = unit(0.5, "npc") - heatmap_legend_size[1]*0.5 x = min(x1, x2) pushViewport(viewport(name = "annotation_legend", x = x, y = unit(0.5, "npc"), width = size[1], height = size[2], just = c("left", "center"))) } } } else { if(align_legend %in% c("heatmap_top", "heatmap-top")) { if(!side %in% c("left", "right")) { stop_wrap("Annotation legends should be put on the left or right side of the heatmaps if `align_legend` is 'heatmap_top'.") } if(object@direction == "horizontal") { top_h = sum(component_height(object@ht_list[[ which_main_ht(object) ]], 1:4)) } else { top_h = calc_before_h(object) } y = unit(1, "npc") - top_h legend_just = "top" x = unit(0.5, "npc") } else if(align_legend %in% c("heatmap_left", "heatmap-left")) { if(!side %in% c("top", "bottom")) { stop_wrap("Annotation legends should be put on the top or bottom side of the heatmaps if `align_legend` is 'heatmap_left'.") } if(object@direction == "vertical") { left_w = sum(component_width(object@ht_list[[ which_main_ht(object) ]], 1:4)) } else { left_w = calc_before_w(object) } x = left_w legend_just = "left" y = unit(0.5, "npc") } else if(align_legend %in% c("heatmap_center", "heatmap-center")) { if(side %in% c("left", "right")) { if(object@direction == "horizontal") { bottom_h = sum(component_height(object@ht_list[[ which_main_ht(object) ]], 6:9)) top_h = sum(component_height(object@ht_list[[ which_main_ht(object) ]], 1:4)) anno_bottom_extend = do.call("unit.c", 
lapply(object@ht_list, function(ht) { if(inherits(ht, "HeatmapAnnotation")) { ht@extended[1] } else { if(!is.null(ht@left_annotation)) { u1 = ht@left_annotation@extended[1] } else { u1 = unit(0, "mm") } if(!is.null(ht@right_annotation)) { u2 = ht@right_annotation@extended[1] } else { u2 = unit(0, "mm") } unit.c(u1, u2) } })) bottom_h = max(unit.c(bottom_h, anno_bottom_extend)) bottom_h = convertHeight(bottom_h, "mm") } else { bottom_h = calc_after_h(object) top_h = calc_before_h(object) } ht_h = unit(1, "npc") - top_h - bottom_h y = bottom_h + ht_h*0.5 legend_just = "center" x = unit(0.5, "npc") } else { if(object@direction == "horizontal") { left_w = calc_before_w(object) right_w = calc_after_w(object) } else { left_w = sum(component_width(object@ht_list[[ which_main_ht(object) ]], 1:4)) right_w = sum(component_width(object@ht_list[[ which_main_ht(object) ]], 6:9)) } ht_w = unit(1, "npc") - left_w - right_w x = left_w + ht_w*0.5 legend_just = "center" y = unit(0.5, "npc") } } else { stop_wrap("Wrong value for `align_annotation_legend`.") } if(side != heatmap_legend_side) { pushViewport(viewport(name = "annotation_legend", x = x, y = y, width = size[1], height = size[2], just = legend_just)) } else { if(side %in% c("left", "right")) { if(align_legend %in% c("heatmap_center", "heatmap-center")) { y = bottom_h + ht_h*0.5 + max(size[2]*0.5, heatmap_legend_size[2]*0.5) legend_just = "top" } pushViewport(viewport(name = "annotation_legend", x = x, y = y, width = size[1], height = size[2], just = legend_just)) } else { if(align_legend %in% c("heatmap_center", "heatmap-center")) { x = left_w + ht_w*0.5 - max(size[1]*0.5, heatmap_legend_size[1]*0.5) legend_just = "left" } pushViewport(viewport(name = "annotation_legend", x = x, y = y, width = size[1], height = size[2], just = legend_just)) } } } draw_legend(ColorMappingList, ColorMappingParamList, side = side, legend_list = legend_list, padding = padding, ...) 
upViewport() }) calc_ht_h = function(ht_list, inlcude_bottom = FALSE) { if(ht_list@direction == "horizontal") { bottom_h = sum(component_height(ht_list@ht_list[[ which_main_ht(ht_list) ]], 6:9)) top_h = sum(component_height(ht_list@ht_list[[ which_main_ht(ht_list) ]], 1:4)) } else { bottom_h = calc_after_h(ht_list) top_h = calc_before_h(ht_list) } ht_h = unit(1, "npc") - top_h - bottom_h if(inlcude_bottom) { ht_h = unit(1, "npc") - top_h } ht_h } calc_ht_w = function(ht_list, include_right = FALSE) { if(ht_list@direction == "horizontal") { left_w = calc_before_w(ht_list) right_w = calc_after_w(ht_list) } else { left_w = sum(component_width(ht_list@ht_list[[ which_main_ht(ht_list) ]], 1:4)) right_w = sum(component_width(ht_list@ht_list[[ which_main_ht(ht_list) ]], 6:9)) } ht_w = unit(1, "npc") - left_w - right_w if(include_right) { ht_w = unit(1, "npc") - left_w } ht_w } calc_before_h = function(ht_list) { i = which_first_ht(ht_list) if(i > 1) { unit(0, "mm") } else { sum(component_height(ht_list@ht_list[[ i ]], 6:9)) } } calc_after_h = function(ht_list) { i = which_last_ht(ht_list) if(i < length(ht_list@ht_list)) { unit(0, "mm") } else { sum(component_height(ht_list@ht_list[[ i ]], 6:9)) } } calc_before_w = function(ht_list) { i = which_first_ht(ht_list) if(i > 1) { unit(0, "mm") } else { sum(component_width(ht_list@ht_list[[ i ]], 1:4)) } } calc_after_w = function(ht_list) { i = which_last_ht(ht_list) if(i < length(ht_list@ht_list)) { unit(0, "mm") } else { sum(component_width(ht_list@ht_list[[ i ]], 6:9)) } } guess_align_legend = function(ht_list, heatmap_legend_size, annotation_legend_size, heatmap_legend_side, annotation_legend_side, test_on) { if(test_on == "heatmap_legend") { if(heatmap_legend_side == annotation_legend_side) { if(!is.null(attr(annotation_legend_size, "multiple"))) { if(attr(heatmap_legend_size, "multiple") != 1 || attr(annotation_legend_size, "multiple") != 1) { return("global_center") } } } if(attr(heatmap_legend_size, "multiple") != 1) { return("global_center") } } if(test_on == "annotation_legend") { if(heatmap_legend_side == annotation_legend_side) { if(!is.null(attr(heatmap_legend_size, "multiple"))) { if(attr(heatmap_legend_size, "multiple") != 1 || attr(annotation_legend_size, "multiple") != 1) { return("global_center") } } } if(attr(annotation_legend_size, "multiple") != 1) { return("global_center") } } heatmap_legend_size align_legend = NULL if(heatmap_legend_side == annotation_legend_side) { if(ifelse(test_on == "heatmap_legend", heatmap_legend_side, annotation_legend_side) %in% c("left", "right")) { ht_h = calc_ht_h(ht_list) if(convertHeight(ht_h, "mm", valueOnly = TRUE) >= max(as.numeric(heatmap_legend_size[2]), as.numeric(annotation_legend_size[2]))) { align_legend = "heatmap_center" } } else { ht_w = calc_ht_w(ht_list) if(convertWidth(ht_w, "mm", valueOnly = TRUE) >= max(as.numeric(heatmap_legend_size[1]), as.numeric(annotation_legend_size[1]))) { align_legend = "heatmap_center" } } if(is.null(align_legend)) { if(ifelse(test_on == "heatmap_legend", heatmap_legend_side, annotation_legend_side) %in% c("left", "right")) { ht_h = calc_ht_h(ht_list, TRUE) if(convertHeight(ht_h, "mm", valueOnly = TRUE) >= max(as.numeric(heatmap_legend_size[2]), as.numeric(annotation_legend_size[2]))) { align_legend = "heatmap_top" } } else { ht_w = calc_ht_w(ht_list, TRUE) if(convertWidth(ht_w, "mm", valueOnly = TRUE) >= max(as.numeric(heatmap_legend_size[1]), as.numeric(annotation_legend_size[1]))) { align_legend = "heatmap_left" } } } if(is.null(align_legend)) { 
align_legend = "global_center" } } else { if(ifelse(test_on == "heatmap_legend", heatmap_legend_side, annotation_legend_side) %in% c("left", "right")) { ht_h = calc_ht_h(ht_list) if(convertHeight(ht_h, "mm", valueOnly = TRUE) >= ifelse(test_on == "heatmap_legend", as.numeric(heatmap_legend_size[2]), as.numeric(annotation_legend_size[2]))) { align_legend = "heatmap_center" } } else { ht_w = calc_ht_w(ht_list) if(convertWidth(ht_w, "mm", valueOnly = TRUE) >= ifelse(test_on == "heatmap_legend", as.numeric(heatmap_legend_size[1]), as.numeric(annotation_legend_size[1]))) { align_legend = "heatmap_center" } } if(is.null(align_legend)) { if(ifelse(test_on == "heatmap_legend", heatmap_legend_side, annotation_legend_side) %in% c("left", "right")) { ht_h = calc_ht_h(ht_list, TRUE) if(convertHeight(ht_h, "mm", valueOnly = TRUE) >= ifelse(test_on == "heatmap_legend", as.numeric(heatmap_legend_size[2]), as.numeric(annotation_legend_size[2]))) { align_legend = "heatmap_top" } } else { ht_w = calc_ht_w(ht_list, TRUE) if(convertWidth(ht_w, "mm", valueOnly = TRUE) >= ifelse(test_on == "heatmap_legend", as.numeric(heatmap_legend_size[1]), as.numeric(annotation_legend_size[1]))) { align_legend = "heatmap_left" } } } if(is.null(align_legend)) { align_legend = "global_center" } } return(align_legend) } setMethod(f = "heatmap_legend_size", signature = "HeatmapList", definition = function(object, legend_list = list(), ...) { side = object@heatmap_legend_param$side padding = object@heatmap_legend_param$padding direction = object@direction legend_grouping = object@ht_list_param$legend_grouping ColorMappingList = list() ColorMappingParamList = list() for(i in seq_along(object@ht_list)) { ht = object@ht_list[[i]] if(direction == "horizontal") { if(inherits(object@ht_list[[i]], "Heatmap")) { if(!is.null(ht@left_annotation)) { if(object@ht_list_param$merge_legends || legend_grouping == "adjusted") { ColorMappingList = c.list(ColorMappingList, list = get_color_mapping_list(ht@left_annotation)) ColorMappingParamList = c.list(ColorMappingParamList, list = get_legend_param_list(ht@left_annotation)) } } if(!is.null(ht@top_annotation)) { if(object@ht_list_param$merge_legends) { ColorMappingList = c.list(ColorMappingList, list = get_color_mapping_list(ht@top_annotation)) ColorMappingParamList = c.list(ColorMappingParamList, list = get_legend_param_list(ht@top_annotation)) } } if(object@ht_list[[i]]@heatmap_param$show_heatmap_legend) { ColorMappingList = c.list(ColorMappingList, object@ht_list[[i]]@matrix_color_mapping) ColorMappingParamList = c.list(ColorMappingParamList, object@ht_list[[i]]@matrix_legend_param) } if(!is.null(ht@bottom_annotation)) { if(object@ht_list_param$merge_legends) { ColorMappingList = c.list(ColorMappingList, list = get_color_mapping_list(ht@bottom_annotation)) ColorMappingParamList = c.list(ColorMappingParamList, list = get_legend_param_list(ht@bottom_annotation)) } } if(!is.null(ht@right_annotation)) { if(object@ht_list_param$merge_legends || legend_grouping == "adjusted") { ColorMappingList = c.list(ColorMappingList, list = get_color_mapping_list(ht@right_annotation)) ColorMappingParamList = c.list(ColorMappingParamList, list = get_legend_param_list(ht@right_annotation)) } } } else if(inherits(object@ht_list[[i]], "HeatmapAnnotation")) { if(object@ht_list_param$merge_legends || legend_grouping == "adjusted") { ColorMappingList = c.list(ColorMappingList, list = get_color_mapping_list(object@ht_list[[i]])) ColorMappingParamList = c.list(ColorMappingParamList, list = 
get_legend_param_list(object@ht_list[[i]])) } } } else { if(inherits(object@ht_list[[i]], "Heatmap")) { if(!is.null(ht@left_annotation)) { if(object@ht_list_param$merge_legends) { ColorMappingList = c.list(ColorMappingList, list = get_color_mapping_list(ht@left_annotation)) ColorMappingParamList = c.list(ColorMappingParamList, list = get_legend_param_list(ht@left_annotation)) } } if(!is.null(ht@top_annotation)) { if(object@ht_list_param$merge_legends || legend_grouping == "adjusted") { ColorMappingList = c.list(ColorMappingList, list = get_color_mapping_list(ht@top_annotation)) ColorMappingParamList = c.list(ColorMappingParamList, list = get_legend_param_list(ht@top_annotation)) } } if(object@ht_list[[i]]@heatmap_param$show_heatmap_legend) { ColorMappingList = c.list(ColorMappingList, object@ht_list[[i]]@matrix_color_mapping) ColorMappingParamList = c.list(ColorMappingParamList, object@ht_list[[i]]@matrix_legend_param) } if(!is.null(ht@bottom_annotation)) { if(object@ht_list_param$merge_legends || legend_grouping == "adjusted") { ColorMappingList = c.list(ColorMappingList, list = get_color_mapping_list(ht@bottom_annotation)) ColorMappingParamList = c.list(ColorMappingParamList, list = get_legend_param_list(ht@bottom_annotation)) } } if(!is.null(ht@right_annotation)) { if(object@ht_list_param$merge_legends) { ColorMappingList = c.list(ColorMappingList, list = get_color_mapping_list(ht@right_annotation)) ColorMappingParamList = c.list(ColorMappingParamList, list = get_legend_param_list(ht@right_annotation)) } } } else if(inherits(object@ht_list[[i]], "HeatmapAnnotation")) { if(object@ht_list_param$merge_legends || legend_grouping == "adjusted") { ColorMappingList = c.list(ColorMappingList, list = get_color_mapping_list(object@ht_list[[i]])) ColorMappingParamList = c.list(ColorMappingParamList, list = get_legend_param_list(object@ht_list[[i]])) } } } } size = draw_legend(ColorMappingList, ColorMappingParamList, side = side, plot = FALSE, legend_list = legend_list, padding = padding, ...) return(size) }) setMethod(f = "annotation_legend_size", signature = "HeatmapList", definition = function(object, legend_list = list(), ...) 
{ side = object@annotation_legend_param$side padding = object@annotation_legend_param$padding direction = object@direction legend_grouping = object@ht_list_param$legend_grouping ColorMappingList = list() ColorMappingParamList = list() for(i in seq_along(object@ht_list)) { ht = object@ht_list[[i]] if(direction == "horizontal") { if(inherits(ht, "Heatmap")) { if(!is.null(ht@left_annotation)) { if(legend_grouping == "original") { ColorMappingList = c.list(ColorMappingList, list = get_color_mapping_list(ht@left_annotation)) ColorMappingParamList = c.list(ColorMappingParamList, list = get_legend_param_list(ht@left_annotation)) } } if(!is.null(ht@top_annotation)) { ColorMappingList = c.list(ColorMappingList, list = get_color_mapping_list(ht@top_annotation)) ColorMappingParamList = c.list(ColorMappingParamList, list = get_legend_param_list(ht@top_annotation)) } if(!is.null(ht@bottom_annotation)) { ColorMappingList = c.list(ColorMappingList, list = get_color_mapping_list(ht@bottom_annotation)) ColorMappingParamList = c.list(ColorMappingParamList, list = get_legend_param_list(ht@bottom_annotation)) } if(!is.null(ht@right_annotation)) { if(legend_grouping == "original") { ColorMappingList = c.list(ColorMappingList, list = get_color_mapping_list(ht@right_annotation)) ColorMappingParamList = c.list(ColorMappingParamList, list = get_legend_param_list(ht@right_annotation)) } } } else if(inherits(ht, "HeatmapAnnotation")) { if(legend_grouping == "original") { ColorMappingList = c.list(ColorMappingList, list = get_color_mapping_list(ht)) ColorMappingParamList = c.list(ColorMappingParamList, list = get_legend_param_list(ht)) } } } else { if(inherits(ht, "Heatmap")) { if(!is.null(ht@left_annotation)) { ColorMappingList = c.list(ColorMappingList, list = get_color_mapping_list(ht@left_annotation)) ColorMappingParamList = c.list(ColorMappingParamList, list = get_legend_param_list(ht@left_annotation)) } if(!is.null(ht@top_annotation)) { if(legend_grouping == "original") { ColorMappingList = c.list(ColorMappingList, list = get_color_mapping_list(ht@top_annotation)) ColorMappingParamList = c.list(ColorMappingParamList, list = get_legend_param_list(ht@top_annotation)) } } if(!is.null(ht@bottom_annotation)) { if(legend_grouping == "original") { ColorMappingList = c.list(ColorMappingList, list = get_color_mapping_list(ht@bottom_annotation)) ColorMappingParamList = c.list(ColorMappingParamList, list = get_legend_param_list(ht@bottom_annotation)) } } if(!is.null(ht@right_annotation)) { ColorMappingList = c.list(ColorMappingList, list = get_color_mapping_list(ht@right_annotation)) ColorMappingParamList = c.list(ColorMappingParamList, list = get_legend_param_list(ht@right_annotation)) } } else if(inherits(ht, "HeatmapAnnotation")) { if(legend_grouping == "original") { ColorMappingList = c.list(ColorMappingList, list = get_color_mapping_list(ht)) ColorMappingParamList = c.list(ColorMappingParamList, list = get_legend_param_list(ht)) } } } } size = draw_legend(ColorMappingList, ColorMappingParamList, side = side, plot = FALSE, legend_list = legend_list, padding = padding, ...) return(size) }) draw_legend = function(ColorMappingList, ColorMappingParamList, side = c("right", "left", "top", "bottom"), plot = TRUE, gap = unit(4, "mm"), legend_list = list(), padding = unit(c(0, 0, 0, 0), "mm"), max_height = unit(par("din")[2], "in"), max_width = unit(par("din")[1], "in"), ...) 
{ side = match.arg(side)[1] legend_names = sapply(ColorMappingList, function(x) x@name) ColorMappingParamList2 = ColorMappingList2 = vector("list", length(unique(legend_names))) names(ColorMappingParamList2) = names(ColorMappingList2) = unique(legend_names) for(i in seq_along(legend_names)) { if(is.null(ColorMappingList2[[ legend_names[i] ]])) { ColorMappingList2[[ legend_names[i] ]] = ColorMappingList[[i]] ColorMappingParamList2[[ legend_names[i] ]] = ColorMappingParamList[[i]] } else { if(ColorMappingList2[[ legend_names[i] ]]@type == "discrete" && ColorMappingList[[i]]@type == "discrete") { ColorMappingList2[[ legend_names[i] ]] = c(ColorMappingList2[[ legend_names[i] ]], ColorMappingList[[i]], name = legend_names[i]) } } } n = length(ColorMappingList2) if(n == 0 && length(legend_list) == 0) { u = unit(c(0, 0), "mm") attr(u, "multiple") = 1 return(u) } else { cm_grob = c(lapply(seq_along(ColorMappingList2), function(i) color_mapping_legend(ColorMappingList2[[i]], param = ColorMappingParamList2[[i]], plot = FALSE, ...)), legend_list) if(side %in% c("left", "right")) { pk = packLegend(list = cm_grob, gap = ht_opt$legend_gap[1], direction = "vertical", max_height = max_height) } else { pk = packLegend(list = cm_grob, gap = ht_opt$legend_gap[2], direction = "horizontal", max_width = max_width) } } width = width(pk) height = height(pk) if(plot) { if(side == "right") { draw(pk, x = padding[2] + width*0.5) } else if(side == "left") { draw(pk, x = width*0.5) } else if(side == "top") { draw(pk, y = padding[1] + height*0.5) } else if(side == "bottom") { draw(pk, y = height*0.5) } if(pk@multiple > 1) { if(is_RStudio_current_dev()) { if(ht_opt$message) { message_wrap("It seems you are using RStudio IDE. There are many legends and they are wrapped into multiple rows/columns. The arrangement relies on the physical size of the graphics device. It only generates correct plot in the figure panel, while in the zoomed plot (by clicking the icon 'Zoom') or in the exported plot (by clicking the icon 'Export'), the legend positions might be wrong. You can directly use e.g. pdf() to save the plot into a file.\n\nUse `ht_opt$message = FALSE` to turn off this message.") } } } } width = width + padding[2] + padding[4] height = height + padding[1] + padding[3] width = convertWidth(width, "mm") height = convertHeight(height, "mm") size = unit.c(width, height) attr(size, "multiple") = pk@multiple return(size) }
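# Hedged usage sketch (added; not part of the original source): the internal
# legend-drawing methods above are exercised through ComplexHeatmap's draw()
# interface. Assumes the ComplexHeatmap package is installed; only documented
# draw() arguments (heatmap_legend_side, annotation_legend_side) are used.
if (requireNamespace("ComplexHeatmap", quietly = TRUE)) {
  set.seed(123)
  mat_demo <- matrix(rnorm(100), nrow = 10)
  ha_demo <- ComplexHeatmap::HeatmapAnnotation(group = rep(c("a", "b"), each = 5))
  ht_demo <- ComplexHeatmap::Heatmap(mat_demo, name = "value", top_annotation = ha_demo)
  ComplexHeatmap::draw(ht_demo, heatmap_legend_side = "bottom",
                       annotation_legend_side = "bottom")
}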
simDat.eQTL.scRNAseq = function(nSubj = 50, nCellPerSubj = 100, zero.p = 0.01,
                                m.int = 0, sigma.int = 1, slope = 1, theta = 1,
                                MAF = 0.45) {
  N = nSubj * nCellPerSubj
  id = rep(seq_len(nSubj), each = nCellPerSubj)
  # Hardy-Weinberg genotype frequencies
  pAA = MAF^2
  pAB = 2 * MAF * (1 - MAF)
  pBB = (1 - MAF)^2
  geno = rep(sample(x = c(2, 1, 0), size = nSubj, replace = TRUE,
                    prob = c(pAA, pAB, pBB)), each = nCellPerSubj)
  counts = rep(0, N)
  # NBflag == 0 marks the structural zeros of the zero-inflated model
  NBflag = sample(x = c(0, 1), size = N, prob = c(zero.p, 1 - zero.p), replace = TRUE)
  pos.NB = which(NBflag == 1)
  N.NB = length(pos.NB)
  # subject-specific random intercept, repeated for each cell of the subject
  intercept = rep(rnorm(n = nSubj, mean = m.int, sd = sigma.int), each = nCellPerSubj)
  mu = exp(intercept + slope * geno)
  # draw negative binomial counts for the non-structural-zero cells, using the
  # means of those same cells (mu[pos.NB]) rather than the first N.NB means
  counts[pos.NB] = rnbinom(n = N.NB, size = theta, mu = mu[pos.NB])
  frame = data.frame(id = id, geno = geno, counts = counts)
  invisible(frame)
}
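# Minimal usage sketch for simDat.eQTL.scRNAseq() (added for illustration; not
# from the original source): simulate zero-inflated NB counts for 20 subjects
# with 50 cells each and inspect the genotype/count layout.
set.seed(42)
sim_frame <- simDat.eQTL.scRNAseq(nSubj = 20, nCellPerSubj = 50,
                                  zero.p = 0.05, slope = 0.8, MAF = 0.3)
str(sim_frame)
table(sim_frame$geno) / 50   # number of subjects per genotype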
library(perm) library(coin) packageDescription("coin")$Version set.seed(1) nsim<-3 n<-8 outperm1<-outcoin1<-rep(NA,nsim) g<-factor(c(rep("a",n/2),rep("b",n/2))) y<-rnorm(n) permTS(y~g,method="exact.network",alternative="less") independence_test(y~g,distribution=exact(),alternative="less") permTS(y~g,method="pclt",alternative="less") independence_test(y~g,distribution=asymptotic(),alternative="less") permTS(y~g,method="exact.mc",alternative="less") independence_test(y~g,distribution=approximate(),alternative="less") for (i in 1:nsim){ y<-rnorm(n) outperm1[i]<-permTS(y~g,method="exact.network",alternative="less")$p.value outcoin1[i]<-pvalue(independence_test(y~g,distribution=exact(),alternative="less")) } all.equal(outperm1,outcoin1) n<-100 g<-factor(c(rep("a",n/2),rep("b",n/2))) for (i in 1:nsim){ y<-rnorm(n) outperm1[i]<-permTS(y~g,method="pclt",alternative="less")$p.value outcoin1[i]<-pvalue(independence_test(y~g,distribution=asymptotic(),alternative="less")) } all.equal(outperm1,outcoin1) outperm2<-outcoin2<-rep(NA,nsim) n<-100 set.seed(10301) g<-factor(c(rep("a",n/4),rep("b",n/4),rep("c",n/4),rep("d",n/4)),levels=c("c","d","b","a")) for (i in 1:nsim){ y<-rnorm(n) outperm2[i]<-permKS(y~g,method="pclt")$p.value outcoin2[i]<-pvalue(independence_test(y~g,teststat="quad",distribution=asymptotic())) } all.equal(outperm2,outcoin2)
StrategyConfig <- R6Class( "StrategyConfig", public = list( config = list(), initialize = function(config) { if (is.null(config$solver)) { config$solver <- "glpk" } if (is.null(config$vol_var)) { config$vol_var <- "rc_vol" } if (is.null(config$price_var)) { config$price_var <- "ref_price" } self$config <- config self$valid() invisible(self) }, getStrategyNames = function() { setdiff(names(self$config$strategies), "joint") }, getConfig = function(name) { config_val <- self$config[[name]] if (length(config_val) %in% 0) { return(NULL) } config_val }, getStrategyConfig = function(strategy, name) { config_val <- self$config$strategies[[strategy]][[name]] if (length(config_val) %in% 0) { return(NULL) } if (!name %in% c("in_var", "constraints")) { config_val <- as.numeric(config_val) } config_val }, valid = function() { top_level_required <- c("vol_var", "price_var", "solver") for (name in top_level_required) { if (is.null(self$getConfig(name))) { stop(paste0("Missing top-level setting: ", name)) } } if (length(self$getStrategyNames()) %in% 0) { stop("No strategies found in config") } strategy_name_blacklist <- c("joint") for (strategy in self$getStrategyNames()) { required_config_vars <- c("strategy_capital", "position_limit_pct_adv", "position_limit_pct_lmv", "position_limit_pct_smv", "trading_limit_pct_adv", "ideal_long_weight", "ideal_short_weight", "in_var" ) for (config_var in required_config_vars) { if (is.null(self$getStrategyConfig(strategy, config_var))) { stop(paste0("Missing ", config_var, " setting in strategy config for strategy: ", strategy)) } } strategy_name_blacklist <- c(strategy_name_blacklist, self$getStrategyConfig(strategy, "in_var")) constraint_config <- self$getStrategyConfig(strategy, "constraints") for (constraint_name in names(constraint_config)) { in_var <- constraint_config[[constraint_name]]$in_var if (is.null(in_var) || length(in_var) %in% 0) { stop(paste0("Missing in_var value for constraint: ", constraint_name)) } constraint_type <- constraint_config[[constraint_name]]$type if (is.null(constraint_type) || length(constraint_type) %in% 0) { stop(paste0("Missing constraint_type value for constraint: ", constraint_name)) } if (!constraint_type %in% c("factor", "category")) { stop(paste0("Invalid constraint type of constraint: ", constraint_name)) } strategy_name_blacklist <- c(strategy_name_blacklist, in_var) } } if (any(self$getStrategyNames() %in% strategy_name_blacklist)) { stop(paste0("Invalid strategy name found. Strategy names may not ", "match signal or constraint in_var names. ", "The strategy name 'joint' is reserved.")) } TRUE } ))
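# Hedged construction sketch (added; not part of the original source): a
# minimal configuration list that satisfies StrategyConfig$valid() as coded
# above. All names and values here are hypothetical; the R6 package must be
# available for the class definition itself.
demo_config <- list(
  strategies = list(
    demo_strategy = list(
      strategy_capital = 1e6,
      position_limit_pct_adv = 5,
      position_limit_pct_lmv = 1,
      position_limit_pct_smv = 1,
      trading_limit_pct_adv = 2,
      ideal_long_weight = 1,
      ideal_short_weight = 1,
      in_var = "alpha_1"
    )
  )
)
demo_strategy_config <- StrategyConfig$new(demo_config)
demo_strategy_config$getStrategyNames()   # "demo_strategy"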
p.res <- function(s, z, mu, sig, w){
  R <- dim(z)[1]
  nlag <- dim(z)[3]
  ret <- numeric(R)
  for(r in 1:R){
    prev <- 1
    for(lag in 1:nlag){
      # product over lags of the normal-mixture CDF evaluated at the residual
      prev <- prev*sum(w[s,]*pnorm(z[r,s,lag], mu[s,,lag], sig[s,,lag]))
    }
    ret[r] <- prev
  }
  return(ret)
}

r.res <- function(R,S,nlag,w,m,s){
  sim.Z <- array(0, dim=c(R,S,nlag))
  for(sw in 1:S){
    # number of draws to take from each mixture component
    hwmany <- rmultinom(1, R, w[sw,])
    r <- 1
    c <- 1
    while(r<=R){
      if(hwmany[c]>0){
        if(dim(s)[3]==1){
          sim.Z[r:(r+hwmany[c]-1),sw,] <- rnorm(hwmany[c], m[sw,c,], s[sw,c,])
        }else{
          # multivariate draw across lags; rmvnorm() comes from the mvtnorm package
          sim.Z[r:(r+hwmany[c]-1),sw,] <- rmvnorm(hwmany[c], m[sw,c,], diag(s[sw,c,]^2))
        }
      }
      r <- r+hwmany[c]
      c <- c+1
    }
  }
  sim.Z <- matrix(sim.Z, nrow=R, ncol=S*nlag)
  return(sim.Z)
}
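# Small illustrative call of r.res() as defined above (added; not from the
# original source). With nlag = 1 only stats::rnorm is needed; for nlag > 1
# the function relies on rmvnorm() from the mvtnorm package.
set.seed(7)
S_demo <- 2; C_demo <- 3; nlag_demo <- 1; R_demo <- 500
w_demo <- matrix(1 / C_demo, nrow = S_demo, ncol = C_demo)          # equal mixture weights
m_demo <- array(rnorm(S_demo * C_demo), dim = c(S_demo, C_demo, nlag_demo))  # component means
s_demo <- array(1, dim = c(S_demo, C_demo, nlag_demo))              # component sds
Z_demo <- r.res(R_demo, S_demo, nlag_demo, w_demo, m_demo, s_demo)
dim(Z_demo)   # R_demo x (S_demo * nlag_demo)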