code | repo_name | path | language | license | size
---|---|---|---|---|---|
package com.madhukaraphatak.sizeof.examples
import java.util.LinkedHashMap
import com.madhukaraphatak.sizeof.SizeEstimator
import org.apache.log4j.Logger
/**
* An implementation of Cache that estimates the sizes of its entries and
* attempts to limit its total memory usage to a fraction of the JVM heap.
* Objects' sizes are estimated using SizeEstimator, which has limitations;
* most notably, we will overestimate total memory used if some cache
* entries have pointers to a shared object. Nonetheless, this Cache should
* work well when most of the space is used by arrays of primitives or of
* simple classes.
*/
class BoundedMemoryCache {
private val maxBytes: Long = getMaxBytes()
private val logger = Logger.getLogger(classOf[BoundedMemoryCache])
logger.info("BoundedMemoryCache.maxBytes = " + maxBytes)
private var currentBytes = 0L
private val map = new LinkedHashMap[Any, Entry](32, 0.75f, true)
// An entry in our map; stores a cached object and its size in bytes
class Entry(val value: Any, val size: Long) {}
def get(key: Any): Any = {
synchronized {
val entry = map.get(key)
if (entry != null) entry.value else null
}
}
def put(key: Any, value: Any) {
logger.info("Asked to add key " + key)
val startTime = System.currentTimeMillis
val size = SizeEstimator.estimate(value.asInstanceOf[AnyRef])
val timeTaken = System.currentTimeMillis - startTime
logger.info("Estimated size for key %s is %d".format(key, size))
logger.info("Size estimation for key %s took %d ms".format(key, timeTaken))
synchronized {
ensureFreeSpace(size)
logger.info("Adding key " + key)
map.put(key, new Entry(value, size))
currentBytes += size
logger.info("Number of entries is now " + map.size)
}
}
private def getMaxBytes(): Long = {
val memoryFractionToUse = System.getProperty(
"boundedMemoryCache.memoryFraction", "0.75").toDouble
(Runtime.getRuntime.totalMemory * memoryFractionToUse).toLong
}
/**
* Remove least recently used entries from the map until at least space
* bytes are free. Assumes that a lock is held on the BoundedMemoryCache.
*/
private def ensureFreeSpace(space: Long) {
logger.info("ensureFreeSpace(%d) called with curBytes=%d, maxBytes=%d".format(
space, currentBytes, maxBytes))
val iter = map.entrySet.iterator
while (maxBytes - currentBytes < space && iter.hasNext) {
val mapEntry = iter.next()
dropEntry(mapEntry.getKey, mapEntry.getValue)
currentBytes -= mapEntry.getValue.size
iter.remove()
}
}
protected def dropEntry(key: Any, entry: Entry) {
logger.info("Dropping key %s of size %d to make space".format(
key, entry.size))
}
}
/**
* This code shows how to use the cache
*/
object BoundedMemoryCache {
def main(args: Array[String]) {
//define the fraction of the JVM heap to be used
System.setProperty("boundedMemoryCache.memoryFraction","0.01")
val cache = new BoundedMemoryCache
//put some values
cache.put("simpleKey","hello")
cache.put("bigKey",0 to 10000 map (index => index+1))
//cache starts dropping keys to get more space
cache.put("moreBigger", 0 to 100000 map (index => index+1))
}
}
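// A minimal usage sketch (not part of the original example): the access-ordered
// LinkedHashMap means `get` refreshes an entry, so recently read keys are evicted last.
// Key names and value sizes below are illustrative only.
object BoundedMemoryCacheLruSketch {
  def main(args: Array[String]): Unit = {
    System.setProperty("boundedMemoryCache.memoryFraction", "0.01")
    val cache = new BoundedMemoryCache
    cache.put("first", 0 to 10000 map (index => index + 1))
    cache.put("second", 0 to 10000 map (index => index + 1))
    // Reading "first" marks it as recently used, so under memory pressure
    // ensureFreeSpace drops "second" before "first".
    cache.get("first")
    cache.put("third", 0 to 100000 map (index => index + 1))
  }
}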
| phatak-dev/java-sizeof | examples/src/main/scala/com/madhukaraphatak/sizeof/examples/BoundedMemoryCache.scala | Scala | apache-2.0 | 3,267 |
package org.openapitools.models
import io.circe._
import io.finch.circe._
import io.circe.generic.semiauto._
import io.circe.java8.time._
import org.openapitools._
import org.openapitools.models.ExtensionClassImpl
/**
*
* @param ioPeriodjenkinsPeriodblueoceanPeriodservicePeriodembeddedPeriodrestPeriodPipelineImpl
* @param ioPeriodjenkinsPeriodblueoceanPeriodservicePeriodembeddedPeriodrestPeriodMultiBranchPipelineImpl
* @param Underscoreclass
*/
case class ExtensionClassContainerImpl1map(ioPeriodjenkinsPeriodblueoceanPeriodservicePeriodembeddedPeriodrestPeriodPipelineImpl: Option[ExtensionClassImpl],
ioPeriodjenkinsPeriodblueoceanPeriodservicePeriodembeddedPeriodrestPeriodMultiBranchPipelineImpl: Option[ExtensionClassImpl],
Underscoreclass: Option[String]
)
object ExtensionClassContainerImpl1map {
/**
* Creates the codec for converting ExtensionClassContainerImpl1map from and to JSON.
*/
implicit val decoder: Decoder[ExtensionClassContainerImpl1map] = deriveDecoder
implicit val encoder: ObjectEncoder[ExtensionClassContainerImpl1map] = deriveEncoder
}
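// A hedged usage sketch for the generated codecs above. It assumes io.circe.parser and
// io.circe.syntax are available on the classpath (they are standard circe modules, but
// this generated file does not itself reference them).
object ExtensionClassContainerImpl1mapUsageSketch {
  import io.circe.parser.decode
  import io.circe.syntax._
  // Parses a JSON document into the model and re-renders it; the implicit decoder and
  // encoder defined in the companion object above are picked up automatically.
  def roundTrip(json: String): Either[io.circe.Error, String] =
    decode[ExtensionClassContainerImpl1map](json).map(_.asJson.noSpaces)
}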
| cliffano/swaggy-jenkins | clients/scala-finch/generated/src/main/scala/org/openapitools/models/ExtensionClassContainerImpl1map.scala | Scala | mit | 1,148 |
// Copyright 2017 EPFL DATA Lab (data.epfl.ch)
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package squid.utils
import org.scalatest.FunSuite
class CollectionUtilsTest extends FunSuite {
import CollectionUtils._
test("List Test") {
val (even,odd) = List(1,2,3,4,5) collectPartition {
case n if n % 2 == 1 => s"${n/2}*2+1"
}
assert((even: List[String]) == List("0*2+1", "1*2+1", "2*2+1"))
assert((odd: List[Int]) == List(2, 4))
{
val (evenStrs,oddStrs) = List(1,2,3,4,5) mapSplit {
case n if n % 2 == 0 => Left( s"${n/2} * 2")
case n => Right(s"${n/2} * 2 + 1")
}
assert((evenStrs: List[String]) == List("1 * 2", "2 * 2"))
assert((oddStrs: List[String]) == List("0 * 2 + 1", "1 * 2 + 1", "2 * 2 + 1"))
}
{
val (evenStrs,oddStrs) = List(1,2,3,4,5) collectOr ({
case n if n % 2 == 0 => s"${n/2} * 2"
}, n => s"${n/2} * 2 + 1")
assert((evenStrs: List[String]) == List("1 * 2", "2 * 2"))
assert((oddStrs: List[String]) == List("0 * 2 + 1", "1 * 2 + 1", "2 * 2 + 1"))
}
}
test("String Test") {
val (es,rest) = "el toro fuerte" collectPartition {
case c @ ('e' | 'o') => c.toUpper
}
assert((es: String) == "EOOEE")
assert((rest: String) == "l tr furt")
val (as,bs) = "Asi es el Toro Fuerte" mapSplit {
case c if c.isUpper => Left(c)
case c => Right(c)
}
assert(as == "ATF" && bs == "si es el oro uerte")
val (as0,bs0) = Map(1 -> "ok", 2 -> "ko") mapSplit {
case kv if kv._1 % 2 == 0 => Right(kv)
case kv => Left(kv)
}
assert(as0 == Map(1 -> "ok"))
}
test("Seq of Either to 2 Seqs") {
val ls = Seq(Left('ok), Right(42), Right(666), Left('ko), Right(-1))
val (syms, ints) = ls mapSplit identity
assert((syms: Seq[Symbol]) == List('ok, 'ko))
assert((ints: Seq[Int]) == List(42, 666, -1))
}
test("With map") {
val ctx = Map('a -> 1, 'b -> 2) map {case(n,v) => n->(n,v)}
val (bound, unbound) = Vector('a, 'a, 'c, 'b) collectPartition ctx
assert( (bound: Vector[(Symbol, Int)], unbound: Vector[Symbol]) == (Vector(('a,1), ('a,1), ('b,2)),Vector('c)) )
}
test("Zip and") {
assert( (List(1,2,3) zipAnd List(0,1,2))(_ + _) == List(1,3,5) )
// in fact, similar to:
assert( (List(1,2,3), List(0,1,2)).zipped.map(_ + _) == List(1,3,5) )
}
test("In-place filter") {
import scala.collection.mutable
val b0 = mutable.Buffer(1,2,3,5,7,8)
val b1 = b0.clone
b0.filter_!(_ % 2 == 0)
b1.filter_!(_ % 2 == 1)
assert(b0 == Seq(2,8))
assert(b1 == Seq(1,3,5,7))
}
}
| epfldata/squid | core/src/test/scala/squid/utils/CollectionUtilsTest.scala | Scala | apache-2.0 | 3,300 |
/*
* @author Philip Stutz
*
* Copyright 2014 University of Zurich
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.signalcollect.triplerush
import com.signalcollect.interfaces.VertexToWorkerMapper
import com.signalcollect.interfaces.MapperFactory
import com.signalcollect.triplerush.EfficientIndexPattern._
import scala.util.hashing.MurmurHash3._
class LoadBalancingTripleMapper(val numberOfNodes: Int, val workersPerNode: Int) extends VertexToWorkerMapper[Long] {
val numberOfWorkers = numberOfNodes * workersPerNode
def getWorkerIdForVertexId(vertexId: Long): Int = {
val first = vertexId.extractFirst
val second = vertexId.extractSecond
val loadBalanceId = finalizeHash(mixLast(first, second), 3) & Int.MaxValue
if (first < 0) {
if (second < 0) {
// It's a query id, map to first node and load balance on the workers there.
loadBalanceId % workersPerNode
} else {
// First encodes a predicate, second encodes an object.
if (second > 0) {
// Object is not a wildcard and we use it for node assignment.
workerIdOptimized(nodeAssignmentId = second, workerAssignmentId = loadBalanceId)
} else {
// Everything but the predicate is a wildcard. We use the predicate for both node assignment and load balancing.
val p = first & Int.MaxValue
workerIdOptimized(nodeAssignmentId = p, workerAssignmentId = loadBalanceId)
}
}
} else if (first > 0) {
// First represents the subject and we use it for node assignment.
workerIdOptimized(nodeAssignmentId = first, workerAssignmentId = loadBalanceId)
} else {
// Subject is a wildcard, we use whatever is in second for node assignment.
val predicateOrObject = second & Int.MaxValue
workerIdOptimized(nodeAssignmentId = predicateOrObject, workerAssignmentId = loadBalanceId)
}
}
/**
* Requires that both nodeAssignmentId and workerAssignmentId
* are larger than or equal to zero.
*/
@inline final def workerIdOptimized(nodeAssignmentId: Int, workerAssignmentId: Int): Int = {
val nodeId = nodeAssignmentId % numberOfNodes
val workerOnNode = workerAssignmentId % workersPerNode
nodeId * workersPerNode + workerOnNode
}
def getWorkerIdForVertexIdHash(vertexIdHash: Int): Int = throw new UnsupportedOperationException("This mapper does not support mapping by vertex hash.")
}
object LoadBalancingTripleMapperFactory extends MapperFactory[Long] {
def createInstance(numberOfNodes: Int, workersPerNode: Int) = new LoadBalancingTripleMapper(numberOfNodes, workersPerNode)
}
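// A small sketch of the node/worker arithmetic in workerIdOptimized, using an
// illustrative cluster of 4 nodes with 8 workers per node (the numbers are made up).
object LoadBalancingTripleMapperSketch {
  def main(args: Array[String]): Unit = {
    val mapper = new LoadBalancingTripleMapper(numberOfNodes = 4, workersPerNode = 8)
    // nodeId = 10 % 4 = 2, workerOnNode = 5 % 8 = 5, so the worker id is 2 * 8 + 5 = 21.
    println(mapper.workerIdOptimized(nodeAssignmentId = 10, workerAssignmentId = 5)) // prints 21
  }
}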
| jacqueslk/triplerush-filter | src/main/scala/com/signalcollect/triplerush/LoadBalancingTripleMapper.scala | Scala | apache-2.0 | 3,176 |
package org.scalatest.examples.funsuite.getfixture
import org.scalatest.FunSuite
import collection.mutable.ListBuffer
class ExampleSuite extends FunSuite {
def fixture =
new {
val builder = new StringBuilder("ScalaTest is ")
val buffer = new ListBuffer[String]
}
test("Testing should be easy") {
val f = fixture
f.builder.append("easy!")
assert(f.builder.toString === "ScalaTest is easy!")
assert(f.buffer.isEmpty)
f.buffer += "sweet"
}
test("Testing should be fun") {
val f = fixture
f.builder.append("fun!")
assert(f.builder.toString === "ScalaTest is fun!")
assert(f.buffer.isEmpty)
}
} | hubertp/scalatest | examples/src/main/scala/org/scalatest/examples/funsuite/getfixture/ExampleSuite.scala | Scala | apache-2.0 | 665 |
package leo.modules.agent.rules
import leo.datastructures.blackboard.{Blackboard, DataStore, DataType, ImmutableDelta}
/**
* Wrapper for an agent graph.
*
* Contains a list of all connected rules
* and the corresponding data structures,
* as well as the entry point for new data.
*
*/
trait RuleGraph[In, Out] {
/**
* All rules
* (edges) of the graph.
*
* @return rules of the graph
*/
def rules : Iterable[Rule]
/**
* All data structures
* (vertices) of the graph
*
* @return datastructures of the graph
*/
def dataStructures : Iterable[DataStore]
/**
*
* The type under which initial data is inserted into the graph.
*
* @return initial type of data
*/
def initType : DataType[In]
/**
* The datatype of the result.
*
* @return type of the data storing results
*/
def outType : DataType[Out]
/**
* Contains all DataTypes used throughout the graph
*/
lazy val containedTypes : Iterable[DataType[Any]] = {
rules.flatMap{x => x.inTypes concat x.outTypes}
}
/**
* Registers the complete graph in the blackboard.
* All agents get their initial data as one compressed delta.
*
* If an empty initSet is passed, initial data can still be
* passed into the graph.
*
* @param initSet Set of initial data
* @param blackoard The blackboard over which all actions are scheduled
*/
def initGraph(initSet : Iterable[In])(implicit blackoard : Blackboard) = {
// Add DS
dataStructures foreach (x => blackoard.addDS(x))
// Add Rules
rules foreach (x => blackoard.registerAgent(new RuleAgent(x)))
// Add Data
val delta = new ImmutableDelta(Map(initType -> initSet.toSeq))
blackoard.submitDelta(delta)
}
def fetchResult(implicit blackboard: Blackboard) : Iterable[Out] = {
blackboard.getData(outType)
}
}
| leoprover/Leo-III | oldsrc/main/scala/leo/modules/agent/rules/RuleGraph.scala | Scala | bsd-3-clause | 1,866 |
class ClassParametersRepeated(args: String*) {
def arguments = argString(args)
def argString(as: Seq[String]): String =
if (as.isEmpty) ""
else as.head + " " + argString(as.tail)
}
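// A short usage sketch for the class above (not from the book source). Note that
// argString appends a trailing space after every element.
object ClassParametersRepeatedDemo {
  def main(args: Array[String]): Unit = {
    val c = new ClassParametersRepeated("scala", "is", "fun")
    println(c.arguments) // prints "scala is fun " (with a trailing space)
  }
}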
| grzegorzbalcerek/scala-book-examples | examples/ClassParametersRepeated.scala | Scala | mit | 193 |
package dotty.tools.scaladoc
package tasty.comments
import scala.jdk.CollectionConverters._
import com.vladsch.flexmark.util.{ast => mdu}
object dbg:
case class See(n: mdu.Node, c: Seq[See]) {
def show(sb: StringBuilder, indent: Int): Unit = {
sb ++= " " * indent
sb ++= n.toString
sb ++= "\\n"
c.foreach { s => s.show(sb, indent + 2) }
}
override def toString = {
val sb = new StringBuilder
show(sb, 0)
sb.toString
}
}
def see(n: mdu.Node): See =
See(n, n.getChildIterator.asScala.map(see).toList)
| dotty-staging/dotty | scaladoc/src/dotty/tools/scaladoc/tasty/comments/package.scala | Scala | apache-2.0 | 571 |
/* Copyright 2009-2016 EPFL, Lausanne */
package leon
package utils
import purescala.Common._
import purescala.Definitions._
import purescala.Expressions._
import purescala.Extractors._
import purescala.Constructors._
import purescala.Types._
// FIXME: Unused and untested
object UnitElimination extends TransformationPhase {
val name = "Unit Elimination"
val description = "Remove all usage of the Unit type and value"
private var fun2FreshFun: Map[FunDef, FunDef] = Map()
private var id2FreshId: Map[Identifier, Identifier] = Map()
def apply(ctx: LeonContext, pgm: Program): Program = {
val newUnits = pgm.units map { u => u.copy(defs = u.defs.map {
case m: ModuleDef =>
fun2FreshFun = Map()
val allFuns = m.definedFunctions
//first introduce new signatures without Unit parameters
allFuns.foreach(fd => {
if(fd.returnType != UnitType && fd.params.exists(vd => vd.getType == UnitType)) {
val freshFunDef = fd.duplicate(params = fd.params.filterNot(vd => vd.getType == UnitType))
fun2FreshFun += (fd -> freshFunDef)
} else {
fun2FreshFun += (fd -> fd) //this will make the next step simpler
}
})
//then apply recursively to the bodies
val newFuns = allFuns.collect{ case fd if fd.returnType != UnitType =>
val newFd = fun2FreshFun(fd)
newFd.fullBody = removeUnit(fd.fullBody)
newFd
}
ModuleDef(m.id, m.definedClasses ++ newFuns, m.isPackageObject )
case d =>
d
})}
Program(newUnits)
}
private def simplifyType(tpe: TypeTree): TypeTree = tpe match {
case TupleType(tpes) => tupleTypeWrap(tpes.map(simplifyType).filterNot{ _ == UnitType })
case t => t
}
//remove unit value as soon as possible, so expr should never be equal to a unit
private def removeUnit(expr: Expr): Expr = {
assert(expr.getType != UnitType)
expr match {
case fi@FunctionInvocation(tfd, args) =>
val newArgs = args.filterNot(arg => arg.getType == UnitType)
FunctionInvocation(fun2FreshFun(tfd.fd).typed(tfd.tps), newArgs).setPos(fi)
case IsTyped(Tuple(args), TupleType(tpes)) =>
val newArgs = tpes.zip(args).collect {
case (tp, arg) if tp != UnitType => arg
}
tupleWrap(newArgs.map(removeUnit)) // @mk: FIXME this may actually return a Unit, is that cool?
case ts@TupleSelect(t, index) =>
val TupleType(tpes) = t.getType
val simpleTypes = tpes map simplifyType
val newArity = tpes.count(_ != UnitType)
val newIndex = simpleTypes.take(index).count(_ != UnitType)
tupleSelect(removeUnit(t), newIndex, newArity)
case Let(id, e, b) =>
if(id.getType == UnitType)
removeUnit(b)
else {
id.getType match {
case TupleType(tpes) if tpes.contains(UnitType) => {
val newTupleType = tupleTypeWrap(tpes.filterNot(_ == UnitType))
val freshId = FreshIdentifier(id.name, newTupleType)
id2FreshId += (id -> freshId)
val newBody = removeUnit(b)
id2FreshId -= id
Let(freshId, removeUnit(e), newBody)
}
case _ => Let(id, removeUnit(e), removeUnit(b))
}
}
case LetDef(fds, b) =>
val nonUnits = fds.filter(fd => fd.returnType != UnitType)
if(nonUnits.isEmpty) {
removeUnit(b)
} else {
val fdtoFreshFd = for(fd <- nonUnits) yield {
val m = if(fd.params.exists(vd => vd.getType == UnitType)) {
val freshFunDef = fd.duplicate(params = fd.params.filterNot(vd => vd.getType == UnitType))
fd -> freshFunDef
} else {
fd -> fd
}
fun2FreshFun += m
m
}
for((fd, freshFunDef) <- fdtoFreshFd) {
if(fd.params.exists(vd => vd.getType == UnitType)) {
freshFunDef.fullBody = removeUnit(fd.fullBody)
} else {
fd.body = fd.body.map(b => removeUnit(b))
}
}
val rest = removeUnit(b)
val newFds = for((fd, freshFunDef) <- fdtoFreshFd) yield {
fun2FreshFun -= fd
if(fd.params.exists(vd => vd.getType == UnitType)) {
freshFunDef
} else {
fd
}
}
letDef(newFds, rest)
}
case ite@IfExpr(cond, tExpr, eExpr) =>
val thenRec = removeUnit(tExpr)
val elseRec = removeUnit(eExpr)
IfExpr(removeUnit(cond), thenRec, elseRec)
case v @ Variable(id) =>
if(id2FreshId.isDefinedAt(id))
Variable(id2FreshId(id))
else v
case m @ MatchExpr(scrut, cses) =>
val scrutRec = removeUnit(scrut)
val csesRec = cses.map{ cse =>
MatchCase(cse.pattern, cse.optGuard map removeUnit, removeUnit(cse.rhs))
}
matchExpr(scrutRec, csesRec).setPos(m)
case Operator(args, recons) =>
recons(args.map(removeUnit))
case _ => sys.error("not supported: " + expr)
}
}
}
| regb/leon | src/main/scala/leon/utils/UnitElimination.scala | Scala | gpl-3.0 | 5,215 |
package com.alanjz.meerkat.app.menu.file
import com.alanjz.meerkat.app.menu.MCMenu
object MCFileMenu extends MCMenu("File") {
this.add(MCExit)
}
| spacenut/meerkat-chess | src/com/alanjz/meerkat/app/menu/file/MCFileMenu.scala | Scala | gpl-2.0 | 149 |
package usbinstall.controllers
import com.typesafe.scalalogging.StrictLogging
import javafx.fxml.{FXML, FXMLLoader, Initializable}
import javafx.scene.{Parent, Scene}
import javafx.scene.control.{Label, Tab, TabPane, TextArea}
import javafx.scene.layout.{AnchorPane, GridPane, VBox}
import javafx.stage.{Modality, Stage}
import suiryc.scala.concurrent.{Cancellable, CancellableFuture, Cancelled}
import suiryc.scala.javafx.beans.value.RichObservableValue._
import suiryc.scala.javafx.concurrent.JFXSystem
import suiryc.scala.javafx.scene.control.{Dialogs, LogArea}
import suiryc.scala.javafx.stage.{Stages => sfxStages}
import suiryc.scala.log.ThresholdLogLinePatternWriter
import usbinstall.{HasEventSubscriptions, InstallUI, InstallationException, StepPane, USBInstall, UseStepPane}
import usbinstall.os.{OSInstall, OSKind}
import usbinstall.settings.{ErrorAction, InstallSettings, Settings}
import java.net.URL
import java.util.ResourceBundle
import scala.annotation.nowarn
import scala.util.{Failure, Success}
class InstallController
extends Initializable
with UseStepPane
with HasEventSubscriptions
with StrictLogging
{
@FXML
protected var vbox: VBox = _
@FXML
protected var grid: GridPane = _
@FXML
protected var step: Label = _
@FXML
protected var action: Label = _
@FXML
protected var activityArea: TextArea = _
@FXML
protected var logPanes: TabPane = _
@FXML
protected var installTab: Tab = _
protected var activityLogArea: LogArea = _
protected var stepPane: StepPane = _
// Note: subscriptions on external objects need to be cancelled for
// pane/scene to be GCed.
// Note: we need to wait for 'initialize' to get the JavaFX controls.
protected var ui: InstallUI = _
protected var cancellableFuture: CancellableFuture[List[String]] = _
protected var installLogWriter: ThresholdLogLinePatternWriter = _
private val profile = InstallSettings.profile.get.get
override def initialize(fxmlFileLocation: URL, resources: ResourceBundle): Unit = {
activityLogArea = LogArea(activityArea)
installLogWriter = activityLogArea.msgWriter
installLogWriter.setPattern(Settings.core.logInstallPattern)
USBInstall.addLogWriter(installLogWriter)
installLogWriter.setThreshold(Settings.core.logInstallThreshold.get.level)
subscriptions ::= Settings.core.logInstallThreshold.listen { v =>
installLogWriter.setThreshold(v.level)
}
ui = new InstallUI(step, action, activityLogArea, None)
subscriptions ::= USBInstall.stage.widthProperty().listen { width =>
logPanes.setMaxWidth(width.asInstanceOf[Double])
}
}
private def taskDone(): Unit = {
USBInstall.removeLogWriter(installLogWriter)
// First enable 'Previous' and disable 'Cancel'
JFXSystem.schedule {
stepPane.previous.disable = false
stepPane.next.disable = true
}
// Then replace 'Cancel' by 'Done'
JFXSystem.schedule {
stepPane.next.label = "Done"
stepPane.next.onTrigger = () => {
onDone()
true
}
stepPane.next.disable = false
}
}
private def taskFailed(ex: Throwable): Unit = {
val (log, notified) = ex match {
case _: Cancelled =>
// Activity area already notified
Dialogs.warning(
owner = Some(USBInstall.stage),
title = Some("Installation failed"),
contentText = Some("Installation was cancelled")
)
(false, true)
case InstallationException(_, _, n) =>
(true, n)
case _ =>
(true, false)
}
if (log)
logger.error(s"Installation failed", ex)
if (!notified) {
Dialogs.error(
owner = Some(USBInstall.stage),
title = Some("Installation failed"),
ex = Some(ex)
)
}
taskDone()
}
override def setStepPane(stepPane: StepPane): Unit = {
this.stepPane = stepPane
// Note: since we access stepPane upon completion, we need to set it first
// and cannot start installing upon 'initialize'.
// In case an error message needs to be shown immediately, it is best to
// wait for this stage to be shown before starting installing.
def install(): Unit = {
import scala.concurrent.ExecutionContext.Implicits.global
cancellableFuture = CancellableFuture(installTask)
cancellableFuture.future.onComplete {
case Failure(ex) =>
taskFailed(ex)
case Success(failedOSes) =>
logger.info(s"Task ended")
taskDone()
if (failedOSes.isEmpty) {
Dialogs.information(
owner = Some(USBInstall.stage),
title = Some("Installation done"),
contentText = Some("Installation ended without errors")
)
}
else {
Dialogs.warning(
owner = Some(USBInstall.stage),
title = Some("Installation done"),
contentText = Some(s"Installation ended.\\n\\nThe following elements failed:\\n${failedOSes.mkString(", ")}")
)
}
}
}
USBInstall.stage.showingProperty().listen2 { (subscription, showing) =>
// Note: the stage content is created before hiding the previous one, so
// we get hiding first, then showing.
if (showing) {
if (USBInstall.stage.getScene eq vbox.getScene) {
install()
}
else {
// Will probably never happen, but we don't want to install if the
// stage scene is not the expected one.
Dialogs.warning(
owner = Some(USBInstall.stage),
title = Some("Unexpected situation"),
contentText = Some("Displayed window does not appear to be the expected one (installation)!")
)
}
subscription.cancel()
}
}
()
}
private def installTask(cancellable: Cancellable): List[String] = {
def checkCancelled(): Unit =
cancellable.check {
activityLogArea.write("Cancelled")
ui.activity("Cancelled")
}
def switchLogWriter(previous: ThresholdLogLinePatternWriter, next: ThresholdLogLinePatternWriter): Unit = {
if (!(next eq previous)) {
USBInstall.addLogWriter(next)
USBInstall.removeLogWriter(previous)
}
}
ui.activity(s"Temp path[${InstallSettings.pathTemp}]")
ui.activity(s"ISO mount path[${InstallSettings.pathMountISO}]")
ui.activity(s"Partition mount path[${InstallSettings.pathMountPartition}]")
val (notsyslinux, syslinux) = profile.oses.partition(_.kind != OSKind.Syslinux)
val oses = notsyslinux ::: syslinux
val (previousTab, previousLogWriter, failedOses) =
oses.foldLeft[(Tab, ThresholdLogLinePatternWriter, List[String])]((installTab, installLogWriter, Nil)) { (previous, settings) =>
val (previousTab, previousLogWriter, previousFailedOSes) = previous
if (settings.isSelected) {
val osActivity = new TextArea()
osActivity.setWrapText(true)
val osLogArea = LogArea(osActivity)
ui.osActivity = Some(osLogArea)
val osLogWriter = osLogArea.msgWriter
osLogWriter.setPattern(Settings.core.logInstallPattern)
osLogWriter.setThreshold(Settings.core.logInstallThreshold.get.level)
subscriptions ::= Settings.core.logInstallThreshold.listen { v =>
osLogWriter.setThreshold(v.level)
}
switchLogWriter(previousLogWriter, osLogWriter)
val osTab = new Tab(settings.label)
JFXSystem.schedule {
val pane = new AnchorPane(osActivity)
AnchorPane.setTopAnchor(osActivity, 10.0)
AnchorPane.setRightAnchor(osActivity, 10.0)
AnchorPane.setBottomAnchor(osActivity, 10.0)
AnchorPane.setLeftAnchor(osActivity, 10.0)
osTab.setContent(pane)
logPanes.getTabs.add(osTab)
// Only select new tab if previous one is still selected
if (logPanes.getSelectionModel.getSelectedItem eq previousTab)
logPanes.getSelectionModel.select(osTab)
}
def resetAppender(): Unit = {
switchLogWriter(osLogWriter, installLogWriter)
}
val next = try {
val os = OSInstall(settings, ui, () => checkCancelled())
OSInstall.install(profile, os)
(osTab, osLogWriter, previousFailedOSes)
} catch {
case ex: Cancelled =>
resetAppender()
throw ex
case ex: Exception =>
logger.error(s"Failed to install ${settings.label}: ${ex.getMessage}", ex)
resetAppender()
def doSkip() =
(osTab, osLogWriter, previousFailedOSes :+ settings.label)
// @nowarn works around a scala 2.13.x false-positive
(Settings.core.componentInstallError.get: @nowarn) match {
case ErrorAction.Ask =>
Dialogs.error(
owner = Some(USBInstall.stage),
title = Some("Installation failed"),
contentText = Some(s"Failed to install ${settings.label}"),
ex = Some(ex)
)
val action = JFXSystem.await(askOnFailure())
if (action != ErrorAction.Skip)
throw InstallationException(s"Failed to install ${settings.label}", ex, notified = true)
doSkip()
case ErrorAction.Stop =>
throw InstallationException(s"Failed to install ${settings.label}", ex)
case ErrorAction.Skip =>
// Nothing to do except go to next OS
doSkip()
}
}
finally {
ui.osActivity = None
}
next
}
else previous
}
switchLogWriter(previousLogWriter, installLogWriter)
// Only get back to initial tab if previous one is still selected
if (logPanes.getSelectionModel.getSelectedItem eq previousTab) JFXSystem.schedule {
logPanes.getSelectionModel.select(installTab)
}
failedOses
}
private def askOnFailure(): ErrorAction.Value = {
val loader = new FXMLLoader(getClass.getResource("/fxml/installFailure.fxml"))
val options = loader.load[Parent]()
val controller = loader.getController[InstallFailureController]()
val stage = new Stage
stage.setTitle("Installation failure")
stage.setScene(new Scene(options))
stage.initModality(Modality.WINDOW_MODAL)
sfxStages.initOwner(stage, vbox.getScene.getWindow)
sfxStages.onStageReady(stage, first = false) {
sfxStages.setMinimumDimensions(stage)
}
stage.showAndWait()
val action = controller.getAction
if (controller.getAsDefault)
Settings.core.componentInstallError.set(action)
action
}
def onCancel(): Unit = {
// Note: we are in the JavaFX thread
ui.activity("Cancelling ...")
activityLogArea.write("Cancelling ...")
stepPane.next.disable = true
Option(cancellableFuture).fold {
taskFailed(Cancelled())
} {
_.cancel()
}
}
def onDone(): Unit = {
import javafx.stage.WindowEvent
// Note: we are in the JavaFX thread
USBInstall.stage.fireEvent(new WindowEvent(null, WindowEvent.WINDOW_CLOSE_REQUEST))
}
}
| suiryc/usbinstall | src/main/scala/usbinstall/controllers/InstallController.scala | Scala | gpl-3.0 | 11,266 |
/*
*/
package see
import org.junit._
/** Tests examples from documentation.
*/
//@Ignore
class ExampleTest extends TestCase {
@Test
def testObfuscated() {
println("Obfuscated")
val prog = """
and = 1;
xor = 2;
or = 3;
if = and or xor xor and or not or and and or xor;
"""
expect(prog, 3)
}
@Test
def testPatternSample() {
println("Pattern Sample")
val prog = """
pred(x) := { 10 < x < 100 };
a = 101;
x = 20;
y = x ?~ "abc" -> 0x41L :
'\d+\*a' -> a * int($~~~'(\d*).'@1) :
111 -> 112 :
a -> a + 1:
$ < 0 -> $ + 1:
pred -> 2 * $:
Number -> bigint($) :
? -> 0L ; // note semicolon after last alternative!
"""
expect(prog, 40)
}
@Test
def testReturnSample() {
println("Return Sample")
val prog = """
x = -1;
y = {
x <= 0 ?= 0;
log10(x); // not executed
}; // will end up here
10 * y;
// not here!
"""
expect(prog, 0)
}
@Test
def testAssertSample() {
println("Assert Sample")
val prog = """
x = 0;
{
y = 10 * {
x > 0 ?! "Undefined log10() operand";
log10(x); // not executed
};
y += 5 // not executed
}! // error catched here
0 // result returned in case of error
}
"""
expect(prog, 0)
}
} | acruise/see | src/test/scala/see/ExampleTest.scala | Scala | bsd-3-clause | 1,445 |
package pdi.jwt
object JwtHeader {
val DEFAULT_TYPE = "JWT"
def apply(
algorithm: Option[JwtAlgorithm] = None,
typ: Option[String] = None,
contentType: Option[String] = None,
keyId: Option[String] = None
) = new JwtHeader(algorithm, typ, contentType, keyId)
def apply(algorithm: Option[JwtAlgorithm]): JwtHeader = algorithm match {
case Some(algo) => JwtHeader(algo)
case _ => new JwtHeader(None, None, None, None)
}
def apply(algorithm: JwtAlgorithm): JwtHeader =
new JwtHeader(Option(algorithm), Option(DEFAULT_TYPE), None, None)
def apply(algorithm: JwtAlgorithm, typ: String): JwtHeader =
new JwtHeader(Option(algorithm), Option(typ), None, None)
def apply(algorithm: JwtAlgorithm, typ: String, contentType: String): JwtHeader =
new JwtHeader(Option(algorithm), Option(typ), Option(contentType), None)
def apply(algorithm: JwtAlgorithm, typ: String, contentType: String, keyId: String): JwtHeader =
new JwtHeader(Option(algorithm), Option(typ), Option(contentType), Option(keyId))
}
class JwtHeader(
val algorithm: Option[JwtAlgorithm],
val typ: Option[String],
val contentType: Option[String],
val keyId: Option[String]
) {
def toJson: String = JwtUtils.hashToJson(
Seq(
"typ" -> typ,
"alg" -> algorithm.map(_.name).orElse(Option("none")),
"cty" -> contentType,
"kid" -> keyId
).collect { case (key, Some(value)) =>
(key -> value)
}
)
/** Assign the type to the header */
def withType(typ: String): JwtHeader = {
JwtHeader(algorithm, Option(typ), contentType, keyId)
}
/** Assign the default type `JWT` to the header */
def withType: JwtHeader = this.withType(JwtHeader.DEFAULT_TYPE)
/** Assign a key id to the header */
def withKeyId(keyId: String): JwtHeader = {
JwtHeader(algorithm, typ, contentType, Option(keyId))
}
// equality code
def canEqual(other: Any): Boolean = other.isInstanceOf[JwtHeader]
override def equals(other: Any): Boolean = other match {
case that: JwtHeader =>
(that canEqual this) &&
algorithm == that.algorithm &&
typ == that.typ &&
contentType == that.contentType &&
keyId == that.keyId
case _ => false
}
override def hashCode(): Int = {
val state = Seq(algorithm, typ, contentType, keyId)
state.map(_.hashCode()).foldLeft(0)((a, b) => 31 * a + b)
}
override def toString: String = s"JwtHeader($algorithm, $typ, $contentType, $keyId)"
}
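// A hedged usage sketch: it assumes JwtAlgorithm.HS256 is one of the algorithm values
// provided elsewhere in this library, which is not shown in this file.
object JwtHeaderUsageSketch {
  def main(args: Array[String]): Unit = {
    val header = JwtHeader(JwtAlgorithm.HS256).withKeyId("key-1")
    // toJson only emits the fields that are present, so contentType is omitted here;
    // the output is roughly {"typ":"JWT","alg":"HS256","kid":"key-1"}.
    println(header.toJson)
  }
}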
| pauldijou/jwt-scala | core/src/main/scala/JwtHeader.scala | Scala | apache-2.0 | 2,513 |
package com.wordnik
package object swagger {
object annotations {
import scala.annotation.meta.field
@deprecated("because of swagger spec 1.2 this got renamed to ApiModelProperty", "2.2.2")
type ApiProperty = com.wordnik.swagger.runtime.annotations.ApiModelProperty @field
type ApiModelProperty = com.wordnik.swagger.runtime.annotations.ApiModelProperty @field
type ApiModel = com.wordnik.swagger.runtime.annotations.ApiModel
type XmlRootElement = javax.xml.bind.annotation.XmlRootElement
type ApiEnum = com.wordnik.swagger.runtime.annotations.ApiEnum
@deprecated("In swagger spec 1.2 this was replaced with com.wordnik.swagger.ResponseMessage", "2.2.2")
type Error = com.wordnik.swagger.ResponseMessage[String]
}
private[swagger] implicit class RicherString(s: String) {
def isBlank = s == null || s.trim.isEmpty
def nonBlank = !isBlank
def blankOption = if (isBlank) None else Some(s)
def toCheckboxBool = s.toUpperCase match {
case "ON" | "TRUE" | "OK" | "1" | "CHECKED" | "YES" | "ENABLE" | "ENABLED" => true
case _ => false
}
}
}
| swagger-api/swagger-scala | src/main/scala/com/wordnik/swagger/package.scala | Scala | apache-2.0 | 1,119 |
/*
* Copyright 2014 http4s.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.http4s.blaze.http.http2
import java.nio.ByteBuffer
import org.http4s.blaze.http.HttpClientSession
import org.http4s.blaze.http.HttpClientSession.Status
import org.http4s.blaze.pipeline.Command.EOF
import org.http4s.blaze.pipeline.{HeadStage, LeafBuilder, TailStage}
import org.http4s.blaze.util.{BufferTools, Execution, SerialExecutionContext}
import scala.annotation.tailrec
import scala.concurrent.{ExecutionContext, Future, Promise}
import scala.concurrent.duration.Duration
import scala.util.{Failure, Success}
/** Representation of the HTTP/2 connection.
*
* @note
* the TailStage needs to be ready to go as this session will start reading from the channel
* immediately.
*/
private final class ConnectionImpl(
tailStage: TailStage[ByteBuffer],
val localSettings: Http2Settings,
val remoteSettings: MutableHttp2Settings,
flowStrategy: FlowStrategy,
inboundStreamBuilder: Option[Int => LeafBuilder[StreamFrame]],
parentExecutor: ExecutionContext
) extends SessionCore
with Connection {
// Shortcut methods
private[this] def isClient = inboundStreamBuilder.isEmpty
private[this] val logger = org.log4s.getLogger
private[this] val closedPromise = Promise[Unit]()
private[this] val frameDecoder = new FrameDecoder(
localSettings,
new SessionFrameListener(
this,
isClient,
new HeaderDecoder(
maxHeaderListSize = localSettings.maxHeaderListSize,
discardOverflowHeaders = true,
maxTableSize = localSettings.headerTableSize
)
)
)
@volatile
private[this] var currentState: Connection.State = Connection.Running
private[this] var sentGoAway = false
override val serialExecutor = new SerialExecutionContext(parentExecutor) {
override def reportFailure(cause: Throwable): Unit =
invokeShutdownWithError(Some(cause), "SerialExecutor")
}
override val http2Encoder =
new FrameEncoder(remoteSettings, new HeaderEncoder(remoteSettings.maxHeaderListSize))
override val idManager: StreamIdManager = StreamIdManager(isClient)
override val writeController =
new WriteControllerImpl(this, 64 * 1024, tailStage)
override val pingManager: PingManager = new PingManager(this)
override val sessionFlowControl: SessionFlowControl =
new SessionFlowControlImpl(this, flowStrategy)
override val streamManager: StreamManager =
new StreamManagerImpl(this, inboundStreamBuilder)
// start read loop and add shutdown hooks
readLoop(BufferTools.emptyBuffer)
// Make sure we disconnect from the reactor once the session is done
onClose.onComplete(_ => tailStage.closePipeline(None))(parentExecutor)
private[this] def readLoop(remainder: ByteBuffer): Unit =
// the continuation must be run in the sessionExecutor
tailStage
.channelRead()
.onComplete {
// This completion is run in the sessionExecutor so its safe to
// mutate the state of the session.
case Failure(ex) => invokeShutdownWithError(Some(ex), "readLoop-read")
case Success(next) =>
logger.debug(s"Read data: $next")
val data = BufferTools.concatBuffers(remainder, next)
logger.debug("Handling inbound data.")
@tailrec
def go(): Unit =
frameDecoder.decodeBuffer(data) match {
case Continue => go()
case BufferUnderflow => readLoop(data)
case Error(ex: Http2StreamException) =>
// If the stream is still active, it will write the RST.
// Otherwise, we need to do it here.
streamManager.get(ex.stream) match {
case Some(stream) =>
stream.doCloseWithError(Some(ex))
case None =>
val msg = FrameSerializer.mkRstStreamFrame(ex.stream, ex.code)
writeController.write(msg)
()
}
case Error(ex) =>
invokeShutdownWithError(Some(ex), "readLoop-decode")
}
go()
}(serialExecutor)
override def quality: Double =
// Note that this is susceptible to memory visibility issues
// but that's okay since this is intrinsically racy.
if (state.closing || !idManager.unusedOutboundStreams) 0.0
else {
val maxConcurrent = remoteSettings.maxConcurrentStreams
val currentStreams = activeStreams
if (maxConcurrent == 0 || maxConcurrent <= currentStreams) 0.0
else 1.0 - (currentStreams.toDouble / maxConcurrent.toDouble)
}
override def status: Status =
state match {
case Connection.Draining => HttpClientSession.Busy
case Connection.Closed => HttpClientSession.Closed
case Connection.Running =>
if (quality == 0.0) HttpClientSession.Busy
else HttpClientSession.Ready
}
override def activeStreams: Int = streamManager.size
override def ping(): Future[Duration] = {
val p = Promise[Duration]()
serialExecutor.execute(new Runnable {
def run(): Unit = {
p.completeWith(pingManager.ping())
()
}
})
p.future
}
override def drainSession(gracePeriod: Duration): Future[Unit] = {
serialExecutor.execute(new Runnable {
def run(): Unit = invokeDrain(gracePeriod)
})
onClose
}
override def newOutboundStream(): HeadStage[StreamFrame] =
streamManager.newOutboundStream()
override def onClose: Future[Unit] = closedPromise.future
override def state: Connection.State = currentState
// Must be called from within the session executor.
// If an error is provided, a GOAWAY is written and we wait for the writeController to
// close the connection. If not, we do it.
override def invokeShutdownWithError(ex: Option[Throwable], phase: String): Unit =
if (state != Connection.Closed) {
currentState = Connection.Closed
val http2Ex: Option[Http2Exception] = ex match {
case None | Some(EOF) => None
case Some(e: Http2Exception) => Some(e)
case Some(other) =>
logger.warn(other)(s"Shutting down HTTP/2 with unhandled exception in phase $phase")
Some(Http2Exception.INTERNAL_ERROR.goaway("Unhandled internal exception"))
}
streamManager.forceClose(http2Ex) // Fail hard
sendGoAway(http2Ex.getOrElse(Http2Exception.NO_ERROR.goaway(s"No Error")))
writeController
.close()
.onComplete { _ =>
tailStage.closePipeline(None)
ex match {
case Some(ex) => closedPromise.failure(ex)
case None => closedPromise.success(())
}
}(serialExecutor)
}
override def invokeDrain(gracePeriod: Duration): Unit =
if (currentState == Connection.Running) {
// Start draining: send a GOAWAY and set a timer to force shutdown
val noError = Http2Exception.NO_ERROR.goaway(s"Session draining for duration $gracePeriod")
sendGoAway(noError)
// Drain the StreamManager. We are going to reject our own outbound streams too
doDrain(idManager.lastOutboundStream, noError)
val work = new Runnable {
// We already drained so no error necessary
def run(): Unit =
invokeShutdownWithError(None, s"drainSession($gracePeriod)")
}
// We don't want to leave the timer set since we don't know how long it will live
val c = Execution.scheduler.schedule(work, serialExecutor, gracePeriod)
onClose.onComplete(_ => c.cancel())(Execution.directec)
}
override def invokeGoAway(lastHandledOutboundStream: Int, error: Http2SessionException): Unit = {
// We drain all the streams so we send the remote peer a GOAWAY as well
sendGoAway(Http2Exception.NO_ERROR.goaway(s"Session received GOAWAY with code ${error.code}"))
doDrain(lastHandledOutboundStream, error)
}
private[this] def doDrain(lastHandledOutboundStream: Int, error: Http2SessionException): Unit =
if (currentState != Connection.Closed) {
currentState = Connection.Draining
// Drain the `StreamManager` and then the `WriteController`, then close up.
streamManager
.drain(lastHandledOutboundStream, error)
.flatMap(_ => writeController.close())(serialExecutor)
.onComplete(_ => invokeShutdownWithError(None, /* unused */ ""))(serialExecutor)
}
private[this] def sendGoAway(ex: Http2Exception): Unit =
if (!sentGoAway) {
sentGoAway = true
val frame = FrameSerializer.mkGoAwayFrame(idManager.lastInboundStream, ex)
writeController.write(frame)
()
}
}
| http4s/blaze | http/src/main/scala/org/http4s/blaze/http/http2/ConnectionImpl.scala | Scala | apache-2.0 | 9,187 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.streaming
import java.text.SimpleDateFormat
import com.codahale.metrics.{Gauge, MetricRegistry}
import org.apache.spark.internal.Logging
import org.apache.spark.metrics.source.{Source => CodahaleSource}
import org.apache.spark.sql.catalyst.util.DateTimeUtils
import org.apache.spark.sql.streaming.StreamingQueryProgress
/**
* Serves metrics from a [[org.apache.spark.sql.streaming.StreamingQuery]] to
* Codahale/DropWizard metrics
*/
class MetricsReporter(
stream: StreamExecution,
override val sourceName: String) extends CodahaleSource with Logging {
override val metricRegistry: MetricRegistry = new MetricRegistry
// Metric names should not have . in them, so that all the metrics of a query are identified
// together in Ganglia as a single metric group
registerGauge("inputRate-total", _.inputRowsPerSecond, 0.0)
registerGauge("processingRate-total", _.processedRowsPerSecond, 0.0)
registerGauge("latency", _.durationMs.getOrDefault("triggerExecution", 0L).longValue(), 0L)
private val timestampFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'") // ISO8601
timestampFormat.setTimeZone(DateTimeUtils.getTimeZone("UTC"))
registerGauge("eventTime-watermark",
progress => convertStringDateToMillis(progress.eventTime.get("watermark")), 0L)
registerGauge("states-rowsTotal", _.stateOperators.map(_.numRowsTotal).sum, 0L)
registerGauge("states-usedBytes", _.stateOperators.map(_.memoryUsedBytes).sum, 0L)
private def convertStringDateToMillis(isoUtcDateStr: String) = {
if (isoUtcDateStr != null) {
timestampFormat.parse(isoUtcDateStr).getTime
} else {
0L
}
}
private def registerGauge[T](
name: String,
f: StreamingQueryProgress => T,
default: T): Unit = {
synchronized {
metricRegistry.register(name, new Gauge[T] {
override def getValue: T = Option(stream.lastProgress).map(f).getOrElse(default)
})
}
}
}
| ueshin/apache-spark | sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/MetricsReporter.scala | Scala | apache-2.0 | 2,786 |
package com.codebook.akka.actor.message
case class SetMessage(key: String, value: Object)
| flopezlasanta/akka-services | src/main/scala/com/codebook/akka/actor/message/SetMessage.scala | Scala | mit | 91 |
package se.lu.nateko.cp.meta.onto.labeler
import org.semanticweb.owlapi.model.{IRI => OWLIRI, _}
import org.eclipse.rdf4j.model.IRI
import se.lu.nateko.cp.meta.instanceserver.InstanceServer
import org.eclipse.rdf4j.model.Literal
class MultiComponentIndividualLabeler(
components: Seq[DisplayComponent],
inner: InstanceLabeler
) extends InstanceLabeler {
private val compMakers = components.map{
case DataPropComponent(prop) => getComponent(prop) _
case ObjectPropComponent(prop) => getComponent(prop) _
case ConstantComponent(value) => (_: IRI, _: InstanceServer) => value
}
override def getLabel(instUri: IRI, instServer: InstanceServer): String = {
val labelComponents = compMakers.map(_(instUri, instServer))
val nonEmptyExists: Boolean = labelComponents.zip(components).exists{
case (_, ConstantComponent(_)) => false
case (label, _) => label.nonEmpty
}
if(nonEmptyExists)
Labeler.joinComponents(labelComponents)
else
super.getLabel(instUri, instServer)
}
private def getComponent(propIri: OWLIRI)(instUri: IRI, instServer: InstanceServer): String = {
val propUri = toUri(propIri, instServer)
val values = instServer.getValues(instUri, propUri).collect{
case literal: Literal => literal.getLabel
}
Labeler.joinMultiValues(values)
}
private def getComponent(prop: OWLObjectProperty)(instUri: IRI, instServer: InstanceServer): String = {
val propUri = toUri(prop.getIRI, instServer)
val values = instServer.getValues(instUri, propUri).collect{
case uri: IRI => inner.getLabel(uri, instServer)
}
Labeler.joinMultiValues(values)
}
private def toUri(prop: OWLIRI, instServer: InstanceServer): IRI =
instServer.factory.createIRI(prop.toURI.toString)
}
| ICOS-Carbon-Portal/meta | src/main/scala/se/lu/nateko/cp/meta/onto/labeler/MultiComponentIndividualLabeler.scala | Scala | gpl-3.0 | 1,724 |
package io.github.raptros.bson
import com.mongodb.{BasicDBList, DBObject}
import scalaz._
import scalaz.syntax.id._
import scalaz.syntax.std.boolean._
import scalaz.syntax.validation._
import scalaz.syntax.traverse._
import scala.reflect._
import org.joda.time.DateTime
import scalaz.std.list._
/** the typeclass of things that can be decoded from DBObjects.
* @tparam A not certain what effect the covariance on A has.
*/
trait DecodeBson[+A] {
/** this is the method that needs to be implemented to decode.
* @param dbo the object to attempt to decode an A from.
* @return a [[DecodeResult]] with A as the success.
*/
def decode(dbo: DBObject): DecodeResult[A]
/** an alias for [[decode]] */
def apply(dbo: DBObject): DecodeResult[A] = decode(dbo)
/** derives a new DecodeBson from this one by transforming the values decoded from this one
* @param f a function that transforms values produced by this DecodeBson
* @tparam B the type that `f` returns
* @return a DecodeBson for `B`
*/
def map[B](f: A => B): DecodeBson[B] = DecodeBson {
apply(_) map f
}
/** derives a new DecodeBson by transforming decoded values using a function that produces a new DecodeBson.
* @param f a function that produces a DecodeBson using the values produced by this one.
* the decoder returned by f will be applied to the same dbo that this decoder extracted the value it passed to `f` from.
* the value returned by applying that new decoder is the value that the new decoder will return.
* @tparam B a type
* @return a decoder for B
*/
def flatMap[B](f: A => DecodeBson[B]): DecodeBson[B] = DecodeBson { dbo =>
apply(dbo) flatMap { r => f(r)(dbo) }
}
/** creates a decoder that tries this decoder first and falls back to another decoder if it fails. Has an alias [[|||]].
* @param x a decoder that will be tried if this one fails
* @tparam B a supertype of A that is common to both this decoder and `x`. this is the return type of the new decoder.
* @return a decoder that returns the first successfully decoded value
*/
def orElse[B >: A](x: => DecodeBson[B]): DecodeBson[B] = DecodeBson { dbo =>
apply(dbo) orElse x(dbo)
}
/** alias for [[orElse]] */
def |||[B >: A](x: => DecodeBson[B]): DecodeBson[B] = orElse(x)
/** creates a decoder that applies this decoder and `x` and returns both results together. */
def &&&[B](x: DecodeBson[B]): DecodeBson[(A, B)] = DecodeBson { dbo =>
for {
a <- apply(dbo)
b <- x(dbo)
} yield (a, b)
}
/** creates a decoder that applies a predicate to a dbo before it tries to apply this decoder.
* @param f a predicate on DBObjects.
* @param msg a function that takes the DBObject that failed the predicate, and produces a [[DecodeError]] explaining what went wrong.
*/
def validate(f: DBObject => Boolean, msg: DBObject => DecodeError) = DecodeBson { dbo =>
if (f(dbo))
decode(dbo)
else
msg(dbo).wrapNel.left
}
/** creates a decoder that only applies this decoder if the dbo has exactly `count` fields */
def validateFields(count: Int) = validate(_.keySet.size == count,
dbo => WrongFieldCount(count, dbo.keySet.size))
}
object DecodeBson extends DecodeBsons {
/** constructs [[DecodeBson]][A] by using `f` as the implementation of `decode` */
def apply[A](f: DBObject => DecodeResult[A]): DecodeBson[A] = new DecodeBson[A] {
def decode(dbo: DBObject): DecodeResult[A] = f(dbo)
}
}
/** this contains:
* - a couple DecodeBson instances
* - two Applicative implementations (one for DecodeResult, one for the [[scalaz.Validation]] equivalent)
* - a bunch of bdecode(n)f methods, which construct decoders from the combination of a function that constructs a value from types that have
* [[DecodeBsonField]] instances, and a list of field names to be extracted. n is the arity.
* - a bunch of bdecodeTuple(n) methods, which construct decoders that extract the named fields into a tuple.
*/
trait DecodeBsons extends GeneratedDecodeBsons {
val ApD = Applicative[DecodeResult]
type DecodeResultV[A] = ValidationNel[DecodeError, A]
val ApV = Applicative[DecodeResultV]
import scala.reflect._
protected def tryCast[A: ClassTag](v: Any): DecodeError \/ A =
(v != null && !(classTag[A].runtimeClass isAssignableFrom v.getClass)) either WrongType(classTag[A].runtimeClass, v.getClass) or v.asInstanceOf[A]
implicit val dboDecodeBson: DecodeBson[DBObject] = DecodeBson { _.right }
implicit def listDecodeBson[A](implicit d: DecodeBsonField[A]) = DecodeBson { dbo =>
tryCast[BasicDBList](dbo) leftMap { NonEmptyList(_) } flatMap { dbl =>
//first: decode each item in the list
val decodes = (0 until dbl.size()) map { idx => d(idx.toString, dbl).validation }
//second: sequence the decode results - takes a list of decode results and makes it a decode result of a list
decodes.toList.sequence[DecodeResultV, A].disjunction
}
}
def bdecode1f[A, X](fxn: (A) => X)(ak: String)(implicit decodea: DecodeBsonField[A]): DecodeBson[X] =
DecodeBson { dbo => decodea(ak, dbo) map fxn }
}
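// A hedged usage sketch built only from definitions in this file: dboDecodeBson, map,
// and validateFields. BasicDBObject comes from the MongoDB driver (only DBObject and
// BasicDBList are imported above); the field names are illustrative.
object DecodeBsonUsageSketch {
  import com.mongodb.BasicDBObject

  // Accepts any DBObject with exactly two fields and decodes it to its key count.
  val exactlyTwoFields: DecodeBson[Int] =
    DecodeBson.dboDecodeBson.map(_.keySet.size).validateFields(2)

  def demo(): DecodeResult[Int] = {
    val dbo = new BasicDBObject("a", "x").append("b", "y")
    // Succeeds with 2; an object with any other field count would produce a
    // WrongFieldCount error instead.
    exactlyTwoFields.decode(dbo)
  }
}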
| raptros/the-bson | core/src/main/scala/io/github/raptros/bson/DecodeBson.scala | Scala | bsd-3-clause | 5,168 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License,
*
* Contributors:
* Hao Jiang - initial API and implementation
*
*/
package edu.uchicago.cs.encsel.dataset.feature
import java.io._
import java.net.URI
import java.nio.file.{Files, Paths}
import edu.uchicago.cs.encsel.dataset.column.Column
import org.slf4j.LoggerFactory
import scala.collection.mutable.ArrayBuffer
import scala.io.Source
object Features {
val logger = LoggerFactory.getLogger(getClass)
val extractors = new ArrayBuffer[FeatureExtractor]()
install(ParquetEncFileSize)
install(Sparsity)
install(Entropy)
install(Length)
install(Distinct)
install(new Sortness(50))
install(new Sortness(100))
install(new Sortness(200))
install(AdjInvertPair)
def install(fe: FeatureExtractor) = {
extractors += fe
}
def extract(input: Column): Iterable[Feature] = {
extractors.flatMap(ex => {
try {
ex.extract(input)
} catch {
case e: Exception => {
logger.error("Exception while executing %s on %s:%s, skipping"
.format(ex.getClass.getSimpleName, input.origin, input.colName), e)
Iterable[Feature]()
}
}
})
}
def extract(input: Column,
filter: Iterator[String] => Iterator[String],
prefix: String): Iterable[Feature] = {
// Filter the column file into a new file at the derived URI
val filteredURI = new URI(input.colFile.toString + "." + prefix)
filterFile(input.colFile, filteredURI, filter)
val filteredColumn = new Column(input.origin, input.colIndex, input.colName, input.dataType)
filteredColumn.colFile = filteredURI
val extracted = extractors.filter(_.supportFilter).flatMap(ex => {
try {
ex.extract(filteredColumn, prefix)
} catch {
case e: Exception => {
logger.error("Exception while executing %s on %s:%s, skipping"
.format(ex.getClass.getSimpleName, input.origin, input.colName), e)
Iterable[Feature]()
}
}
})
// Delete the filtered file
Files.delete(Paths.get(filteredURI))
extracted
}
def filterFile(src: URI, target: URI, filter: Iterator[String] => Iterator[String]): Unit = {
val filteredWriter = new PrintWriter(new FileOutputStream(new File(target)))
val source = Source.fromFile(src)
try {
filter(source.getLines()).foreach(filteredWriter.println)
} finally {
source.close()
filteredWriter.close()
}
}
} | harperjiang/enc-selector | src/main/scala/edu/uchicago/cs/encsel/dataset/feature/Features.scala | Scala | apache-2.0 | 3,213 |
package com.twitter.app
import com.twitter.finagle.util.loadServiceIgnoredPaths
import java.io.{File, IOException}
import java.net.{URI, URISyntaxException, URL, URLClassLoader}
import java.nio.charset.MalformedInputException
import java.nio.file.Paths
import java.util.jar.{JarEntry, JarFile}
import scala.collection.mutable
import scala.collection.mutable.Builder
import scala.io.Source
import scala.jdk.CollectionConverters._
private[app] object ClassPath {
val IgnoredPackages: Set[String] = Set(
"apple/",
"ch/epfl/",
"com/apple/",
"com/oracle/",
"com/sun/",
"java/",
"javax/",
"scala/",
"sun/",
"sunw/"
)
sealed abstract class Info(path: String)
case class FlagInfo(path: String) extends Info(path) {
val className: String = {
val prefixed =
if (path.endsWith(".class")) path.dropRight(6)
else path
prefixed.replace('/', '.')
}
}
case class LoadServiceInfo(path: String, iface: String, lines: Seq[String]) extends Info(path)
}
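// A small sketch of how FlagInfo derives a class name from a classpath entry path;
// it is pure string manipulation, so it can be exercised without scanning a real classpath.
private[app] object ClassPathInfoSketch {
  def demo(): Unit = {
    val info = ClassPath.FlagInfo("com/twitter/app/GlobalFlag$.class")
    assert(info.className == "com.twitter.app.GlobalFlag$")
  }
}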
/**
* Inspect and load the classpath. Inspired by Guava's ClassPath
* utility.
*
* @note This is not a generic facility, rather it is designed
* specifically to support GlobalFlag and LoadService.
*/
private[app] sealed abstract class ClassPath[CpInfo <: ClassPath.Info] {
protected def ignoredPackages: Set[String]
def browse(loader: ClassLoader): Seq[CpInfo] = {
val buf = Vector.newBuilder[CpInfo]
val seenUris = mutable.HashSet[URI]()
for ((uri, loader) <- getEntries(loader)) {
browseUri0(uri, loader, buf, seenUris)
seenUris += uri
}
buf.result()
}
// Note: In JDK 9+ URLClassLoader is no longer the default ClassLoader.
// This method allows us to scan URLs on the class path, but it does NOT
// include the ModulePath introduced in JDK 9. This method is used as a
// bridge between JDK 8 and JDK 9+.
// The method used here is attributed to https://stackoverflow.com/a/49557901.
// TODO - add support for the ModulePath after dropping JDK 8 support.
private[this] def urlsFromClasspath(): Array[URL] = {
val classpath: String = System.getProperty("java.class.path")
classpath.split(File.pathSeparator).map { (pathEntry: String) =>
Paths.get(pathEntry).toAbsolutePath().toUri().toURL
}
}
// package protected for testing
private[app] def getEntries(loader: ClassLoader): Seq[(URI, ClassLoader)] = {
val parent = Option(loader.getParent)
val ownURIs: Vector[(URI, ClassLoader)] = for {
urlLoader <- Vector(loader).map {
case urlClassLoader: URLClassLoader => urlClassLoader
case cl => new URLClassLoader(urlsFromClasspath(), cl)
}
urls <- Option(urlLoader.getURLs()).toVector
url <- urls if url != null
} yield (url.toURI -> loader)
val p = parent.toSeq.flatMap(getEntries)
ownURIs ++ p
}
// package protected for testing
private[app] def browseUri(
uri: URI,
loader: ClassLoader,
buf: Builder[CpInfo, Seq[CpInfo]]
): Unit =
browseUri0(uri, loader, buf, mutable.Set[URI]())
private[this] def browseUri0(
uri: URI,
loader: ClassLoader,
buf: Builder[CpInfo, Seq[CpInfo]],
history: mutable.Set[URI]
): Unit = {
if (!history.contains(uri)) {
history.add(uri)
if (uri.getScheme != "file")
return
val f = new File(uri)
if (!(f.exists() && f.canRead))
return
if (f.isDirectory)
browseDir(f, loader, "", buf)
else
browseJar(f, loader, buf, history)
}
}
private[this] def browseDir(
dir: File,
loader: ClassLoader,
prefix: String,
buf: Builder[CpInfo, Seq[CpInfo]]
): Unit = {
if (ignoredPackages.contains(prefix))
return
for (f <- dir.listFiles)
if (f.isDirectory && f.canRead) {
browseDir(f, loader, prefix + f.getName + "/", buf)
} else
processFile(prefix, f, buf)
}
protected def processFile(prefix: String, file: File, buf: Builder[CpInfo, Seq[CpInfo]]): Unit
private def browseJar(
file: File,
loader: ClassLoader,
buf: Builder[CpInfo, Seq[CpInfo]],
seenUris: mutable.Set[URI]
): Unit = {
val jarFile =
try new JarFile(file)
catch {
case _: IOException => return // not a Jar file
}
try {
for (uri <- jarClasspath(file, jarFile.getManifest)) {
browseUri0(uri, loader, buf, seenUris)
}
for {
e <- jarFile.entries.asScala if !e.isDirectory
n = e.getName if !ignoredPackages.exists(p => n.startsWith(p))
} {
processJarEntry(jarFile, e, buf)
}
} finally {
try jarFile.close()
catch {
case _: IOException =>
}
}
}
protected def processJarEntry(
jarFile: JarFile,
entry: JarEntry,
buf: Builder[CpInfo, Seq[CpInfo]]
): Unit
private def jarClasspath(jarFile: File, manifest: java.util.jar.Manifest): Seq[URI] =
for {
m <- Option(manifest).toSeq
attr <- Option(m.getMainAttributes.getValue("Class-Path")).toSeq
el <- attr.split(" ").toSeq
uri <- uriFromJarClasspath(jarFile, el)
} yield uri
private def uriFromJarClasspath(jarFile: File, path: String): Option[URI] =
try {
val uri = new URI(path)
if (uri.isAbsolute)
Some(uri)
else
Some(new File(jarFile.getParentFile, path.replace('/', File.separatorChar)).toURI)
} catch {
case _: URISyntaxException => None
}
}
private[app] class FlagClassPath extends ClassPath[ClassPath.FlagInfo] {
protected def ignoredPackages: Set[String] =
ClassPath.IgnoredPackages
private[this] def isClass(name: String): Boolean =
name.endsWith(".class") && (name.endsWith("$.class") || !name.contains("$"))
protected def processFile(
prefix: String,
file: File,
buf: Builder[ClassPath.FlagInfo, Seq[ClassPath.FlagInfo]]
): Unit = {
val name = file.getName
if (isClass(name)) {
buf += ClassPath.FlagInfo(prefix + name)
}
}
protected def processJarEntry(
jarFile: JarFile,
entry: JarEntry,
buf: Builder[ClassPath.FlagInfo, Seq[ClassPath.FlagInfo]]
): Unit = {
val name = entry.getName
if (isClass(name)) {
buf += ClassPath.FlagInfo(name)
}
}
}
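/*
 * Illustrative sketch (not part of the original file): how a caller inside
 * com.twitter.app might list candidate flag classes. Only `browse` and
 * `FlagInfo.className` come from the code above; the printing loop and the
 * example path are assumptions added for clarity.
 *
 * {{{
 * val flagInfos: Seq[ClassPath.FlagInfo] =
 *   new FlagClassPath().browse(Thread.currentThread.getContextClassLoader)
 * flagInfos.foreach { info =>
 *   // e.g. "com/twitter/app/MyFlag$.class" becomes "com.twitter.app.MyFlag$"
 *   println(info.className)
 * }
 * }}}
 */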
private[app] class LoadServiceClassPath extends ClassPath[ClassPath.LoadServiceInfo] {
protected def ignoredPackages: Set[String] =
ClassPath.IgnoredPackages ++ loadServiceIgnoredPaths()
private[this] def ifaceOfName(name: String): Option[String] =
if (!name.contains("META-INF")) None
else
name.split("/").takeRight(3) match {
case Array("META-INF", "services", iface) => Some(iface)
case _ => None
}
private[app] def readLines(source: Source): Seq[String] = {
try {
source.getLines().toVector.flatMap { line =>
val commentIdx = line.indexOf('#')
val end = if (commentIdx != -1) commentIdx else line.length
val str = line.substring(0, end).trim
if (str.isEmpty) Nil else Seq(str)
}
} catch {
case ex: MalformedInputException => Nil /* skip malformed files (e.g. non UTF-8) */
} finally {
source.close()
}
}
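  /*
   * Illustrative sketch (not part of the original file): readLines strips
   * trailing '#' comments and blank lines. Source.fromString and the class
   * names are only for the example; real callers pass file or jar-entry
   * sources from within the com.twitter.app package.
   *
   * {{{
   * val src = scala.io.Source.fromString(
   *   "com.example.FooImpl # default binding\n\ncom.example.BarImpl\n")
   * new LoadServiceClassPath().readLines(src)
   * // => Vector("com.example.FooImpl", "com.example.BarImpl")
   * }}}
   */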
protected def processFile(
prefix: String,
file: File,
buf: Builder[ClassPath.LoadServiceInfo, Seq[ClassPath.LoadServiceInfo]]
): Unit = {
for (iface <- ifaceOfName(prefix + file.getName)) {
val source = Source.fromFile(file, "UTF-8")
val lines = readLines(source)
buf += ClassPath.LoadServiceInfo(prefix + file.getName, iface, lines)
}
}
protected def processJarEntry(
jarFile: JarFile,
entry: JarEntry,
buf: Builder[ClassPath.LoadServiceInfo, Seq[ClassPath.LoadServiceInfo]]
): Unit = {
for (iface <- ifaceOfName(entry.getName)) {
val source = Source.fromInputStream(jarFile.getInputStream(entry), "UTF-8")
val lines = readLines(source)
buf += ClassPath.LoadServiceInfo(entry.getName, iface, lines)
}
}
}
| twitter/util | util-app/src/main/scala/com/twitter/app/ClassPath.scala | Scala | apache-2.0 | 8,036 |
package org.jetbrains.plugins.scala
package codeInspection
package collections
import com.intellij.testFramework.EditorTestUtil
/**
* @author Nikolay.Tropin
*/
class MapFlattenTest extends OperationsOnCollectionInspectionTest {
import EditorTestUtil.{SELECTION_END_TAG => END, SELECTION_START_TAG => START}
override protected val classOfInspection: Class[_ <: OperationOnCollectionInspection] =
classOf[MapFlattenInspection]
override protected val hint: String =
InspectionBundle.message("replace.map.flatten.with.flatMap")
def test1(): Unit = {
doTest(
s"Seq().${START}map(Seq(_)).flatten$END",
"Seq().map(Seq(_)).flatten",
"Seq().flatMap(Seq(_))"
)
}
def testInfix(): Unit = {
doTest(
s"Seq() ${START}map (x => Seq(x)) flatten$END",
"Seq() map (x => Seq(x)) flatten",
"Seq() flatMap (x => Seq(x))"
)
}
def testArray(): Unit = {
doTest(
s"Array().${START}map(Seq(_)).flatten$END",
"Array().map(Seq(_)).flatten",
"Array().flatMap(Seq(_))"
)
}
def testArrayInside(): Unit = {
doTest(
s"Seq(1).${START}map(x => Array(x)).flatten$END",
"Seq(1).map(x => Array(x)).flatten",
"Seq(1).flatMap(x => Array(x))"
)
}
def testStringInside(): Unit = {
doTest(
s"Seq(1).${START}map(_.toString).flatten$END",
"Seq(1).map(_.toString).flatten",
"Seq(1).flatMap(_.toString)"
)
}
def testSCL10574(): Unit = {
checkTextHasNoErrors("Seq(1).map(Option.apply).flatten")
}
def testSCL12675(): Unit = {
checkTextHasNoErrors(
"""
|val r = Map(1 -> List(1,2,3), 2 -> List(3,4,5))
|r.map(n => n._2.map(z => (n._1, z))).flatten
""".stripMargin)
}
def testSCL10483(): Unit ={
checkTextHasNoErrors(
"""
|def f(a: String, b: String) = a + b
|val seq = Seq(("foo", "bar"))
|seq.map((f _).tupled).flatten.headOption
""".stripMargin)
}
}
| jastice/intellij-scala | scala/scala-impl/test/org/jetbrains/plugins/scala/codeInspection/collections/MapFlattenTest.scala | Scala | apache-2.0 | 1,970 |
/*
* Copyright 2011-2018 GatlingCorp (http://gatling.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gatling.recorder.scenario.template
import io.gatling.BaseSpec
import io.gatling.recorder.scenario.{ RequestBodyParams, RequestElement }
import io.netty.handler.codec.http.EmptyHttpHeaders
class RequestTemplateSpec extends BaseSpec {
val url = "http://gatling.io/path1/file1"
val simulationClass = "Simulation Class"
def mockRequestBody(paramName: String, paramValue: String) = RequestBodyParams(List((paramName, paramValue)))
def mockRequestElement(paramName: String, paramValue: String) =
new RequestElement(url, "post", EmptyHttpHeaders.INSTANCE, Some(mockRequestBody(paramName, paramValue)), None, 200, Nil)
"request template" should "not wrap with joinStrings strings shorter than 65535 characters" in {
val mockedRequest1 = mockRequestElement("name", "short")
val res1 = RequestTemplate.render(simulationClass, mockedRequest1, new ExtractedUris(Seq(mockedRequest1)))
res1 should include(".formParam(\\"name\\", \\"short\\")")
val mockedRequest2 = mockRequestElement("name", "1" * 65534)
val res2 = RequestTemplate.render(simulationClass, mockedRequest2, new ExtractedUris(Seq(mockedRequest2)))
res2 should not include "Seq"
res2 should not include ".mkString"
}
it should "wrap with joinStrings strings with not less than 65535 characters" in {
val mockedRequest = mockRequestElement("name", "a" * 65535)
val res = RequestTemplate.render(simulationClass, mockedRequest, new ExtractedUris(Seq(mockedRequest)))
res should include("Seq(\\"" + "a" * 65534 + "\\", \\"a\\").mkString")
}
}
| wiacekm/gatling | gatling-recorder/src/test/scala/io/gatling/recorder/scenario/template/RequestTemplateSpec.scala | Scala | apache-2.0 | 2,178 |
package mbilski.spray.hmac
case class HmacData(uuid: String, hash: String)
trait Authentication[A] { this: Signer =>
def authenticate(hmac: HmacData, uri: String): Option[A] = {
val (account, secret) = accountAndSecret(hmac.uuid)
for (a <- account; s <- secret if valid(hmac.hash, s, uri)) yield a
}
def accountAndSecret(uuid: String): (Option[A], Option[String])
}
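/*
 * Illustrative sketch (not part of the original file): a minimal in-memory
 * implementation. Account, the user map and the uuid below are invented for
 * the example, and Signer is assumed to contribute a concrete
 * `valid(hash, secret, uri)` used by `authenticate` above.
 *
 * case class Account(uuid: String, name: String)
 *
 * class InMemoryAuth extends Authentication[Account] with Signer {
 *   private val users = Map("u-1" -> (Account("u-1", "alice"), "secret-key"))
 *
 *   def accountAndSecret(uuid: String): (Option[Account], Option[String]) =
 *     users.get(uuid) match {
 *       case Some((account, secret)) => (Some(account), Some(secret))
 *       case None                    => (None, None)
 *     }
 * }
 */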
| mbilski/spray-hmac | src/main/scala/mbilski/spray/hmac/Authentication.scala | Scala | apache-2.0 | 383 |
package org.ferrit.core.crawler
import akka.actor.{Actor, Props, ActorRef}
import akka.event.Logging
import akka.pattern.{ask, pipe}
import akka.util.Timeout
import akka.routing.{Listeners, Deafen, WithListeners}
import scala.concurrent.{ExecutionContext, Future}
import scala.concurrent.duration._
import scala.util.{Failure,Success}
import org.joda.time.{DateTime, Duration}
import org.ferrit.core.filter.UriFilter
import org.ferrit.core.uri.{CrawlUri, Frontier, FetchJob, UriCache}
import org.ferrit.core.crawler.FetchMessages._
import org.ferrit.core.http.{HttpClient, Get, Response, DefaultResponse, Stats}
import org.ferrit.core.model.CrawlJob
import org.ferrit.core.parser.{ContentParser, ParserResult}
import org.ferrit.core.robot.RobotRulesCacheActor
import org.ferrit.core.robot.RobotRulesCacheActor.{Allow, DelayFor}
import org.ferrit.core.util.{Counters, Media, MediaCounters, Stopwatch}
/**
* This has become a big ball of mud that needs splitting up.
 * It has too many responsibilities at the moment.
*/
class CrawlWorker(
job: CrawlJob,
config: CrawlConfig,
frontier: Frontier,
uriCache: UriCache,
httpClient: HttpClient,
robotRulesCache: ActorRef,
contentParser: ContentParser,
stopRule: StopRule
) extends Actor with Listeners {
import CrawlWorker._
private [crawler] implicit val execContext = context.system.dispatcher
private [crawler] val scheduler = context.system.scheduler
private [crawler] val log = Logging(context.system, getClass)
private [crawler] val robotRequestTimeout = new Timeout(20.seconds)
private [crawler] val supportedSchemes = Seq("http", "https")
private [crawler] val started = new DateTime
private [crawler] var fcounters = Counters() // fetch attempts
private [crawler] var rcounters = Counters() // response codes
private [crawler] var mcounters = MediaCounters() // count media types html, css etc
private [crawler] var state = CrawlStatus(
crawlStop = new DateTime().plus(config.crawlTimeoutMillis)
)
override def receive = crawlPending
def crawlPending: Receive = listenerManagement orElse {
case Run =>
val outcome = initCrawler
outcome
.pipeTo(sender)
.map {reply =>
reply match {
case StartOkay(_, _) =>
context.become(crawlRunning)
gossip(reply)
self ! NextDequeue
case StartFailed(_, _) =>
stopWith(reply)
}
}
}
def crawlRunning: Receive = listenerManagement orElse {
case NextDequeue =>
val outcome: CrawlOutcome = stopRule.ask(config, state, fcounters, frontier.size)
outcome match {
case KeepCrawling => scheduleNext
case otherOutcome => stopWith(Stopped(otherOutcome, completeJob(otherOutcome, None)))
}
case NextFetch(fe) => if (state.alive) fetchNext(fe)
case StopCrawl => state = state.stop // Stopping not immediate if in a fetch
}
private def stopWith(msg: Any):Unit = {
state = state.dead
gossip(msg)
context.stop(self)
}
private def stopWithFailure(t: Throwable):Unit = {
val outcome = InternalError("Crawler failed to complete: " + t.getLocalizedMessage, t)
stopWith(
Stopped(outcome, completeJob(outcome, Some(t)))
)
}
private def initCrawler: Future[Started] = config.validated match {
case Failure(t) => Future.successful(StartFailed(t, config))
case Success(b) =>
val jobs = config.seeds.map(s => FetchJob(s, 0)).toSet
enqueueFetchJobs(jobs)
.map(_ => StartOkay("Started okay", job))
.recover({ case throwable => StartFailed(throwable, config) })
}
private def completeJob(outcome: CrawlOutcome, throwOpt: Option[Throwable]):CrawlJob = {
val finished = new DateTime
val message = throwOpt match {
case Some(t) => outcome.message + ": " + t.getLocalizedMessage
case None => outcome.message
}
job.copy(
snapshotDate = new DateTime,
finishedDate = Some(finished),
duration = new Duration(started, finished).getMillis,
outcome = Some(outcome.state),
message = Some(message),
urisSeen = uriCache.size,
urisQueued = frontier.size,
fetchCounters = fcounters.counters,
responseCounters = rcounters.counters,
mediaCounters = mcounters.counters
)
}
/**
* Must batch enqueue FetchJob so that async fetch decisions about
* all the FetchJob are made BEFORE trying to access Frontier and UriCache
* to prevent a race condition when accessing the Frontier.
*/
private def enqueueFetchJobs(fetchJobs: Set[FetchJob]):Future[Unit] = {
val future:Future[Set[(FetchJob, CanFetch)]] = Future.sequence(
fetchJobs.map({f => isFetchable(f)})
)
future.recoverWith({ case t => Future.failed(t) })
future.map(
_.map({pair =>
val f = pair._1
val d = pair._2
dgossip(FetchDecision(f.uri, d))
if (OkayToFetch == d) {
frontier.enqueue(f) // only modify AFTER async fetch checks
uriCache.put(f.uri) // mark URI as seen
dgossip(FetchQueued(f))
}
})
)
}
  // Must check robot rules after the UriFilter test to avoid unnecessarily
  // downloading robots.txt files for sites that will never be visited anyway,
  // and because the robots.txt fetch fails on unsupported schemes like
  // mailto/ftp.
private def isFetchable(f: FetchJob):Future[(FetchJob, CanFetch)] = {
try {
val uri = f.uri
val scheme = uri.reader.scheme
if (uriCache.contains(uri)) {
Future.successful((f, SeenAlready))
} else if (supportedSchemes.find(scheme == _).isEmpty) {
Future.successful((f, UnsupportedScheme))
} else if (!config.uriFilter.accept(uri)) {
Future.successful((f, UriFilterRejected))
} else {
robotRulesCache
.ask(Allow(config.getUserAgent, uri.reader))(robotRequestTimeout)
.mapTo[Boolean]
.map(ok => {
val d = if (ok) OkayToFetch else RobotsExcluded
(f, d) // resolved as ...
})
}
} catch {
case t: Throwable => Future.failed(t)
}
}
private def scheduleNext:Unit = {
frontier.dequeue match {
case Some(f: FetchJob) =>
getFetchDelayFor(f.uri) map({delay =>
gossip(FetchScheduled(f, delay))
scheduler.scheduleOnce(delay.milliseconds, self, NextFetch(f))
})
case None => // empty frontier, code smell to fix
}
}
private def fetchNext(f: FetchJob):Unit = {
def doNext = self ! NextDequeue
val stopwatch = new Stopwatch
val result = (for {
response <- fetch(f)
parseResultOpt = parseResponse(response)
} yield {
emitFetchResult(f, response, parseResultOpt, stopwatch.duration)
parseResultOpt match {
case None => doNext
case Some(parserResult) =>
if (f.depth >= config.maxDepth) {
dgossip(DepthLimit(f))
doNext
} else {
var uris = Set.empty[CrawlUri]
var errors = Set.empty[Option[String]]
parserResult.links.foreach(l => l.crawlUri match {
case Some(uri) => uris = uris + uri
case _ => errors = errors + l.failMessage
})
errors.foreach(_ match {
case Some(msg) => log.error(s"URI parse fail: [$msg]")
case _ =>
})
val jobs = uris.map(FetchJob(_, f.depth + 1))
// must enqueue BEFORE next fetch
enqueueFetchJobs(jobs)
.map({_ => doNext})
.recover({ case t => stopWithFailure(t) })
}
}
})
result.recover({
case t =>
gossip(FetchError(f.uri, t))
doNext
})
}
private def fetch(f: FetchJob): Future[Response] = {
val uri = f.uri
val request = Get(config.getUserAgent, uri)
def onRequestFail(t: Throwable) = {
      // Handle an internal error (as distinct from an HTTP request error
      // on the target server). Synthesize a Response so that the crawl can continue.
log.error(t, "Request failed, reason: " + t.getLocalizedMessage)
gossip(FetchError(uri, t))
Future.successful( // technically an async success
DefaultResponse(
-1, // internal error code
Map.empty,
Array.empty[Byte], // t.getMessage.getBytes,
Stats.empty,
request
)
)
}
fcounters = fcounters.increment(FetchAttempts)
dgossip(FetchGo(f))
httpClient.request(request).map({response =>
dgossip(FetchResponse(uri, response.statusCode))
response
}).recoverWith({
case t: Throwable => onRequestFail(t)
})
}
private def emitFetchResult(
fetchJob: FetchJob,
response: Response,
result: Option[ParserResult],
duration: Long):Unit = {
gossip(FetchResult(
response.statusCode,
fetchJob,
job.copy(
snapshotDate = new DateTime,
duration = new Duration(started, new DateTime).getMillis,
urisSeen = uriCache.size,
urisQueued = frontier.size,
fetchCounters = fcounters.counters,
responseCounters = rcounters.counters,
mediaCounters = mcounters.counters
),
response,
duration, // Represents combined fetch + parse not including enqueue time
result
))
}
private def parseResponse(response: Response):Option[ParserResult] = {
rcounters = rcounters.increment(""+response.statusCode)
response.statusCode match {
case code if (code >= 200 && code <= 299) =>
fcounters = fcounters.increment(FetchSucceeds)
mcounters = mcounters.add(
response.contentType.getOrElse("undefined"), 1, response.contentLength
)
if (!contentParser.canParse(response)) {
None
} else {
Some(contentParser.parse(response))
}
case code if (code >= 300 && code <= 399) =>
fcounters = fcounters.increment(FetchRedirects)
None
case other_codes =>
fcounters = fcounters.increment(FetchFails)
None
}
}
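  /*
   * Worked example (added annotation, not in the original): a 200 text/html
   * response increments FetchSucceeds and the html media counter and is passed
   * to the content parser when it can handle the media type; a 301 increments
   * the redirect counter; a 404, or the synthetic -1 code produced by
   * onRequestFail, increments FetchFails. In every case the raw status code is
   * also tallied in rcounters.
   */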
/**
* Compute the politeness delay before the next fetch.
* Two possible delay values need considering:
*
* 1. A crawl-delay directive in robots.txt file, if it exists
* 2. The default delay in the CrawlConfig
*
* If both values are available then the longest is chosen.
*/
private def getFetchDelayFor(uri: CrawlUri):Future[Long] = {
val defDelay = config.crawlDelayMillis
robotRulesCache
.ask(DelayFor(config.getUserAgent, uri.reader))(robotRequestTimeout)
.mapTo[Option[Int]]
.map(_ match {
case Some(rulesDelay) => Math.max(rulesDelay, defDelay)
case None => defDelay
})
}
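  /*
   * Worked example (added annotation, not in the original): with
   * config.crawlDelayMillis = 1000 and a robots.txt crawl-delay that the cache
   * reports as Some(5000), the next fetch is scheduled after
   * max(5000, 1000) = 5000 ms; with no robots directive (None) it falls back
   * to the configured 1000 ms.
   */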
/**
* Uncomment to enable additional gossip for debugging,
* but realise that this will generate a considerably larger
* number of Actor messages as a consequence.
*/
private def dgossip(msg: Any) = {} //gossip(msg)
}
object CrawlWorker {
// Public messages
case object Run
case object StopCrawl
sealed abstract class Started
case class StartOkay(msg: String, job: CrawlJob) extends Started()
case class StartFailed(t: Throwable, config: CrawlConfig) extends Started()
case class Stopped(outcome: CrawlOutcome, job: CrawlJob)
case object EmptyFrontier
// Internal messages
private [crawler] case object NextDequeue
private [crawler] case class NextFetch(f: FetchJob)
val FetchAttempts = "FetchAttempts"
val FetchSucceeds = "FetchSucceeds"
val FetchFails = "FetchFails"
val FetchRedirects = "Redirects"
} | reggoodwin/ferrit | src/main/scala/org/ferrit/core/crawler/CrawlWorker.scala | Scala | mit | 11,786 |
package io.vamp.model.resolver
import io.vamp.common.{ Config, NamespaceProvider }
import io.vamp.model.artifact.{ GlobalReference, ValueReference }
trait ConfigurationValueResolver extends GlobalValueResolver {
this: NamespaceProvider ⇒
def valueForReference: PartialFunction[ValueReference, String] = {
case GlobalReference("conf" | "config" | "configuration", path) ⇒ Config.string(path)()
}
}
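/*
 * Illustrative sketch (not part of the original file): any of the three
 * prefixes resolves through the common Config object, so a reference such as
 * GlobalReference("config", "some.config.path") is answered by
 * Config.string("some.config.path")(). The path is invented for the example.
 */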
| dragoslav/vamp | model/src/main/scala/io/vamp/model/resolver/ConfigurationValueResolver.scala | Scala | apache-2.0 | 414 |
package models.admin.audits
import scalaz._
import Scalaz._
import scalaz.effect.IO
import scalaz.EitherT._
import scalaz.Validation
import scalaz.Validation.FlatMap._
import scalaz.NonEmptyList._
import scalaz.syntax.SemigroupOps
import models.tosca.KeyValueList
import net.liftweb.json._
import net.liftweb.json.scalaz.JsonScalaz._
import io.megam.util.Time
import org.joda.time.{DateTime, Period}
import org.joda.time.format.DateTimeFormat
import models.admin.ReportResult
object Scavenger {
def apply(email: String): Scavenger = new Scavenger(email)
}
class Scavenger(email: String) extends controllers.stack.ImplicitJsonFormats {
private def myorgs = models.team.Organizations.findByEmail(email)
def nukeLittered: ValidationNel[Throwable, Option[String]] = {
for {
aal <- littered
} yield aal
}
def nukeDeployed: ValidationNel[Throwable, Option[String]] = {
for {
orgs <- myorgs leftMap { err: NonEmptyList[Throwable] ⇒ err }
aal <- deployed(orgs)
} yield aal
"nuked".some.successNel[Throwable] //we keep moving to the next step
}
def nukeTelemetry = {
val t = (for {
hd <- models.billing.Billedhistories.delete(email)
bhd <- models.billing.Billingtransactions.delete(email)
bad <- models.billing.Balances.delete(email)
chd <- models.billing.Credits.delete(email)
qud <- models.billing.Quotas.delete(email)
qud <- models.tosca.Sensors.delete(email)
} yield "nuked".some)
"nuked".some.successNel[Throwable] //we keep moving to the next step
}
def nukeWhitePebbles = {
for {
hd <- models.events.EventsBilling.delete(email)
bhd <- models.events.EventsContainer.delete(email)
bad <- models.events.EventsVm.delete(email)
bad <- models.events.EventsStorage.delete(email)
} yield "whitepebbles.done".some
}
def nukeIdentity: ValidationNel[Throwable, Option[io.megam.auth.stack.AccountResult]] = {
for {
aal <- delete
} yield aal
}
private def littered = {
for {
snps <- models.disks.Snapshots.deleteByEmail(email)
diks <- models.disks.Disks.deleteByEmail(email)
bak <- models.disks.Backups.deleteByEmail(email)
} yield "littered.done".some
}
private def deployed(orgs: Seq[models.team.OrganizationsResult]) = {
(orgs.map { org => {
for {
asms <- models.tosca.Assemblies.findByEmail(email, org.id)
amnk <- mkTrashers(asms)
aenk <- invokeTrashers(amnk)
ssh <- models.base.SshKeys.delete(org.id)
dod <- models.team.Domains.delete(org.id)
} yield "deployed.done".some
}
})
"nuked.deployed".some.successNel[Throwable] //we keep moving to the next step
}
private def delete = {
for {
add <- models.addons.Addons.delete(email)
ord <- models.team.Organizations.delete(email)
dcd <- models.base.Accounts.delete(email)
} yield dcd
}
private def mkTrashers(ars :Seq[models.tosca.AssembliesResult]) = {
(ars.map { ar =>
ar.assemblies.map(models.admin.audits.Trasher(ar.id, _, email))
}).flatten.successNel
}
private def invokeTrashers(trs :Seq[models.admin.audits.Trasher]) = {
val n = trs.map(_.nukeDeployed)
if (!n.isEmpty) { n.head } else {
models.tosca.AssemblyResult("","","","", models.tosca.ComponentLinks.empty,"",
models.tosca.PoliciesList.empty, models.tosca.KeyValueList.empty,
models.tosca.KeyValueList.empty, "", "", "", utils.DateHelper.now()).successNel
}
}
}
| megamsys/verticegateway | app/models/admin/audits/Scavenger.scala | Scala | mit | 3,606 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.api
import java.util.Properties
import kafka.server.KafkaConfig
import kafka.utils.{ShutdownableThread, TestUtils}
import org.apache.kafka.clients.consumer.{ConsumerConfig, KafkaConsumer}
import org.apache.kafka.clients.producer.{KafkaProducer, ProducerConfig}
import org.apache.kafka.clients.producer.internals.ErrorLoggingCallback
import org.apache.kafka.common.TopicPartition
import org.junit.Assert._
import org.junit.Test
import scala.jdk.CollectionConverters._
import scala.collection.mutable
class TransactionsBounceTest extends IntegrationTestHarness {
private val producerBufferSize = 65536
private val serverMessageMaxBytes = producerBufferSize/2
private val numPartitions = 3
private val outputTopic = "output-topic"
private val inputTopic = "input-topic"
val overridingProps = new Properties()
overridingProps.put(KafkaConfig.AutoCreateTopicsEnableProp, false.toString)
overridingProps.put(KafkaConfig.MessageMaxBytesProp, serverMessageMaxBytes.toString)
// Set a smaller value for the number of partitions for the offset commit topic (__consumer_offset topic)
// so that the creation of that topic/partition(s) and subsequent leader assignment doesn't take relatively long
overridingProps.put(KafkaConfig.ControlledShutdownEnableProp, true.toString)
overridingProps.put(KafkaConfig.UncleanLeaderElectionEnableProp, false.toString)
overridingProps.put(KafkaConfig.AutoLeaderRebalanceEnableProp, false.toString)
overridingProps.put(KafkaConfig.OffsetsTopicPartitionsProp, 1.toString)
overridingProps.put(KafkaConfig.OffsetsTopicReplicationFactorProp, 3.toString)
overridingProps.put(KafkaConfig.MinInSyncReplicasProp, 2.toString)
overridingProps.put(KafkaConfig.TransactionsTopicPartitionsProp, 1.toString)
overridingProps.put(KafkaConfig.TransactionsTopicReplicationFactorProp, 3.toString)
overridingProps.put(KafkaConfig.GroupMinSessionTimeoutMsProp, "10") // set small enough session timeout
overridingProps.put(KafkaConfig.GroupInitialRebalanceDelayMsProp, "0")
// This is the one of the few tests we currently allow to preallocate ports, despite the fact that this can result in transient
// failures due to ports getting reused. We can't use random ports because of bad behavior that can result from bouncing
// brokers too quickly when they get new, random ports. If we're not careful, the client can end up in a situation
// where metadata is not refreshed quickly enough, and by the time it's actually trying to, all the servers have
// been bounced and have new addresses. None of the bootstrap nodes or current metadata can get them connected to a
// running server.
//
// Since such quick rotation of servers is incredibly unrealistic, we allow this one test to preallocate ports, leaving
// a small risk of hitting errors due to port conflicts. Hopefully this is infrequent enough to not cause problems.
override def generateConfigs = {
FixedPortTestUtils.createBrokerConfigs(brokerCount, zkConnect, enableControlledShutdown = true)
.map(KafkaConfig.fromProps(_, overridingProps))
}
override protected def brokerCount: Int = 4
@Test
def testWithGroupId(): Unit = {
testBrokerFailure((producer, groupId, consumer) =>
producer.sendOffsetsToTransaction(TestUtils.consumerPositions(consumer).asJava, groupId))
}
@Test
def testWithGroupMetadata(): Unit = {
testBrokerFailure((producer, _, consumer) =>
producer.sendOffsetsToTransaction(TestUtils.consumerPositions(consumer).asJava, consumer.groupMetadata()))
}
private def testBrokerFailure(commit: (KafkaProducer[Array[Byte], Array[Byte]],
String, KafkaConsumer[Array[Byte], Array[Byte]]) => Unit): Unit = {
// basic idea is to seed a topic with 10000 records, and copy it transactionally while bouncing brokers
// constantly through the period.
val consumerGroup = "myGroup"
val numInputRecords = 10000
createTopics()
TestUtils.seedTopicWithNumberedRecords(inputTopic, numInputRecords, servers)
val consumer = createConsumerAndSubscribe(consumerGroup, List(inputTopic))
val producer = createTransactionalProducer("test-txn")
producer.initTransactions()
val scheduler = new BounceScheduler
scheduler.start()
try {
var numMessagesProcessed = 0
var iteration = 0
while (numMessagesProcessed < numInputRecords) {
val toRead = Math.min(200, numInputRecords - numMessagesProcessed)
trace(s"$iteration: About to read $toRead messages, processed $numMessagesProcessed so far..")
val records = TestUtils.pollUntilAtLeastNumRecords(consumer, toRead)
trace(s"Received ${records.size} messages, sending them transactionally to $outputTopic")
producer.beginTransaction()
val shouldAbort = iteration % 3 == 0
records.foreach { record =>
producer.send(TestUtils.producerRecordWithExpectedTransactionStatus(outputTopic, null, record.key, record.value, !shouldAbort), new ErrorLoggingCallback(outputTopic, record.key, record.value, true))
}
trace(s"Sent ${records.size} messages. Committing offsets.")
commit(producer, consumerGroup, consumer)
if (shouldAbort) {
trace(s"Committed offsets. Aborting transaction of ${records.size} messages.")
producer.abortTransaction()
TestUtils.resetToCommittedPositions(consumer)
} else {
trace(s"Committed offsets. committing transaction of ${records.size} messages.")
producer.commitTransaction()
numMessagesProcessed += records.size
}
iteration += 1
}
} finally {
scheduler.shutdown()
}
val verifyingConsumer = createConsumerAndSubscribe("randomGroup", List(outputTopic), readCommitted = true)
val recordsByPartition = new mutable.HashMap[TopicPartition, mutable.ListBuffer[Int]]()
TestUtils.pollUntilAtLeastNumRecords(verifyingConsumer, numInputRecords).foreach { record =>
val value = TestUtils.assertCommittedAndGetValue(record).toInt
val topicPartition = new TopicPartition(record.topic(), record.partition())
recordsByPartition.getOrElseUpdate(topicPartition, new mutable.ListBuffer[Int])
.append(value)
}
val outputRecords = new mutable.ListBuffer[Int]()
recordsByPartition.values.foreach { case (partitionValues) =>
assertEquals("Out of order messages detected", partitionValues, partitionValues.sorted)
outputRecords.appendAll(partitionValues)
}
val recordSet = outputRecords.toSet
assertEquals(numInputRecords, recordSet.size)
val expectedValues = (0 until numInputRecords).toSet
assertEquals(s"Missing messages: ${expectedValues -- recordSet}", expectedValues, recordSet)
}
private def createTransactionalProducer(transactionalId: String) = {
val props = new Properties()
props.put(ProducerConfig.ACKS_CONFIG, "all")
props.put(ProducerConfig.BATCH_SIZE_CONFIG, "512")
props.put(ProducerConfig.TRANSACTIONAL_ID_CONFIG, transactionalId)
props.put(ProducerConfig.ENABLE_IDEMPOTENCE_CONFIG, "true")
createProducer(configOverrides = props)
}
private def createConsumerAndSubscribe(groupId: String,
topics: List[String],
readCommitted: Boolean = false) = {
val consumerProps = new Properties
consumerProps.put(ConsumerConfig.GROUP_ID_CONFIG, groupId)
consumerProps.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false")
consumerProps.put(ConsumerConfig.ISOLATION_LEVEL_CONFIG,
if (readCommitted) "read_committed" else "read_uncommitted")
val consumer = createConsumer(configOverrides = consumerProps)
consumer.subscribe(topics.asJava)
consumer
}
private def createTopics() = {
val topicConfig = new Properties()
topicConfig.put(KafkaConfig.MinInSyncReplicasProp, 2.toString)
createTopic(inputTopic, numPartitions, 3, topicConfig)
createTopic(outputTopic, numPartitions, 3, topicConfig)
}
private class BounceScheduler extends ShutdownableThread("daemon-broker-bouncer", false) {
override def doWork(): Unit = {
for (server <- servers) {
trace("Shutting down server : %s".format(server.config.brokerId))
server.shutdown()
server.awaitShutdown()
Thread.sleep(500)
trace("Server %s shut down. Starting it up again.".format(server.config.brokerId))
server.startup()
trace("Restarted server: %s".format(server.config.brokerId))
Thread.sleep(500)
}
(0 until numPartitions).foreach(partition => TestUtils.waitUntilLeaderIsElectedOrChanged(zkClient, outputTopic, partition))
}
override def shutdown(): Unit = {
super.shutdown()
}
}
}
| sslavic/kafka | core/src/test/scala/integration/kafka/api/TransactionsBounceTest.scala | Scala | apache-2.0 | 9,625 |
package com.github.morikuni.locest.frequency.domain.model
import com.github.morikuni.locest.util.Identifier
case class AreaId(override val value: Int) extends Identifier[Int] | morikuni/locest | frequency/app/com/github/morikuni/locest/frequency/domain/model/Area.scala | Scala | mit | 176 |
package org.monalang.monac.iface
import org.monalang.monac.common.util.ListUtil
object OptionName extends Enumeration {
type OptionName = Value
val OUTPUT = Value("output")
val ARCHITECTURE = Value("sets the target architecture")
}
import OptionName._
/*
 * NOTE: currently there is a 1-1 correspondence between command-line arguments
 * and compile options. Should parse parameterMap -> optionMap, and return from
 * optionMap.
*/
case class CompileOptions(
sources: Set[String],
objects: Set[String],
private val parameterMap: Map[OptionName, List[String]],
private val duplicateOptions: Set[OptionName]) {
/*
* TODO parse, use configuration objects for options
* like architecture
*/
val optionMap = parameterMap
/**
* Get option parameters
*
* @return scala.Option containing a list of parameters for the specified OptionName
*/
def get(name: OptionName): Option[List[String]] = {
if (duplicateOptions contains name) throw
new IllegalStateException("Duplicate option \\"" + name + "\\"")
optionMap.get(name)
}
if (sources.isEmpty && objects.isEmpty) throw new Exception("No input files")
}
object CompileOptions {
private val argumentDefinitions = List(
OptionDefinition(OUTPUT, List("-o", "--output")),
OptionDefinition(ARCHITECTURE, List("-a", "--architecture")))
private def isSource(argument: String) = argument.endsWith(".mona")
def apply(argumentStrings: List[String]) = {
val parsedArguments = ParsedConsoleInput(argumentDefinitions, argumentStrings)
val optionParameterMap = parsedArguments.optionArguments map (argument =>
argument.optionName -> argument.parameters) toMap
val optionNames = parsedArguments.optionArguments map (_.optionName)
// Determined now so it can be passed on to CompileOptions which reports on access
val duplicateOptionNames = ListUtil.getDuplicates(optionNames)
val sources = parsedArguments.inputs filter isSource
val objects = parsedArguments.inputs filter (!isSource(_))
new CompileOptions(sources.toSet, objects.toSet, optionParameterMap, duplicateOptionNames)
}
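  /*
   * Illustrative sketch (not part of the original file): parsing a typical
   * invocation. The file names are invented, and the split between inputs and
   * option parameters assumes ParsedConsoleInput consumes "a.out" as the
   * parameter of "-o".
   *
   * val opts = CompileOptions(List("main.mona", "util.o", "-o", "a.out"))
   * opts.sources                 // Set("main.mona")
   * opts.objects                 // Set("util.o")
   * opts.get(OptionName.OUTPUT)  // Some(List("a.out"))
   */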
} | corazza/monac-scala | monac/src/main/scala/org/monalang/monac/iface/CompileOptions.scala | Scala | gpl-3.0 | 2,138 |
package models
import akka.actor.ActorRef
object WorkerStatusProtocol {
import Task._
trait WorkerStatus
case class Working(task: Task) extends WorkerStatus
case object Idle extends WorkerStatus
case class WorkerState(ref: ActorRef, status: WorkerStatus)
}
| jhejderup/Awsseract | awsseract-frontend/app/models/WorkerStatusProtocol.scala | Scala | apache-2.0 | 274 |
/*
* Copyright (c) 2014 Contributor. All rights reserved.
*/
package org.scalaide.ui.internal.editor.decorators.implicits
import org.scalaide.ui.internal.preferences.ImplicitsPreferencePage
import org.scalaide.ui.internal.actions.AbstractToggleHandler
/**
 * Handler to toggle the Implicits Display (a shortcut that avoids opening Preferences, ...)
*
* @see scala.tools.eclipse.ui.AbstractToggleHandler
*/
class ToggleImplicitsDisplayHandler extends AbstractToggleHandler("org.scala-ide.sdt.core.commands.ToggleImplicitsDisplay", ImplicitsPreferencePage.PActive)
| Kwestor/scala-ide | org.scala-ide.sdt.core/src/org/scalaide/ui/internal/editor/decorators/implicits/ToggleImplicitsDisplayHandler.scala | Scala | bsd-3-clause | 561 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.streaming
import scala.collection.JavaConverters._
import scala.collection.mutable.{Map => MutableMap}
import org.apache.spark.sql.{Dataset, SparkSession}
import org.apache.spark.sql.catalyst.encoders.RowEncoder
import org.apache.spark.sql.catalyst.expressions.{Alias, CurrentBatchTimestamp, CurrentDate, CurrentTimestamp}
import org.apache.spark.sql.catalyst.plans.logical.{LocalRelation, LogicalPlan, Project}
import org.apache.spark.sql.execution.SQLExecution
import org.apache.spark.sql.execution.datasources.v2.{StreamingDataSourceV2Relation, WriteToDataSourceV2}
import org.apache.spark.sql.execution.streaming.sources.{MicroBatchWritSupport, RateControlMicroBatchReadSupport}
import org.apache.spark.sql.sources.v2._
import org.apache.spark.sql.sources.v2.reader.streaming.{MicroBatchReadSupport, Offset => OffsetV2}
import org.apache.spark.sql.streaming.{OutputMode, ProcessingTime, Trigger}
import org.apache.spark.util.{Clock, Utils}
class MicroBatchExecution(
sparkSession: SparkSession,
name: String,
checkpointRoot: String,
analyzedPlan: LogicalPlan,
sink: BaseStreamingSink,
trigger: Trigger,
triggerClock: Clock,
outputMode: OutputMode,
extraOptions: Map[String, String],
deleteCheckpointOnStop: Boolean)
extends StreamExecution(
sparkSession, name, checkpointRoot, analyzedPlan, sink,
trigger, triggerClock, outputMode, deleteCheckpointOnStop) {
@volatile protected var sources: Seq[BaseStreamingSource] = Seq.empty
private val readSupportToDataSourceMap =
MutableMap.empty[MicroBatchReadSupport, (DataSourceV2, Map[String, String])]
private val triggerExecutor = trigger match {
case t: ProcessingTime => ProcessingTimeExecutor(t, triggerClock)
case OneTimeTrigger => OneTimeExecutor()
case _ => throw new IllegalStateException(s"Unknown type of trigger: $trigger")
}
private var watermarkTracker: WatermarkTracker = _
override lazy val logicalPlan: LogicalPlan = {
assert(queryExecutionThread eq Thread.currentThread,
"logicalPlan must be initialized in QueryExecutionThread " +
s"but the current thread was ${Thread.currentThread}")
var nextSourceId = 0L
val toExecutionRelationMap = MutableMap[StreamingRelation, StreamingExecutionRelation]()
val v2ToExecutionRelationMap = MutableMap[StreamingRelationV2, StreamingExecutionRelation]()
// We transform each distinct streaming relation into a StreamingExecutionRelation, keeping a
// map as we go to ensure each identical relation gets the same StreamingExecutionRelation
// object. For each microbatch, the StreamingExecutionRelation will be replaced with a logical
// plan for the data within that batch.
// Note that we have to use the previous `output` as attributes in StreamingExecutionRelation,
// since the existing logical plan has already used those attributes. The per-microbatch
// transformation is responsible for replacing attributes with their final values.
val disabledSources =
sparkSession.sqlContext.conf.disabledV2StreamingMicroBatchReaders.split(",")
val _logicalPlan = analyzedPlan.transform {
case streamingRelation@StreamingRelation(dataSourceV1, sourceName, output) =>
toExecutionRelationMap.getOrElseUpdate(streamingRelation, {
// Materialize source to avoid creating it in every batch
val metadataPath = s"$resolvedCheckpointRoot/sources/$nextSourceId"
val source = dataSourceV1.createSource(metadataPath)
nextSourceId += 1
logInfo(s"Using Source [$source] from DataSourceV1 named '$sourceName' [$dataSourceV1]")
StreamingExecutionRelation(source, output)(sparkSession)
})
case s @ StreamingRelationV2(
dataSourceV2: MicroBatchReadSupportProvider, sourceName, options, output, _) if
!disabledSources.contains(dataSourceV2.getClass.getCanonicalName) =>
v2ToExecutionRelationMap.getOrElseUpdate(s, {
// Materialize source to avoid creating it in every batch
val metadataPath = s"$resolvedCheckpointRoot/sources/$nextSourceId"
val readSupport = dataSourceV2.createMicroBatchReadSupport(
metadataPath,
new DataSourceOptions(options.asJava))
nextSourceId += 1
readSupportToDataSourceMap(readSupport) = dataSourceV2 -> options
logInfo(s"Using MicroBatchReadSupport [$readSupport] from " +
s"DataSourceV2 named '$sourceName' [$dataSourceV2]")
StreamingExecutionRelation(readSupport, output)(sparkSession)
})
case s @ StreamingRelationV2(dataSourceV2, sourceName, _, output, v1Relation) =>
v2ToExecutionRelationMap.getOrElseUpdate(s, {
// Materialize source to avoid creating it in every batch
val metadataPath = s"$resolvedCheckpointRoot/sources/$nextSourceId"
if (v1Relation.isEmpty) {
throw new UnsupportedOperationException(
s"Data source $sourceName does not support microbatch processing.")
}
val source = v1Relation.get.dataSource.createSource(metadataPath)
nextSourceId += 1
logInfo(s"Using Source [$source] from DataSourceV2 named '$sourceName' [$dataSourceV2]")
StreamingExecutionRelation(source, output)(sparkSession)
})
}
sources = _logicalPlan.collect { case s: StreamingExecutionRelation => s.source }
uniqueSources = sources.distinct
_logicalPlan
}
/**
* Signifies whether current batch (i.e. for the batch `currentBatchId`) has been constructed
* (i.e. written to the offsetLog) and is ready for execution.
*/
private var isCurrentBatchConstructed = false
/**
* Signals to the thread executing micro-batches that it should stop running after the next
* batch. This method blocks until the thread stops running.
*/
override def stop(): Unit = {
// Set the state to TERMINATED so that the batching thread knows that it was interrupted
// intentionally
state.set(TERMINATED)
if (queryExecutionThread.isAlive) {
sparkSession.sparkContext.cancelJobGroup(runId.toString)
queryExecutionThread.interrupt()
queryExecutionThread.join()
// microBatchThread may spawn new jobs, so we need to cancel again to prevent a leak
sparkSession.sparkContext.cancelJobGroup(runId.toString)
}
logInfo(s"Query $prettyIdString was stopped")
}
/**
* Repeatedly attempts to run batches as data arrives.
*/
protected def runActivatedStream(sparkSessionForStream: SparkSession): Unit = {
val noDataBatchesEnabled =
sparkSessionForStream.sessionState.conf.streamingNoDataMicroBatchesEnabled
triggerExecutor.execute(() => {
if (isActive) {
var currentBatchHasNewData = false // Whether the current batch had new data
startTrigger()
reportTimeTaken("triggerExecution") {
// We'll do this initialization only once every start / restart
if (currentBatchId < 0) {
populateStartOffsets(sparkSessionForStream)
logInfo(s"Stream started from $committedOffsets")
}
// Set this before calling constructNextBatch() so any Spark jobs executed by sources
// while getting new data have the correct description
sparkSession.sparkContext.setJobDescription(getBatchDescriptionString)
// Try to construct the next batch. This will return true only if the next batch is
// ready and runnable. Note that the current batch may be runnable even without
// new data to process as `constructNextBatch` may decide to run a batch for
// state cleanup, etc. `isNewDataAvailable` will be updated to reflect whether new data
// is available or not.
if (!isCurrentBatchConstructed) {
isCurrentBatchConstructed = constructNextBatch(noDataBatchesEnabled)
}
// Record the trigger offset range for progress reporting *before* processing the batch
recordTriggerOffsets(from = committedOffsets, to = availableOffsets)
// Remember whether the current batch has data or not. This will be required later
// for bookkeeping after running the batch, when `isNewDataAvailable` will have changed
// to false as the batch would have already processed the available data.
currentBatchHasNewData = isNewDataAvailable
currentStatus = currentStatus.copy(isDataAvailable = isNewDataAvailable)
if (isCurrentBatchConstructed) {
if (currentBatchHasNewData) updateStatusMessage("Processing new data")
else updateStatusMessage("No new data but cleaning up state")
runBatch(sparkSessionForStream)
} else {
updateStatusMessage("Waiting for data to arrive")
}
}
finishTrigger(currentBatchHasNewData) // Must be outside reportTimeTaken so it is recorded
// Signal waiting threads. Note this must be after finishTrigger() to ensure all
// activities (progress generation, etc.) have completed before signaling.
withProgressLocked { awaitProgressLockCondition.signalAll() }
// If the current batch has been executed, then increment the batch id and reset flag.
// Otherwise, there was no data to execute the batch and sleep for some time
if (isCurrentBatchConstructed) {
currentBatchId += 1
isCurrentBatchConstructed = false
} else Thread.sleep(pollingDelayMs)
}
updateStatusMessage("Waiting for next trigger")
isActive
})
}
/**
* Populate the start offsets to start the execution at the current offsets stored in the sink
* (i.e. avoid reprocessing data that we have already processed). This function must be called
* before any processing occurs and will populate the following fields:
* - currentBatchId
* - committedOffsets
* - availableOffsets
* The basic structure of this method is as follows:
*
* Identify (from the offset log) the offsets used to run the last batch
* IF last batch exists THEN
* Set the next batch to be executed as the last recovered batch
* Check the commit log to see which batch was committed last
* IF the last batch was committed THEN
* Call getBatch using the last batch start and end offsets
* // ^^^^ above line is needed since some sources assume last batch always re-executes
* Setup for a new batch i.e., start = last batch end, and identify new end
* DONE
* ELSE
* Identify a brand new batch
* DONE
*/
private def populateStartOffsets(sparkSessionToRunBatches: SparkSession): Unit = {
offsetLog.getLatest() match {
case Some((latestBatchId, nextOffsets)) =>
/* First assume that we are re-executing the latest known batch
* in the offset log */
currentBatchId = latestBatchId
isCurrentBatchConstructed = true
availableOffsets = nextOffsets.toStreamProgress(sources)
        /* Initialize committed offsets to a committed batch, which at this
         * point is the second latest batch id in the offset log. */
if (latestBatchId != 0) {
val secondLatestBatchId = offsetLog.get(latestBatchId - 1).getOrElse {
throw new IllegalStateException(s"batch ${latestBatchId - 1} doesn't exist")
}
committedOffsets = secondLatestBatchId.toStreamProgress(sources)
}
// update offset metadata
nextOffsets.metadata.foreach { metadata =>
OffsetSeqMetadata.setSessionConf(metadata, sparkSessionToRunBatches.conf)
offsetSeqMetadata = OffsetSeqMetadata(
metadata.batchWatermarkMs, metadata.batchTimestampMs, sparkSessionToRunBatches.conf)
watermarkTracker = WatermarkTracker(sparkSessionToRunBatches.conf)
watermarkTracker.setWatermark(metadata.batchWatermarkMs)
}
/* identify the current batch id: if commit log indicates we successfully processed the
* latest batch id in the offset log, then we can safely move to the next batch
* i.e., committedBatchId + 1 */
commitLog.getLatest() match {
case Some((latestCommittedBatchId, commitMetadata)) =>
if (latestBatchId == latestCommittedBatchId) {
/* The last batch was successfully committed, so we can safely process a
* new next batch but first:
* Make a call to getBatch using the offsets from previous batch.
* because certain sources (e.g., KafkaSource) assume on restart the last
* batch will be executed before getOffset is called again. */
availableOffsets.foreach {
case (source: Source, end: Offset) =>
val start = committedOffsets.get(source)
source.getBatch(start, end)
case nonV1Tuple =>
// The V2 API does not have the same edge case requiring getBatch to be called
// here, so we do nothing here.
}
currentBatchId = latestCommittedBatchId + 1
isCurrentBatchConstructed = false
committedOffsets ++= availableOffsets
watermarkTracker.setWatermark(
math.max(watermarkTracker.currentWatermark, commitMetadata.nextBatchWatermarkMs))
} else if (latestCommittedBatchId < latestBatchId - 1) {
logWarning(s"Batch completion log latest batch id is " +
s"${latestCommittedBatchId}, which is not trailing " +
s"batchid $latestBatchId by one")
}
case None => logInfo("no commit log present")
}
logInfo(s"Resuming at batch $currentBatchId with committed offsets " +
s"$committedOffsets and available offsets $availableOffsets")
case None => // We are starting this stream for the first time.
logInfo(s"Starting new streaming query.")
currentBatchId = 0
watermarkTracker = WatermarkTracker(sparkSessionToRunBatches.conf)
}
}
/**
* Returns true if there is any new data available to be processed.
*/
private def isNewDataAvailable: Boolean = {
availableOffsets.exists {
case (source, available) =>
committedOffsets
.get(source)
.map(committed => committed != available)
.getOrElse(true)
}
}
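  /*
   * Worked example (added annotation, not in the original): if a source's
   * committed offset is 10 and its available offset is 12, the predicate is
   * true and a batch can be constructed; once that batch commits and both
   * progress maps read 12, it is false again until the source reports a newer
   * offset.
   */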
/**
* Attempts to construct a batch according to:
* - Availability of new data
* - Need for timeouts and state cleanups in stateful operators
*
* Returns true only if the next batch should be executed.
*
* Here is the high-level logic on how this constructs the next batch.
* - Check each source whether new data is available
* - Updated the query's metadata and check using the last execution whether there is any need
* to run another batch (for state clean up, etc.)
* - If either of the above is true, then construct the next batch by committing to the offset
* log that range of offsets that the next batch will process.
*/
private def constructNextBatch(noDataBatchesEnabled: Boolean): Boolean = withProgressLocked {
if (isCurrentBatchConstructed) return true
// Generate a map from each unique source to the next available offset.
val latestOffsets: Map[BaseStreamingSource, Option[Offset]] = uniqueSources.map {
case s: Source =>
updateStatusMessage(s"Getting offsets from $s")
reportTimeTaken("getOffset") {
(s, s.getOffset)
}
case s: RateControlMicroBatchReadSupport =>
updateStatusMessage(s"Getting offsets from $s")
reportTimeTaken("latestOffset") {
val startOffset = availableOffsets
.get(s).map(off => s.deserializeOffset(off.json))
.getOrElse(s.initialOffset())
(s, Option(s.latestOffset(startOffset)))
}
case s: MicroBatchReadSupport =>
updateStatusMessage(s"Getting offsets from $s")
reportTimeTaken("latestOffset") {
(s, Option(s.latestOffset()))
}
}.toMap
availableOffsets ++= latestOffsets.filter { case (_, o) => o.nonEmpty }.mapValues(_.get)
// Update the query metadata
offsetSeqMetadata = offsetSeqMetadata.copy(
batchWatermarkMs = watermarkTracker.currentWatermark,
batchTimestampMs = triggerClock.getTimeMillis())
// Check whether next batch should be constructed
val lastExecutionRequiresAnotherBatch = noDataBatchesEnabled &&
Option(lastExecution).exists(_.shouldRunAnotherBatch(offsetSeqMetadata))
val shouldConstructNextBatch = isNewDataAvailable || lastExecutionRequiresAnotherBatch
logTrace(
s"noDataBatchesEnabled = $noDataBatchesEnabled, " +
s"lastExecutionRequiresAnotherBatch = $lastExecutionRequiresAnotherBatch, " +
s"isNewDataAvailable = $isNewDataAvailable, " +
s"shouldConstructNextBatch = $shouldConstructNextBatch")
if (shouldConstructNextBatch) {
// Commit the next batch offset range to the offset log
updateStatusMessage("Writing offsets to log")
reportTimeTaken("walCommit") {
assert(offsetLog.add(currentBatchId,
availableOffsets.toOffsetSeq(sources, offsetSeqMetadata)),
s"Concurrent update to the log. Multiple streaming jobs detected for $currentBatchId")
logInfo(s"Committed offsets for batch $currentBatchId. " +
s"Metadata ${offsetSeqMetadata.toString}")
// NOTE: The following code is correct because runStream() processes exactly one
// batch at a time. If we add pipeline parallelism (multiple batches in flight at
// the same time), this cleanup logic will need to change.
// Now that we've updated the scheduler's persistent checkpoint, it is safe for the
// sources to discard data from the previous batch.
if (currentBatchId != 0) {
val prevBatchOff = offsetLog.get(currentBatchId - 1)
if (prevBatchOff.isDefined) {
prevBatchOff.get.toStreamProgress(sources).foreach {
case (src: Source, off) => src.commit(off)
case (readSupport: MicroBatchReadSupport, off) =>
readSupport.commit(readSupport.deserializeOffset(off.json))
case (src, _) =>
throw new IllegalArgumentException(
s"Unknown source is found at constructNextBatch: $src")
}
} else {
throw new IllegalStateException(s"batch ${currentBatchId - 1} doesn't exist")
}
}
// It is now safe to discard the metadata beyond the minimum number to retain.
// Note that purge is exclusive, i.e. it purges everything before the target ID.
if (minLogEntriesToMaintain < currentBatchId) {
offsetLog.purge(currentBatchId - minLogEntriesToMaintain)
commitLog.purge(currentBatchId - minLogEntriesToMaintain)
}
}
noNewData = false
} else {
noNewData = true
awaitProgressLockCondition.signalAll()
}
shouldConstructNextBatch
}
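  /*
   * Worked example (added annotation, not in the original): if no source has a
   * new offset but the last execution's stateful operators report
   * shouldRunAnotherBatch (e.g. event-time timeouts ready to fire after a
   * watermark advance), this still returns true when noDataBatchesEnabled is
   * set, so a batch runs purely for state cleanup; with the flag off it
   * returns false and marks noNewData.
   */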
/**
* Processes any data available between `availableOffsets` and `committedOffsets`.
* @param sparkSessionToRunBatch Isolated [[SparkSession]] to run this batch with.
*/
private def runBatch(sparkSessionToRunBatch: SparkSession): Unit = {
logDebug(s"Running batch $currentBatchId")
// Request unprocessed data from all sources.
newData = reportTimeTaken("getBatch") {
availableOffsets.flatMap {
case (source: Source, available)
if committedOffsets.get(source).map(_ != available).getOrElse(true) =>
val current = committedOffsets.get(source)
val batch = source.getBatch(current, available)
assert(batch.isStreaming,
s"DataFrame returned by getBatch from $source did not have isStreaming=true\\n" +
s"${batch.queryExecution.logical}")
logDebug(s"Retrieving data from $source: $current -> $available")
Some(source -> batch.logicalPlan)
// TODO(cloud-fan): for data source v2, the new batch is just a new `ScanConfigBuilder`, but
// to be compatible with streaming source v1, we return a logical plan as a new batch here.
case (readSupport: MicroBatchReadSupport, available)
if committedOffsets.get(readSupport).map(_ != available).getOrElse(true) =>
val current = committedOffsets.get(readSupport).map {
off => readSupport.deserializeOffset(off.json)
}
val endOffset: OffsetV2 = available match {
case v1: SerializedOffset => readSupport.deserializeOffset(v1.json)
case v2: OffsetV2 => v2
}
val startOffset = current.getOrElse(readSupport.initialOffset)
val scanConfigBuilder = readSupport.newScanConfigBuilder(startOffset, endOffset)
logDebug(s"Retrieving data from $readSupport: $current -> $endOffset")
val (source, options) = readSupport match {
// `MemoryStream` is special. It's for test only and doesn't have a `DataSourceV2`
// implementation. We provide a fake one here for explain.
case _: MemoryStream[_] => MemoryStreamDataSource -> Map.empty[String, String]
// Provide a fake value here just in case something went wrong, e.g. the reader gives
// a wrong `equals` implementation.
case _ => readSupportToDataSourceMap.getOrElse(readSupport, {
FakeDataSourceV2 -> Map.empty[String, String]
})
}
Some(readSupport -> StreamingDataSourceV2Relation(
readSupport.fullSchema().toAttributes, source, options, readSupport, scanConfigBuilder))
case _ => None
}
}
// Replace sources in the logical plan with data that has arrived since the last batch.
val newBatchesPlan = logicalPlan transform {
case StreamingExecutionRelation(source, output) =>
newData.get(source).map { dataPlan =>
assert(output.size == dataPlan.output.size,
s"Invalid batch: ${Utils.truncatedString(output, ",")} != " +
s"${Utils.truncatedString(dataPlan.output, ",")}")
val aliases = output.zip(dataPlan.output).map { case (to, from) =>
Alias(from, to.name)(exprId = to.exprId, explicitMetadata = Some(from.metadata))
}
Project(aliases, dataPlan)
}.getOrElse {
LocalRelation(output, isStreaming = true)
}
}
// Rewire the plan to use the new attributes that were returned by the source.
val newAttributePlan = newBatchesPlan transformAllExpressions {
case ct: CurrentTimestamp =>
CurrentBatchTimestamp(offsetSeqMetadata.batchTimestampMs,
ct.dataType)
case cd: CurrentDate =>
CurrentBatchTimestamp(offsetSeqMetadata.batchTimestampMs,
cd.dataType, cd.timeZoneId)
}
val triggerLogicalPlan = sink match {
case _: Sink => newAttributePlan
case s: StreamingWriteSupportProvider =>
val writer = s.createStreamingWriteSupport(
s"$runId",
newAttributePlan.schema,
outputMode,
new DataSourceOptions(extraOptions.asJava))
WriteToDataSourceV2(new MicroBatchWritSupport(currentBatchId, writer), newAttributePlan)
case _ => throw new IllegalArgumentException(s"unknown sink type for $sink")
}
sparkSessionToRunBatch.sparkContext.setLocalProperty(
MicroBatchExecution.BATCH_ID_KEY, currentBatchId.toString)
reportTimeTaken("queryPlanning") {
lastExecution = new IncrementalExecution(
sparkSessionToRunBatch,
triggerLogicalPlan,
outputMode,
checkpointFile("state"),
runId,
currentBatchId,
offsetSeqMetadata)
lastExecution.executedPlan // Force the lazy generation of execution plan
}
val nextBatch =
new Dataset(sparkSessionToRunBatch, lastExecution, RowEncoder(lastExecution.analyzed.schema))
reportTimeTaken("addBatch") {
SQLExecution.withNewExecutionId(sparkSessionToRunBatch, lastExecution) {
sink match {
case s: Sink => s.addBatch(currentBatchId, nextBatch)
case _: StreamingWriteSupportProvider =>
// This doesn't accumulate any data - it just forces execution of the microbatch writer.
nextBatch.collect()
}
}
}
withProgressLocked {
watermarkTracker.updateWatermark(lastExecution.executedPlan)
commitLog.add(currentBatchId, CommitMetadata(watermarkTracker.currentWatermark))
committedOffsets ++= availableOffsets
}
logDebug(s"Completed batch ${currentBatchId}")
}
  /** Execute a function while preventing the stream from making any progress */
private[sql] def withProgressLocked[T](f: => T): T = {
awaitProgressLock.lock()
try {
f
} finally {
awaitProgressLock.unlock()
}
}
}
object MicroBatchExecution {
val BATCH_ID_KEY = "streaming.sql.batchId"
}
object MemoryStreamDataSource extends DataSourceV2
object FakeDataSourceV2 extends DataSourceV2
| sahilTakiar/spark | sql/core/src/main/scala/org/apache/spark/sql/execution/streaming/MicroBatchExecution.scala | Scala | apache-2.0 | 26,125 |
/* sbt -- Simple Build Tool
* Copyright 2011 Mark Harrah
*/
package sbt
import java.util.regex.Pattern
import java.io.File
import Keys.{Streams, TaskStreams}
import Def.ScopedKey
import Aggregation.{KeyValue, Values}
import Types.idFun
import Highlight.{bold, showMatches}
import annotation.tailrec
object Output
{
final val DefaultTail = "> "
def last(keys: Values[_], streams: Streams, printLines: Seq[String] => Unit)(implicit display: Show[ScopedKey[_]]): Unit =
printLines( flatLines(lastLines(keys, streams))(idFun) )
def last(file: File, printLines: Seq[String] => Unit, tailDelim: String = DefaultTail): Unit =
printLines(tailLines(file, tailDelim))
def lastGrep(keys: Values[_], streams: Streams, patternString: String, printLines: Seq[String] => Unit)(implicit display: Show[ScopedKey[_]]): Unit =
{
val pattern = Pattern compile patternString
val lines = flatLines( lastLines(keys, streams) )(_ flatMap showMatches(pattern))
printLines( lines )
}
def lastGrep(file: File, patternString: String, printLines: Seq[String] => Unit, tailDelim: String = DefaultTail): Unit =
printLines(grep( tailLines(file, tailDelim), patternString) )
def grep(lines: Seq[String], patternString: String): Seq[String] =
lines flatMap showMatches(Pattern compile patternString)
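	// Applies `f` to each key's lines, prefixing them with the key's bold display name when more than one key matched.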
def flatLines(outputs: Values[Seq[String]])(f: Seq[String] => Seq[String])(implicit display: Show[ScopedKey[_]]): Seq[String] =
{
val single = outputs.size == 1
outputs flatMap { case KeyValue(key, lines) =>
val flines = f(lines)
if(!single) bold(display(key)) +: flines else flines
}
}
def lastLines(keys: Values[_], streams: Streams): Values[Seq[String]] =
{
val outputs = keys map { (kv: KeyValue[_]) => KeyValue(kv.key, lastLines(kv.key, streams)) }
outputs.filterNot(_.value.isEmpty)
}
def lastLines(key: ScopedKey[_], mgr: Streams): Seq[String] = mgr.use(key) { s => IO.readLines(s.readText( Project.fillTaskAxis(key) )) }
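	// Returns the trailing section of `file`, where sections are delimited by lines starting with `tailDelim`.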
def tailLines(file: File, tailDelim: String): Seq[String] = headLines(IO.readLines(file).reverse, tailDelim).reverse
@tailrec def headLines(lines: Seq[String], tailDelim: String): Seq[String] =
if(lines.isEmpty)
lines
else
{
val (first, tail) = lines.span { line => ! (line startsWith tailDelim) }
if(first.isEmpty) headLines(tail drop 1, tailDelim) else first
}
}
| olove/xsbt | main/src/main/scala/sbt/Output.scala | Scala | bsd-3-clause | 2,345 |
/*
* ____ ____ _____ ____ ___ ____
* | _ \\ | _ \\ | ____| / ___| / _/ / ___| Precog (R)
* | |_) | | |_) | | _| | | | | /| | | _ Advanced Analytics Engine for NoSQL Data
* | __/ | _ < | |___ | |___ |/ _| | | |_| | Copyright (C) 2010 - 2013 SlamData, Inc.
* |_| |_| \\_\\ |_____| \\____| /__/ \\____| All Rights Reserved.
*
* This program is free software: you can redistribute it and/or modify it under the terms of the
* GNU Affero General Public License as published by the Free Software Foundation, either version
* 3 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See
* the GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License along with this
* program. If not, see <http://www.gnu.org/licenses/>.
*
*/
package com.precog.surtr
import akka.dispatch.Future
import akka.pattern.ask
import akka.util.{Duration, Timeout}
import blueeyes.core.http.MimeType
import blueeyes.json._
import blueeyes.util.Clock
import com.precog.common.Path
import com.precog.common.ingest._
import com.precog.common.security._
import com.precog.muspelheim._
import com.precog.util.IOUtils
import com.precog.yggdrasil.actor._
import com.precog.yggdrasil.vfs._
import com.weiglewilczek.slf4s.Logging
import org.streum.configrity.Configuration
import java.util.UUID
import org.specs2.execute.EventuallyResults
import org.specs2.mutable._
import org.specs2.specification.Fragments
import scalaz._
import scalaz.syntax.comonad._
class NIHDBFileStoreSpec extends NIHDBTestActors with Specification with Logging {
class YggConfig extends NIHDBTestActorsConfig {
val tmpDir = IOUtils.createTmpDir("filestorespec").unsafePerformIO
val config = Configuration parse { "precog.storage.root = %s".format(tmpDir) }
val clock = blueeyes.util.Clock.System
val maxSliceSize = 10
logger.info("Running NIHDBFileStoreSpec under " + tmpDir)
}
object yggConfig extends YggConfig
implicit val timeout = new Timeout(5000)
val loremIpsum = "Lorem ipsum dolor sit amet, consectetur adipiscing elit. Sed ac dolor ac velit consequat vestibulum at id dolor. Vivamus luctus mauris ac massa iaculis a cursus leo porta. Aliquam tellus ligula, mattis quis luctus sed, tempus id ante. Donec sagittis, ante pharetra tempor ultrices, purus massa tincidunt neque, ut tempus massa nisl non libero. Aliquam tincidunt commodo facilisis. Phasellus accumsan dapibus lorem ac aliquam. Nullam vitae ullamcorper risus. Praesent quis tellus lectus."
import TestStack._
"NIHDBPlatform storage" should {
"Properly store and retrieve files" in {
val testPath = Path("/store/this/somewhere")
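      // Ingest a UTF-8 text blob, then read it back through the VFS and compare its contents.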
(projectionsActor ? IngestData(Seq((0L, StoreFileMessage(testAPIKey, testPath, Authorities(testAccount), None, EventId.fromLong(42L), FileContent(loremIpsum.getBytes("UTF-8"), MimeType("text", "plain"), RawUTF8Encoding), Clock.System.instant, StreamRef.Create(UUID.randomUUID, true)))))).copoint must beLike {
case UpdateSuccess(_) => ok
}
(projectionsActor ? Read(testPath, Version.Current)).mapTo[ReadResult].copoint must beLike {
case ReadSuccess(_, blob : BlobResource) => blob.asString.run.copoint must beSome(loremIpsum)
}
}
"Properly handle atomic version updates" in {
import ResourceError._
val testPath = Path("/versioned/blob")
val streamId = UUID.randomUUID
(projectionsActor ? IngestData(Seq((0L, IngestMessage(testAPIKey, testPath, Authorities(testAccount), Seq(IngestRecord(EventId.fromLong(42L), JString("Foo!"))), None, Clock.System.instant, StreamRef.Create(streamId, false)))))).copoint must beLike {
case UpdateSuccess(_) => ok
}
// We haven't terminated the stream yet, so it shouldn't find anything
(projectionsActor ? Read(testPath, Version.Current)).mapTo[ReadResult].copoint must beLike {
case PathOpFailure(_, NotFound(_)) => ok
}
(projectionsActor ? IngestData(Seq((1L, IngestMessage(testAPIKey, testPath, Authorities(testAccount), Seq(IngestRecord(EventId.fromLong(42L), JString("Foo!"))), None, Clock.System.instant, StreamRef.Create(streamId, true)))))).copoint must beLike {
case UpdateSuccess(_) => ok
}
(projectionsActor ? Read(testPath, Version.Current)).mapTo[ReadResult].copoint must beLike {
case ReadSuccess(_, proj: NIHDBResource) => proj.db.length.copoint mustEqual 2
}
}
}
override def map(fs: => Fragments): Fragments = fs ^ step {
logger.info("Unlocking actor")
//projectionSystem.release
IOUtils.recursiveDelete(yggConfig.tmpDir).unsafePerformIO
}
}
| precog/platform | surtr/src/test/scala/com/precog/surtr/NIHDBFileStoreSpec.scala | Scala | agpl-3.0 | 4,924 |
/***********************************************************************
* Copyright (c) 2013-2019 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.utils.stats
import java.lang.{Double => jDouble, Long => jLong}
import java.util.Date
import org.locationtech.jts.geom.Geometry
import org.geotools.feature.simple.SimpleFeatureBuilder
import org.junit.runner.RunWith
import org.locationtech.geomesa.utils.geotools.{GeoToolsDateFormat, SimpleFeatureTypes}
import org.locationtech.geomesa.utils.text.WKTUtils
import org.specs2.mutable.Specification
import org.specs2.runner.JUnitRunner
@RunWith(classOf[JUnitRunner])
class TopKTest extends Specification {
val sft = SimpleFeatureTypes.createType("topk", "name:String,score:Long,height:Double,dtg:Date,*geom:Point:srid=4326")
val builder = new SimpleFeatureBuilder(sft)
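  // 100 features split into five groups with frequencies 10, 5, 15, 20 and 50, giving a deterministic top-k ordering.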
val features1 = (0 until 100).map { i =>
if (i < 10) {
builder.addAll(Array[AnyRef]("name10", "10", "10.0", "2010-01-01T00:00:00.000Z", "POINT(10 0)"))
} else if (i < 15) {
builder.addAll(Array[AnyRef]("name15", "15", "15.0", "2015-01-01T00:00:00.000Z", "POINT(15 0)"))
} else if (i < 30) {
builder.addAll(Array[AnyRef]("name30", "30", "30.0", "2030-01-01T00:00:00.000Z", "POINT(30 0)"))
} else if (i < 50) {
builder.addAll(Array[AnyRef]("name50", "50", "50.0", "2050-01-01T00:00:00.000Z", "POINT(50 0)"))
} else {
builder.addAll(Array[AnyRef]("name100", "100", "100.0", "2100-01-01T00:00:00.000Z", "POINT(100 0)"))
}
builder.buildFeature(i.toString)
}
val features2 = (0 until 100).map { i =>
if (i < 10) {
builder.addAll(Array[AnyRef]("name10-2", "210", "10.2", "2010-01-01T02:00:00.000Z", "POINT(10 2)"))
} else if (i < 15) {
builder.addAll(Array[AnyRef]("name15-2", "215", "15.2", "2015-01-01T02:00:00.000Z", "POINT(15 2)"))
} else if (i < 30) {
builder.addAll(Array[AnyRef]("name30-2", "230", "30.2", "2030-01-01T02:00:00.000Z", "POINT(30 2)"))
} else if (i < 50) {
builder.addAll(Array[AnyRef]("name50-2", "250", "50.2", "2050-01-01T02:00:00.000Z", "POINT(50 2)"))
} else {
builder.addAll(Array[AnyRef]("name100-2", "2100", "100.2", "2100-01-01T02:00:00.000Z", "POINT(100 2)"))
}
builder.buildFeature(i.toString)
}
def createStat[T](attribute: String): TopK[T] = Stat(sft, s"TopK($attribute)").asInstanceOf[TopK[T]]
def stringStat = createStat[String]("name")
def longStat = createStat[jLong]("score")
def doubleStat = createStat[jDouble]("height")
def dateStat = createStat[Date]("dtg")
def geomStat = createStat[Geometry]("geom")
"TopK stat" should {
"work with strings" >> {
"be empty initially" >> {
val stat = stringStat
stat.isEmpty must beTrue
stat.topK(10) must beEmpty
}
"correctly calculate values" >> {
val stat = stringStat
features1.foreach(stat.observe)
stat.isEmpty must beFalse
stat.size mustEqual 5
stat.topK(10).toSeq mustEqual Seq(("name100", 50), ("name50", 20), ("name30", 15), ("name10", 10), ("name15", 5))
}
"serialize and deserialize" >> {
val stat = stringStat
features1.foreach(stat.observe)
val packed = StatSerializer(sft).serialize(stat)
val unpacked = StatSerializer(sft).deserialize(packed)
unpacked must beAnInstanceOf[TopK[String]]
unpacked.asInstanceOf[TopK[String]].size mustEqual stat.size
unpacked.asInstanceOf[TopK[String]].property mustEqual stat.property
unpacked.asInstanceOf[TopK[String]].toJson mustEqual stat.toJson
unpacked.isEquivalent(stat) must beTrue
}
"serialize and deserialize empty stats" >> {
val stat = stringStat
val packed = StatSerializer(sft).serialize(stat)
val unpacked = StatSerializer(sft).deserialize(packed)
unpacked must beAnInstanceOf[TopK[String]]
unpacked.asInstanceOf[TopK[String]].size mustEqual stat.size
unpacked.asInstanceOf[TopK[String]].property mustEqual stat.property
unpacked.asInstanceOf[TopK[String]].toJson mustEqual stat.toJson
unpacked.isEquivalent(stat) must beTrue
}
"deserialize as immutable value" >> {
val stat = stringStat
features1.foreach(stat.observe)
val packed = StatSerializer(sft).serialize(stat)
val unpacked = StatSerializer(sft).deserialize(packed, immutable = true)
unpacked must beAnInstanceOf[TopK[String]]
unpacked.asInstanceOf[TopK[String]].size mustEqual stat.size
unpacked.asInstanceOf[TopK[String]].property mustEqual stat.property
unpacked.asInstanceOf[TopK[String]].toJson mustEqual stat.toJson
unpacked.isEquivalent(stat) must beTrue
unpacked.clear must throwAn[Exception]
unpacked.+=(stat) must throwAn[Exception]
unpacked.observe(features1.head) must throwAn[Exception]
unpacked.unobserve(features1.head) must throwAn[Exception]
}
"combine two TopKs" >> {
val stat = stringStat
val stat2 = stringStat
features1.foreach(stat.observe)
features2.foreach(stat2.observe)
stat2.size mustEqual 5
stat2.topK(10).toSeq mustEqual Seq(("name100-2", 50), ("name50-2", 20), ("name30-2", 15), ("name10-2", 10), ("name15-2", 5))
stat += stat2
stat.size mustEqual 10
stat.topK(10).toSeq mustEqual Seq(("name100", 50), ("name100-2", 50), ("name50", 20), ("name50-2", 20),
("name30", 15), ("name30-2", 15), ("name10", 10), ("name10-2", 10), ("name15", 5), ("name15-2", 5))
stat2.size mustEqual 5
stat2.topK(10).toSeq mustEqual Seq(("name100-2", 50), ("name50-2", 20), ("name30-2", 15), ("name10-2", 10), ("name15-2", 5))
}
"clear" >> {
val stat = stringStat
features1.foreach(stat.observe)
stat.clear()
stat.isEmpty must beTrue
stat.size mustEqual 0
stat.topK(10).toSeq must beEmpty
}
}
"work with longs" >> {
"correctly calculate values" >> {
val stat = longStat
features1.foreach(stat.observe)
stat.isEmpty must beFalse
stat.size mustEqual 5
stat.topK(10).toSeq mustEqual Seq((100L, 50), (50L, 20), (30L, 15), (10L, 10), (15L, 5))
}
"serialize and deserialize" >> {
val stat = longStat
features1.foreach(stat.observe)
val packed = StatSerializer(sft).serialize(stat)
val unpacked = StatSerializer(sft).deserialize(packed)
        unpacked must beAnInstanceOf[TopK[jLong]]
        unpacked.asInstanceOf[TopK[jLong]].size mustEqual stat.size
        unpacked.asInstanceOf[TopK[jLong]].property mustEqual stat.property
        unpacked.asInstanceOf[TopK[jLong]].toJson mustEqual stat.toJson
unpacked.isEquivalent(stat) must beTrue
}
}
"work with doubles" >> {
"correctly calculate values" >> {
val stat = doubleStat
features1.foreach(stat.observe)
stat.isEmpty must beFalse
stat.size mustEqual 5
stat.topK(10).toSeq mustEqual Seq((100.0, 50), (50.0, 20), (30.0, 15), (10.0, 10), (15.0, 5))
}
"serialize and deserialize" >> {
val stat = doubleStat
features1.foreach(stat.observe)
val packed = StatSerializer(sft).serialize(stat)
val unpacked = StatSerializer(sft).deserialize(packed)
        unpacked must beAnInstanceOf[TopK[jDouble]]
        unpacked.asInstanceOf[TopK[jDouble]].size mustEqual stat.size
        unpacked.asInstanceOf[TopK[jDouble]].property mustEqual stat.property
        unpacked.asInstanceOf[TopK[jDouble]].toJson mustEqual stat.toJson
unpacked.isEquivalent(stat) must beTrue
}
}
"work with dates" >> {
def toDate(year: Int) = java.util.Date.from(java.time.LocalDateTime.parse(f"2$year%03d-01-01T00:00:00.000Z", GeoToolsDateFormat).toInstant(java.time.ZoneOffset.UTC))
"correctly calculate values" >> {
val stat = dateStat
features1.foreach(stat.observe)
stat.isEmpty must beFalse
stat.size mustEqual 5
stat.topK(10).toSeq mustEqual Seq((toDate(100), 50), (toDate(50), 20), (toDate(30), 15), (toDate(10), 10), (toDate(15), 5))
}
"serialize and deserialize" >> {
val stat = dateStat
features1.foreach(stat.observe)
val packed = StatSerializer(sft).serialize(stat)
val unpacked = StatSerializer(sft).deserialize(packed)
        unpacked must beAnInstanceOf[TopK[Date]]
        unpacked.asInstanceOf[TopK[Date]].size mustEqual stat.size
        unpacked.asInstanceOf[TopK[Date]].property mustEqual stat.property
        unpacked.asInstanceOf[TopK[Date]].toJson mustEqual stat.toJson
unpacked.isEquivalent(stat) must beTrue
}
}
"work with geometries" >> {
def toGeom(lon: Int) = WKTUtils.read(s"POINT($lon 0)")
"correctly calculate values" >> {
val stat = geomStat
features1.foreach(stat.observe)
stat.isEmpty must beFalse
stat.size mustEqual 5
stat.topK(10).toSeq mustEqual Seq((toGeom(100), 50), (toGeom(50), 20), (toGeom(30), 15), (toGeom(10), 10), (toGeom(15), 5))
}
"serialize and deserialize" >> {
val stat = geomStat
features1.foreach(stat.observe)
val packed = StatSerializer(sft).serialize(stat)
val unpacked = StatSerializer(sft).deserialize(packed)
        unpacked must beAnInstanceOf[TopK[Geometry]]
        unpacked.asInstanceOf[TopK[Geometry]].size mustEqual stat.size
        unpacked.asInstanceOf[TopK[Geometry]].property mustEqual stat.property
        unpacked.asInstanceOf[TopK[Geometry]].toJson mustEqual stat.toJson
unpacked.isEquivalent(stat) must beTrue
}
}
}
}
| elahrvivaz/geomesa | geomesa-utils/src/test/scala/org/locationtech/geomesa/utils/stats/TopKTest.scala | Scala | apache-2.0 | 10,183 |
package heisenberg
import java.io.StringWriter
import com.fasterxml.jackson.databind.ObjectMapper
import com.heisenberg.impl.StartBuilderImpl
import com.heisenberg.impl.instance.ProcessInstanceImpl
import io.gatling.core.Predef._
import io.gatling.http.Predef._
import org.bson.types.ObjectId
import scala.concurrent.duration._
import com.heisenberg.load._
class GatlingRestDemo extends Simulation {
val httpConf = http
.baseURL("http://107.178.251.184")
.acceptHeader("application/json")
val headers = Map(
"Cache-Control" -> """no-cache""",
"Content-Type" -> """application/json; charset=UTF-8""",
"Pragma" -> """no-cache"""
)
val mapper = new ObjectMapper()
def writeValue(o: Any) = {
val sw = new StringWriter()
mapper.writeValue(sw, o)
sw.toString()
}
def readValue[T](str: String, clazz: Class[T]): T = {
mapper.readValue(str, clazz)
}
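  // Feeder that builds a fresh start-process request body for every virtual-user iteration.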
val userDataFeeder = new Feeder[String] {
def hasNext = true
def next() = {
try {
val startProcessInstanceRequest =
new StartBuilderImpl().processDefinitionName("load")
Map("json" -> writeValue(startProcessInstanceRequest))
} catch {
case e:Throwable => e.printStackTrace; Map()
}
}
}
val scn = scenario("Simple Create and Get")
.during(10 seconds) {
feed(userDataFeeder)
.exec(
http("start process")
.post("/start/")
.headers(headers)
.body(StringBody("${json}")).asJSON
.check(status.is(200))
.check(regex("""(.*)""").saveAs("processInstance"))
).exec{ session =>
val processInstanceStr = session.get("processInstance").as[String]
val processInstance = LoadHelper.parseJson(processInstanceStr)
val id = processInstance.getId()
val activityId = processInstance.findActivityInstanceByActivityDefinitionId("subTask").getId()
val stringBody = s"""{"processInstanceId": "${id}", "activityInstanceId": "${activityId}" }"""
session.set("stringBody", stringBody)
}.exec(
http("check activity")
.post("/message/")
.headers(headers)
.body(StringBody("${stringBody}")).asJSON
.check(status.is(200))
          .check(regex("""\{"id":"([^\"]*)","processDefinitionId":"([^\"]*)","start":"([^\"]*)","end":"([^\"]*)",(.)*\}"""))
)
}
setUp(scn.inject(rampUsers(2) over 10.seconds)).protocols(httpConf)
}
| whitewalter/breaking-load | user-files/simulations/heisenberg/GatlingRestDemo.scala | Scala | apache-2.0 | 2,385 |
package controllers
import models.Menu
class MenuController {
}
object MenuData {
/**
* Some fake menu data so we can simulate retrievals.
*/
def getMenu(name: String): List[Menu] = name match {
case "footer" => List(
Menu(0, "About", "#aboutFooter"),
Menu(1, "Contact", "#contactFooter"),
Menu(2, "More", "#moreFooter"))
case "sidebar" => List(
Menu(0, "About", "#aboutSidebar"),
Menu(1, "Contact", "#contactSidebar"),
Menu(2, "More", "#moreSidebar"))
case "header" | _ => List(
Menu(0 ,"About", "#about"),
Menu(1, "Contact", "#contact"),
Menu(2, "More", "#more"))
}
} | gregoryboucher/resume-app | src/main/scala/app/controllers/MenuController.scala | Scala | mit | 617 |
package vulkan.wrapper.registry.command
import vulkan.wrapper.registry.controller.{VulkanController, VulkanFeature}
import vulkan.wrapper.registry.controller.controll.{VulkanControll, VulkanControllData, VulkanControllRequire}
import vulkan.wrapper.registry.controller.controlled.VulkanControlled
import vulkan.wrapper.registry.venum.VulkanEnumNormalEnum
import vulkan.wrapper.registry.vtype.VulkanType
import vulkan.wrapper.registry.{Registry, RegistryComponent, _}
import scala.xml.Node
class VulkanCommandNode(registry: Registry, node: Node) extends VulkanCommand(registry,node){
override val data: VulkanCommandData = new VulkanCommandData(registry,this,node)
override val name: String = (node \\ "proto").head @\\\\ "name"
}
object VulkanCommandNode {
def apply(registry: Registry): VulkanComponentMappedData[VulkanCommandNode] =
VulkanComponentMappedData(registry,(registry.xml \\ "commands" \\ "command").filter(_.attribute("alias").isEmpty).map(new VulkanCommandNode(registry,_)))
}
| MrInformatic/VulkanWrapper | src/vulkan/wrapper/registry/command/VulkanCommandNode.scala | Scala | mit | 1,001 |
package doodle
package image
package examples
import doodle.core._
import doodle.image.Image
import doodle.syntax._
import doodle.random._
import cats.syntax.all._
object Windswept {
import PathElement._
def randomColor(meanHue: Angle) =
for {
hue <- Random.normal(meanHue.toDegrees, 10.0) map (_.degrees)
} yield Color.hsl(hue, 0.8, 0.6)
val leafGreen: Random[Color] = randomColor(80.degrees)
val emeraldGreen: Random[Color] = randomColor(110.degrees)
def square(fill: Color): Image =
Image.rectangle(25, 25) fillColor fill strokeWidth 0.0
def randomSquare(fill: Random[Color]): Random[Image] =
(fill, fill, fill, fill) mapN { (f1, f2, f3, f4) =>
(square(f1) beside square(f2)) above (square(f3) beside square(f4))
}
val leafSquare = randomSquare(leafGreen)
val emeraldSquare = randomSquare(emeraldGreen)
def randomColumn(one: Random[Image],
two: Random[Image],
three: Random[Image],
four: Random[Image],
five: Random[Image]): Random[Image] =
(one, two, three, four, five) mapN { _ above _ above _ above _ above _ }
val columnOne =
randomColumn(emeraldSquare,
emeraldSquare,
emeraldSquare,
emeraldSquare,
emeraldSquare)
val columnTwo =
randomColumn(emeraldSquare,
emeraldSquare,
leafSquare,
emeraldSquare,
emeraldSquare)
val columnThree =
randomColumn(emeraldSquare,
leafSquare,
emeraldSquare,
leafSquare,
emeraldSquare)
val columnFour =
randomColumn(leafSquare,
emeraldSquare,
emeraldSquare,
emeraldSquare,
leafSquare)
val singleRepeat: Random[Image] =
(columnOne, columnTwo, columnThree, columnFour, columnThree, columnTwo) mapN {
_ beside _ beside _ beside _ beside _ beside _
}
val pattern: Random[Image] =
(singleRepeat, singleRepeat, singleRepeat, columnOne) mapN {
_ beside _ beside _ beside _
}
def randomPoint(x: Random[Double], y: Random[Double]): Random[Point] =
(x, y) mapN { (x, y) =>
Point.cartesian(x, y)
}
def normalPoint(point: Point, stdDev: Double = 50): Random[Point] =
randomPoint(Random.normal(point.x, stdDev), Random.normal(point.y, stdDev))
def randomBezier(cp1: Point,
cp2: Point,
end: Point,
stdDev: Double): Random[BezierCurveTo] = {
(normalPoint(cp1, stdDev),
normalPoint(cp2, stdDev),
normalPoint(end, stdDev)) mapN { (cp1, cp2, end) =>
BezierCurveTo(cp1, cp2, end)
}
}
val tendril: Random[Image] =
for {
stroke <- randomColor(25.degrees) map (_.fadeOut(0.4.normalized))
offset = -425
start = Point.cartesian(offset.toDouble, 0)
end <- Random.normal(800, 30)
} yield
Image.openPath(Seq(
moveTo(start),
lineTo(Point.cartesian(end + offset, 0)))) strokeColor stroke strokeWidth 1.0
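  // Stack randomised tendrils at one-unit vertical offsets (from -50 to 50) on top of an initial tendril.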
val tendrils: Random[Image] =
(-50 to 50).foldLeft(tendril) { (randomImage, i) =>
for {
accum <- randomImage
t <- tendril
} yield (t at (0, i.toDouble)) on accum
}
def image =
(for {
t1 <- tendrils
t2 <- tendrils
t3 <- tendrils
p1 <- pattern
p2 <- pattern
p3 <- pattern
} yield (t1 on p1) above (t2 on p2) above (t3 on p3)).run
}
| underscoreio/doodle | image/shared/src/main/scala/doodle/image/examples/Windswept.scala | Scala | apache-2.0 | 3,567 |
import org.junit.runner._
import org.specs2.mutable._
import org.specs2.runner._
import play.api.libs.json.{JsValue, Json}
import play.api.mvc
import play.api.test.Helpers._
import play.api.test._
import scala.concurrent.Future
@RunWith(classOf[JUnitRunner])
class FunctionalTest extends Specification {
val username = "username"
val password = "password"
"Application" should {
"send OK on a signup request" in new WithApplication {
signUp(username, password)
}
"not allow login if a user that doesn't exist" in new WithApplication {
login(username, password, BAD_REQUEST)
}
"allow login of a signed up user" in new WithApplication {
signUp(username, password)
login(username, password)
}
"login should return a jwtToken" in new WithApplication {
signUp(username, password)
val loginJson = login(username, password)
      (loginJson \ "jwtToken").as[String]
}
"not allow accessing the secret page without a jwtToken" in new WithApplication {
val req = post("/secret")
val Some(result) = route(req)
status(result) must equalTo(BAD_REQUEST)
}
"not allow accessing the secret page with an invalid jwtToken" in new WithApplication {
val someRandomToken = "some.random.token"
secretPage(someRandomToken, FORBIDDEN)
}
"allow accessing the secret page with a valid jwtToken" in new WithApplication {
signUp(username, password)
val loginJson = login(username, password)
      val jwtToken = (loginJson \ "jwtToken").as[String]
secretPage(jwtToken)
}
"not allow accessing the logout page without a jwtToken" in new WithApplication {
val req = post("/logout")
val Some(result) = route(req)
status(result) must equalTo(BAD_REQUEST)
}
"not allow accessing the logout page with an invalid jwtToken" in new WithApplication {
val someRandomToken = "some.random.token"
logout(someRandomToken, FORBIDDEN)
}
"allow accessing the logout page with a valid jwtToken" in new WithApplication {
signUp(username, password)
val loginJson = login(username, password)
      val jwtToken = (loginJson \ "jwtToken").as[String]
logout(jwtToken)
}
"not allow accessing the logout page twice with the same jwtToken" in new WithApplication {
signUp(username, password)
val loginJson = login(username, password)
      val jwtToken = (loginJson \ "jwtToken").as[String]
logout(jwtToken)
logout(jwtToken, expectedStatus = FORBIDDEN)
}
"not allow accessing the secret page after logging out" in new WithApplication {
signUp(username, password)
val loginJson = login(username, password)
      val jwtToken = (loginJson \ "jwtToken").as[String]
secretPage(jwtToken)
logout(jwtToken)
secretPage(jwtToken, expectedStatus = FORBIDDEN)
}
}
def signUp(username: String, password: String, expectedStatus: Int = CREATED) = {
val signUpRequest = postJson("/signup", Json.obj("username" -> username, "password" -> password))
val Some(result) = route(signUpRequest)
status(result) must equalTo(expectedStatus)
}
def login(username: String, password: String, expectedStatus: Int = OK) = {
val loginRequest = postJson("/login", Json.obj("username" -> username, "password" -> password))
val Some(result) = route(loginRequest)
status(result) must equalTo(expectedStatus)
jsonResult(result)
}
def secretPage(jwtToken: String, expectedStatus: Int = OK) = {
val secretPageRequest = postJson("/secret", Json.obj("jwtToken" -> jwtToken))
val Some(result) = route(secretPageRequest)
status(result) must equalTo(expectedStatus)
jsonResult(result)
}
def logout(jwtToken: String, expectedStatus: Int = ACCEPTED) = {
val secretPageRequest = postJson("/logout", Json.obj("jwtToken" -> jwtToken))
val Some(result) = route(secretPageRequest)
status(result) must equalTo(expectedStatus)
}
def postJson(uri: String, body: JsValue) = FakeRequest(POST, uri, FakeHeaders(), body)
def post(uri: String) = FakeRequest(POST, uri)
def jsonResult(result: Future[mvc.Result]): JsValue = {
contentType(result) must beSome("application/json")
Json.parse(contentAsString(result))
}
}
| GMadorell/play-jwt | test/FunctionalTest.scala | Scala | mit | 4,293 |
package io.github.oxlade39.storrent.peer
import org.scalatest.{BeforeAndAfterAll, WordSpecLike}
import akka.testkit.{TestProbe, ImplicitSender, TestKit}
import akka.actor._
import org.scalatest.MustMatchers
import io.github.oxlade39.storrent.test.util.{ForwardingParent, FileOps}
import akka.io.{Tcp, IO}
import java.net.InetSocketAddress
import io.github.oxlade39.storrent.core.Torrent
import concurrent.duration._
/**
* @author dan
*/
class PeerConnectionTest extends TestKit(ActorSystem("PeerConnectionTest"))
with WordSpecLike with BeforeAndAfterAll with ImplicitSender with MustMatchers with FileOps {
import PeerConnectionTest._
import io.github.oxlade39.storrent.piece.PieceManager._
override def afterAll(): Unit = {
system.terminate()
}
"PeerConnection" must {
"attempt to handshake with peer" in {
val fakePeer = new FakePeer
val peer: Peer = new Peer(fakePeer.peerAddress)
val peerConnection = system.actorOf(PeerConnection.props(
peer,
Torrent("examples" / "ubuntu.torrent"),
TestProbe().ref),
"under-test")
fakePeer.connectsWithPeer(peer)
val hs = fakePeer.receivesHandshake()
val myPeerId: PeerId = PeerId()
fakePeer.sendsHandshake(hs.copy(peerId = myPeerId))
}
"notifies piece manager with bitfield of new peer" in {
val fakePeer = new FakePeer
val fakePieceManager = TestProbe("fakePieceManager")
val peer: Peer = Peer(fakePeer.peerAddress)
val torrent: Torrent = Torrent("examples" / "ubuntu.torrent")
val peerConnection = system.actorOf(PeerConnection.props(peer, torrent, fakePieceManager.ref))
fakePeer.connectsWithPeer(peer)
val hs = fakePeer.receivesHandshake()
val myPeerId: PeerId = PeerId()
fakePeer.sendsHandshake(hs.copy(peerId = myPeerId))
val allPieces = Bitfield(torrent.pieceHashes.map(_ => true))
fakePeer.sends(allPieces) // the fakePeer has all the pieces
val received = fakePieceManager.expectMsgType[PeerHasPieces]
assert(received == PeerHasPieces(peer.id, allPieces))
}
}
}
object PeerConnectionTest {
def fakePeerProps(remoteControl: ActorRef)= Props(new FakePeerActor(remoteControl))
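  // Test double for a remote peer: binds a real TCP socket and is driven from a TestProbe.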
class FakePeer(implicit val system: ActorSystem) {
val remoteControl = TestProbe("fake-peer-remote")
val actor = system.actorOf(
Props(new ForwardingParent(fakePeerProps(remoteControl.ref), remoteControl.ref)))
val peerAddress = remoteControl.expectMsgType[Tcp.Bound].localAddress
def sends(m: Message) = {
remoteControl.send(actor, m)
}
def sendsHandshake(hs: Handshake) = {
remoteControl.send(actor, hs)
}
def connectsWithPeer(p: Peer) = {
remoteControl.expectMsgType[Tcp.Connected]
}
def receivesHandshake(): Handshake = {
val received = remoteControl.expectMsgType[Tcp.Received]
val handshake: Option[Handshake] = Handshake.parse(received.data)
handshake.getOrElse(throw new java.lang.AssertionError(s"Couldn't parse message [${received.data.utf8String}}] into handshake"))
}
}
class FakePeerActor(remoteControl: ActorRef) extends Actor with ActorLogging {
import context._
IO(Tcp) ! Tcp.Bind(self, new InetSocketAddress(0))
var connection = Option.empty[ActorRef]
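    // Messages from the remote-control probe are written out over the TCP connection; network events are relayed back to the probe.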
def receive = {
case bound: Tcp.Bound => parent ! bound
case msg: Message if sender == remoteControl =>
log.debug("sending message {} to {}", msg, connection)
connection foreach (_ ! Tcp.Write(msg.encode))
case hs: Handshake if sender == remoteControl =>
connection foreach (_ ! Tcp.Write(hs.encoded))
case Tcp.Closed => stop(self)
case Tcp.Close => stop(self)
case c: Tcp.Connected =>
connection = Some(sender)
sender ! Tcp.Register(self)
remoteControl forward c
case other => remoteControl forward other
}
}
} | oxlade39/STorrent | src/test/scala/io/github/oxlade39/storrent/peer/PeerConnectionTest.scala | Scala | apache-2.0 | 3,927 |
package coursier.launcher
import java.io.File
import java.nio.file.{Path, Paths}
import java.util.jar.{Attributes => JarAttributes}
import java.util.zip.ZipEntry
import coursier.launcher.internal.Windows
import dataclass._
sealed abstract class Parameters extends Product with Serializable {
def isNative: Boolean = false
}
object Parameters {
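  /** Parameters for building a single assembly (fat JAR) from the given files. */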
@data class Assembly(
files: Seq[File] = Nil,
mainClass: Option[String] = None,
attributes: Seq[(JarAttributes.Name, String)] = Nil,
rules: Seq[MergeRule] = MergeRule.default,
preambleOpt: Option[Preamble] = Some(Preamble()),
extraZipEntries: Seq[(ZipEntry, Array[Byte])] = Nil,
@since("2.0.0-RC6-27")
baseManifest: Option[Array[Byte]] = None
) extends Parameters {
def withMainClass(mainClass: String): Assembly =
withMainClass(Some(mainClass))
def withPreamble(preamble: Preamble): Assembly =
withPreambleOpt(Some(preamble))
def finalAttributes: Seq[(JarAttributes.Name, String)] =
mainClass
.map(c => JarAttributes.Name.MAIN_CLASS -> c)
.toSeq ++
attributes
}
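  /** Parameters for building a bootstrap launcher JAR that embeds the given class loader contents. */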
@data class Bootstrap(
content: Seq[ClassLoaderContent],
mainClass: String,
javaProperties: Seq[(String, String)] = Nil,
bootstrapResourcePathOpt: Option[String] = None,
deterministic: Boolean = true,
preambleOpt: Option[Preamble] = Some(Preamble()),
proguarded: Boolean = true,
disableJarChecking: Option[Boolean] = None,
hybridAssembly: Boolean = false,
extraZipEntries: Seq[(ZipEntry, Array[Byte])] = Nil,
@since("2.0.4")
python: Boolean = false
) extends Parameters {
def withPreamble(preamble: Preamble): Bootstrap =
withPreambleOpt(Some(preamble))
def hasResources: Boolean =
content.exists { c =>
c.entries.exists {
case _: ClassPathEntry.Resource => true
case _ => false
}
}
def finalDisableJarChecking: Boolean =
disableJarChecking.getOrElse(hasResources)
def finalPreambleOpt: Option[Preamble] =
if (finalDisableJarChecking)
preambleOpt.map { p =>
p.withJavaOpts("-Dsun.misc.URLClassPath.disableJarChecking" +: p.javaOpts)
}
else
preambleOpt
}
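  /** Parameters for building a small JAR whose manifest references the given class path. */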
@data class ManifestJar(
classpath: Seq[File],
mainClass: String,
preambleOpt: Option[Preamble] = Some(Preamble())
) extends Parameters {
def withPreamble(preamble: Preamble): ManifestJar =
withPreambleOpt(Some(preamble))
}
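  /** Parameters for building a GraalVM native image of the given main class. */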
@data class NativeImage(
mainClass: String,
fetch: Seq[String] => Seq[File],
jars: Seq[File] = Nil,
graalvmVersion: Option[String] = None,
graalvmJvmOptions: Seq[String] = NativeImage.defaultGraalvmJvmOptions,
graalvmOptions: Seq[String] = Nil,
javaHome: Option[File] = None, // needs a "JVMCI-enabled JDK" (like GraalVM)
nameOpt: Option[String] = None,
verbosity: Int = 0,
intermediateAssembly: Boolean = false,
windowsPathExtensions: Option[Seq[String]] =
if (Windows.isWindows) Some(Windows.pathExtensions) else None,
isWindows: Boolean = Windows.isWindows
) extends Parameters {
override def isNative: Boolean = true
def withJavaHome(home: File): NativeImage =
withJavaHome(Some(home))
}
object NativeImage {
def defaultGraalvmJvmOptions: Seq[String] =
Seq("-Xmx3g")
}
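  /** Parameters for building a Scala Native executable. */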
@data class ScalaNative(
fetch: Seq[String] => Seq[File],
mainClass: String,
nativeVersion: String,
jars: Seq[File] = Nil,
options: ScalaNative.ScalaNativeOptions = ScalaNative.ScalaNativeOptions(),
log: String => Unit = s => System.err.println(s),
verbosity: Int = 0
) extends Parameters {
override def isNative: Boolean = true
}
object ScalaNative {
@data class ScalaNativeOptions(
gcOpt: Option[String] = None,
modeOpt: Option[String] = None,
linkStubs: Boolean = true,
clangOpt: Option[Path] = None,
clangppOpt: Option[Path] = None,
prependDefaultLinkingOptions: Boolean = true,
linkingOptions: Seq[String] = Nil,
prependDefaultCompileOptions: Boolean = true,
prependLdFlags: Boolean = true,
compileOptions: Seq[String] = Nil,
targetTripleOpt: Option[String] = None,
nativeLibOpt: Option[Path] = None,
workDirOpt: Option[Path] = None,
keepWorkDir: Boolean = false
)
}
/** For test purposes */
@data class DummyNative() extends Parameters {
override def isNative: Boolean = true
}
}
| alexarchambault/coursier | modules/launcher/src/main/scala/coursier/launcher/Parameters.scala | Scala | apache-2.0 | 4,518 |
package org.dberg.hubot.listeners
import org.dberg.hubot.Hubot
import org.dberg.hubot.event.Event
import org.dberg.hubot.listeners.Listener.CallbackSuccess
import org.dberg.hubot.models._
class TestListener(hubot: Hubot) extends Listener(hubot, ListenerType.Hear) {
val callback: Callback = {
case message @ Body(r"listen1\\s+(.*)$phrase") =>
val lastMessage = brain.get[String]("lastmessage").getOrElse("")
val resp = "scalabot heard you mention " + phrase + " !, the last thing you said was " + lastMessage
brain.set[String]("lastmessage", message.body)
logger.debug("Running callback for listener TestListener, sending response " + resp)
event.emit(Event("testid", Map("test" -> "value")))
robot.send(Message(message.user, resp, message.messageType))
}
val helpString = Some("listen1 -> Responds to anything and repeats it ")
}
| denen99/hubot-scala | src/main/scala/org/dberg/hubot/listeners/TestListener.scala | Scala | apache-2.0 | 880 |
/**
* Copyright (C) 2007 Orbeon, Inc.
*
* This program is free software; you can redistribute it and/or modify it under the terms of the
* GNU Lesser General Public License as published by the Free Software Foundation; either version
* 2.1 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Lesser General Public License for more details.
*
* The full text of the license is available at http://www.gnu.org/copyleft/lesser.html
*/
package org.orbeon.oxf.xforms.control
import org.orbeon.oxf.xforms.control.XFormsControl.MutableControlProperty
import org.orbeon.oxf.xforms.analysis.controls.{StaticLHHASupport, LHHAAnalysis}
import org.orbeon.oxf.xml.XMLUtils
import org.orbeon.oxf.xforms.{XFormsConstants, XFormsUtils}
import org.orbeon.oxf.xforms.control.LHHASupport.LHHAProperty
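// Label/help/hint property backed by the control's single static LHH element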
class MutableLHHProperty(control: XFormsControl, lhhaType: XFormsConstants.LHHA, supportsHTML: Boolean)
extends MutableLHHAProperty(control, lhhaType, supportsHTML) {
protected def evaluateValueImpl =
evaluateOne(control.staticControl.asInstanceOf[StaticLHHASupport].lhh(lhhaType.name).get)
}
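// Alert property: evaluates all currently active alerts and combines multiple values into an HTML list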
class MutableAlertProperty(control: XFormsSingleNodeControl, lhhaType: XFormsConstants.LHHA, supportsHTML: Boolean)
extends MutableLHHAProperty(control, lhhaType, supportsHTML) {
protected def evaluateValueImpl = {
val activeAlertsOpt = LHHASupport.gatherActiveAlerts(control)
val valuesWithIsHTML =
for {
(_, activeAlerts) ← activeAlertsOpt.toList
activeAlert ← activeAlerts
valueWithIsHTML ← evaluateOne(activeAlert)
} yield
valueWithIsHTML
if (valuesWithIsHTML.size < 2)
valuesWithIsHTML.headOption
else {
// Combine multiple values as a single HTML value using ul/li
val combined = (
valuesWithIsHTML
map { case (value, isHTML) ⇒ if (! isHTML) XMLUtils.escapeXMLMinimal(value) else value }
mkString ("<ul><li>", "</li><li>", "</li></ul>")
)
Some(combined, true)
}
}
}
// Mutable LHHA property
abstract class MutableLHHAProperty(control: XFormsControl, lhhaType: XFormsConstants.LHHA, supportsHTML: Boolean)
extends MutableControlProperty[String]
with LHHAProperty {
private var _isHTML = false
protected def isRelevant = control.isRelevant
protected def wasRelevant = control.wasRelevant
protected def evaluateValue() =
evaluateValueImpl match {
case Some((value: String, isHTML)) ⇒
_isHTML = isHTML
value
case _ ⇒
_isHTML = false
null
}
def escapedValue() = {
val rawValue = value()
if (_isHTML)
XFormsControl.getEscapedHTMLValue(control.getLocationData, rawValue)
else
XMLUtils.escapeXMLMinimal(rawValue)
}
def isHTML = {
value()
_isHTML
}
protected override def markDirty() {
super.markDirty()
_isHTML = false
}
protected def requireUpdate =
control.containingDocument.getXPathDependencies.requireLHHAUpdate(lhhaType.name, control.getPrefixedId)
protected def notifyCompute() =
control.containingDocument.getXPathDependencies.notifyComputeLHHA()
protected def notifyOptimized() =
control.containingDocument.getXPathDependencies.notifyOptimizeLHHA()
override def copy: MutableLHHAProperty =
super.copy.asInstanceOf[MutableLHHAProperty]
protected def evaluateValueImpl: Option[(String, Boolean)]
// Evaluate the value of a LHHA related to this control
// Can return null
protected def evaluateOne(lhhaAnalysis: LHHAAnalysis) = {
val contextStack = control.getContextStack
val lhhaElement = lhhaAnalysis.element
val tempContainsHTML = Array(false)
val result =
if (lhhaAnalysis.isLocal) {
// LHHA is direct child of control, evaluate within context
contextStack.setBinding(control.bindingContext)
contextStack.pushBinding(lhhaElement, control.effectiveId, lhhaAnalysis.scope)
val result = Option(
XFormsUtils.getElementValue(
control.lhhaContainer,
contextStack,
control.effectiveId,
lhhaElement,
supportsHTML,
lhhaAnalysis.defaultToHTML,
tempContainsHTML)
)
contextStack.popBinding()
result
} else {
// LHHA is somewhere else, assumed as a child of xf:* or xxf:*
// TODO: This whole code sounds insanely complicated.
// LHHA elements should be present in the tree and we should 1) resolve them and 2) obtain their context.
// Find context object for XPath evaluation
val contextElement = lhhaElement.getParent
val contextStaticId = XFormsUtils.getElementId(contextElement)
val contextEffectiveId =
if ((contextStaticId eq null) || contextStaticId == "#document") {
// Assume we are at the top-level
contextStack.resetBindingContext()
control.container.getFirstControlEffectiveId
} else {
// Not at top-level, find containing object
val ancestorContextControl =
findAncestorContextControl(contextStaticId, XFormsUtils.getElementId(lhhaElement))
if (ancestorContextControl ne null) {
contextStack.setBinding(ancestorContextControl.bindingContext)
ancestorContextControl.effectiveId
} else
null
}
if (contextEffectiveId ne null) {
// Push binding relative to context established above and evaluate
contextStack.pushBinding(lhhaElement, contextEffectiveId, lhhaAnalysis.scope)
val result = Option(
XFormsUtils.getElementValue(
control.container,
contextStack,
control.effectiveId,
lhhaElement,
supportsHTML,
lhhaAnalysis.defaultToHTML,
tempContainsHTML
)
)
contextStack.popBinding()
result
} else
// Do as if there was no LHHA
None
}
result map (_ → tempContainsHTML(0))
}
private def findAncestorContextControl(contextStaticId: String, lhhaStaticId: String): XFormsControl = {
// NOTE: LHHA element must be in the same resolution scope as the current control (since @for refers to @id)
val lhhaScope = control.getResolutionScope
val lhhaPrefixedId = lhhaScope.prefixedIdForStaticId(lhhaStaticId)
// Assume that LHHA element is within same repeat iteration as its related control
val contextPrefixedId = XFormsUtils.getRelatedEffectiveId(lhhaPrefixedId, contextStaticId)
val contextEffectiveId = contextPrefixedId + XFormsUtils.getEffectiveIdSuffixWithSeparator(control.effectiveId)
var ancestorObject = control.container.getContainingDocument.getObjectByEffectiveId(contextEffectiveId)
while (ancestorObject.isInstanceOf[XFormsControl]) {
val ancestorControl = ancestorObject.asInstanceOf[XFormsControl]
if (ancestorControl.getResolutionScope == lhhaScope) {
// Found ancestor in right scope
return ancestorControl
}
ancestorObject = ancestorControl.parent
}
null
}
}
| martinluther/orbeon-forms | src/main/scala/org/orbeon/oxf/xforms/control/MutableLHHAProperty.scala | Scala | lgpl-2.1 | 8,412 |
/* sbt -- Simple Build Tool
* Copyright 2009, 2010 Mark Harrah
*/
package xsbt.boot
import java.io.File
// The entry point to the launcher
object Boot {
def main(args: Array[String]) {
val config = parseArgs(args)
    // If we haven't exited, we set up some hooks and launch
System.clearProperty("scala.home") // avoid errors from mixing Scala versions in the same JVM
System.setProperty("jline.shutdownhook", "false") // shutdown hooks cause class loader leaks
System.setProperty("jline.esc.timeout", "0") // starts up a thread otherwise
CheckProxy()
run(config)
}
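  // Separates launcher-specific flags (--version, --locate) from the arguments passed on to the launched application.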
def parseArgs(args: Array[String]): LauncherArguments = {
@annotation.tailrec
def parse(args: List[String], isLocate: Boolean, remaining: List[String]): LauncherArguments =
args match {
case "--version" :: rest =>
println("sbt launcher version " + Package.getPackage("xsbt.boot").getImplementationVersion)
exit(1)
case "--locate" :: rest => parse(rest, true, remaining)
case next :: rest => parse(rest, isLocate, next :: remaining)
case Nil => new LauncherArguments(remaining.reverse, isLocate)
}
parse(args.toList, false, Nil)
}
// this arrangement is because Scala does not always properly optimize away
// the tail recursion in a catch statement
final def run(args: LauncherArguments): Unit = runImpl(args) match {
case Some(newArgs) => run(newArgs)
case None => ()
}
private def runImpl(args: LauncherArguments): Option[LauncherArguments] =
try
Launch(args) map exit
catch {
case b: BootException => errorAndExit(b.toString)
case r: xsbti.RetrieveException => errorAndExit("Error: " + r.getMessage)
case r: xsbti.FullReload => Some(new LauncherArguments(r.arguments.toList, false))
case e: Throwable =>
e.printStackTrace
errorAndExit(Pre.prefixError(e.toString))
}
private def errorAndExit(msg: String): Nothing =
{
System.out.println(msg)
exit(1)
}
private def exit(code: Int): Nothing =
System.exit(code).asInstanceOf[Nothing]
}
| niktrop/sbt | launch/src/main/scala/xsbt/boot/Boot.scala | Scala | bsd-3-clause | 2,167 |
/** Copyright 2015 TappingStone, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.prediction.tools.console
import io.prediction.data.storage
import grizzled.slf4j.Logging
case class AppArgs(
id: Option[Int] = None,
name: String = "",
channel: String = "",
dataDeleteChannel: Option[String] = None,
all: Boolean = false,
force: Boolean = false,
description: Option[String] = None)
object App extends Logging {
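  // Creates a new app, initializes its default Event Store and registers an access key for it.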
def create(ca: ConsoleArgs): Int = {
val apps = storage.Storage.getMetaDataApps()
    // get the events client up front so we fail fast with an error if the Event Store can't be accessed
val events = storage.Storage.getLEvents()
apps.getByName(ca.app.name) map { app =>
error(s"App ${ca.app.name} already exists. Aborting.")
1
} getOrElse {
ca.app.id.map { id =>
apps.get(id) map { app =>
error(
s"App ID ${id} already exists and maps to the app '${app.name}'. " +
"Aborting.")
return 1
}
}
val appid = apps.insert(storage.App(
id = ca.app.id.getOrElse(0),
name = ca.app.name,
description = ca.app.description))
appid map { id =>
val dbInit = events.init(id)
val r = if (dbInit) {
info(s"Initialized Event Store for this app ID: ${id}.")
val accessKeys = storage.Storage.getMetaDataAccessKeys
val accessKey = accessKeys.insert(storage.AccessKey(
key = ca.accessKey.accessKey,
appid = id,
events = Seq()))
accessKey map { k =>
info("Created new app:")
info(s" Name: ${ca.app.name}")
info(s" ID: ${id}")
info(s"Access Key: ${k}")
0
} getOrElse {
error(s"Unable to create new access key.")
1
}
} else {
error(s"Unable to initialize Event Store for this app ID: ${id}.")
// revert back the meta data change
try {
apps.delete(id)
0
} catch {
case e: Exception =>
error(s"Failed to revert back the App meta-data change.", e)
error(s"The app ${ca.app.name} CANNOT be used!")
error(s"Please run 'pio app delete ${ca.app.name}' " +
"to delete this app!")
1
}
}
events.close()
r
} getOrElse {
error(s"Unable to create new app.")
1
}
}
}
def list(ca: ConsoleArgs): Int = {
val apps = storage.Storage.getMetaDataApps.getAll().sortBy(_.name)
val accessKeys = storage.Storage.getMetaDataAccessKeys
val title = "Name"
val ak = "Access Key"
info(f"$title%20s | ID | $ak%64s | Allowed Event(s)")
apps foreach { app =>
val keys = accessKeys.getByAppid(app.id)
keys foreach { k =>
val events =
if (k.events.size > 0) k.events.sorted.mkString(",") else "(all)"
info(f"${app.name}%20s | ${app.id}%4d | ${k.key}%64s | $events%s")
}
}
info(s"Finished listing ${apps.size} app(s).")
0
}
def show(ca: ConsoleArgs): Int = {
val apps = storage.Storage.getMetaDataApps
val accessKeys = storage.Storage.getMetaDataAccessKeys
val channels = storage.Storage.getMetaDataChannels
apps.getByName(ca.app.name) map { app =>
info(s" App Name: ${app.name}")
info(s" App ID: ${app.id}")
info(s" Description: ${app.description.getOrElse("")}")
val keys = accessKeys.getByAppid(app.id)
var firstKey = true
keys foreach { k =>
val events =
if (k.events.size > 0) k.events.sorted.mkString(",") else "(all)"
if (firstKey) {
info(f" Access Key: ${k.key}%s | ${events}%s")
firstKey = false
} else {
info(f" ${k.key}%s | ${events}%s")
}
}
val chans = channels.getByAppid(app.id)
var firstChan = true
val titleName = "Channel Name"
val titleID = "Channel ID"
chans.foreach { ch =>
if (firstChan) {
info(f" Channels: ${titleName}%16s | ${titleID}%10s ")
firstChan = false
}
info(f" ${ch.name}%16s | ${ch.id}%10s")
}
0
} getOrElse {
error(s"App ${ca.app.name} does not exist. Aborting.")
1
}
}
def delete(ca: ConsoleArgs): Int = {
val apps = storage.Storage.getMetaDataApps
val accesskeys = storage.Storage.getMetaDataAccessKeys
val channels = storage.Storage.getMetaDataChannels
val events = storage.Storage.getLEvents()
val status = apps.getByName(ca.app.name) map { app =>
info(s"The following app (including all channels) will be deleted. Are you sure?")
info(s" App Name: ${app.name}")
info(s" App ID: ${app.id}")
info(s" Description: ${app.description.getOrElse("")}")
val chans = channels.getByAppid(app.id)
var firstChan = true
val titleName = "Channel Name"
val titleID = "Channel ID"
chans.foreach { ch =>
if (firstChan) {
info(f" Channels: ${titleName}%16s | ${titleID}%10s ")
firstChan = false
}
info(f" ${ch.name}%16s | ${ch.id}%10s")
}
val choice = if(ca.app.force) "YES" else readLine("Enter 'YES' to proceed: ")
choice match {
case "YES" => {
// delete channels
val delChannelStatus: Seq[Int] = chans.map { ch =>
if (events.remove(app.id, Some(ch.id))) {
info(s"Removed Event Store of the channel ID: ${ch.id}")
try {
channels.delete(ch.id)
info(s"Deleted channel ${ch.name}")
0
} catch {
case e: Exception =>
error(s"Error deleting channel ${ch.name}.", e)
1
}
} else {
error(s"Error removing Event Store of the channel ID: ${ch.id}.")
return 1
}
}
if (delChannelStatus.exists(_ != 0)) {
error("Error occurred while deleting channels. Aborting.")
return 1
}
try {
events.remove(app.id)
info(s"Removed Event Store for this app ID: ${app.id}")
} catch {
case e: Exception =>
error(s"Error removing Event Store for this app. Aborting.", e)
return 1
}
accesskeys.getByAppid(app.id) foreach { key =>
try {
accesskeys.delete(key.key)
info(s"Removed access key ${key.key}")
} catch {
case e: Exception =>
error(s"Error removing access key ${key.key}. Aborting.", e)
return 1
}
}
try {
apps.delete(app.id)
info(s"Deleted app ${app.name}.")
} catch {
case e: Exception =>
error(s"Error deleting app ${app.name}. Aborting.", e)
return 1
}
info("Done.")
0
}
case _ =>
info("Aborted.")
0
}
} getOrElse {
error(s"App ${ca.app.name} does not exist. Aborting.")
1
}
events.close()
status
}
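  // Deletes event data for an app: a single channel (default or named) or, with the "all" flag, every channel.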
def dataDelete(ca: ConsoleArgs): Int = {
if (ca.app.all) {
dataDeleteAll(ca)
} else {
dataDeleteOne(ca)
}
}
def dataDeleteOne(ca: ConsoleArgs): Int = {
val apps = storage.Storage.getMetaDataApps
val channels = storage.Storage.getMetaDataChannels
apps.getByName(ca.app.name) map { app =>
val channelId = ca.app.dataDeleteChannel.map { ch =>
val channelMap = channels.getByAppid(app.id).map(c => (c.name, c.id)).toMap
if (!channelMap.contains(ch)) {
error(s"Unable to delete data for channel.")
error(s"Channel ${ch} doesn't exist.")
return 1
}
channelMap(ch)
}
if (channelId.isDefined) {
info(s"Data of the following channel will be deleted. Are you sure?")
info(s"Channel Name: ${ca.app.dataDeleteChannel.get}")
info(s" Channel ID: ${channelId.get}")
info(s" App Name: ${app.name}")
info(s" App ID: ${app.id}")
info(s" Description: ${app.description}")
} else {
info(s"Data of the following app (default channel only) will be deleted. Are you sure?")
info(s" App Name: ${app.name}")
info(s" App ID: ${app.id}")
info(s" Description: ${app.description}")
}
val choice = if(ca.app.force) "YES" else readLine("Enter 'YES' to proceed: ")
choice match {
case "YES" => {
val events = storage.Storage.getLEvents()
// remove table
val r1 = if (events.remove(app.id, channelId)) {
if (channelId.isDefined) {
info(s"Removed Event Store for this channel ID: ${channelId.get}")
} else {
info(s"Removed Event Store for this app ID: ${app.id}")
}
0
} else {
if (channelId.isDefined) {
error(s"Error removing Event Store for this channel.")
} else {
error(s"Error removing Event Store for this app.")
}
1
}
// re-create table
val dbInit = events.init(app.id, channelId)
val r2 = if (dbInit) {
if (channelId.isDefined) {
info(s"Initialized Event Store for this channel ID: ${channelId.get}.")
} else {
info(s"Initialized Event Store for this app ID: ${app.id}.")
}
0
} else {
if (channelId.isDefined) {
error(s"Unable to initialize Event Store for this channel ID:" +
s" ${channelId.get}.")
} else {
error(s"Unable to initialize Event Store for this appId:" +
s" ${app.id}.")
}
1
}
events.close()
info("Done.")
r1 + r2
}
case _ =>
info("Aborted.")
0
}
} getOrElse {
error(s"App ${ca.app.name} does not exist. Aborting.")
1
}
}
def dataDeleteAll(ca: ConsoleArgs): Int = {
val apps = storage.Storage.getMetaDataApps
val channels = storage.Storage.getMetaDataChannels
val events = storage.Storage.getLEvents()
val status = apps.getByName(ca.app.name) map { app =>
info(s"All data of the app (including default and all channels) will be deleted." +
" Are you sure?")
info(s" App Name: ${app.name}")
info(s" App ID: ${app.id}")
info(s" Description: ${app.description}")
val chans = channels.getByAppid(app.id)
var firstChan = true
val titleName = "Channel Name"
val titleID = "Channel ID"
chans.foreach { ch =>
if (firstChan) {
info(f" Channels: ${titleName}%16s | ${titleID}%10s ")
firstChan = false
}
info(f" ${ch.name}%16s | ${ch.id}%10s")
}
val choice = if(ca.app.force) "YES" else readLine("Enter 'YES' to proceed: ")
choice match {
case "YES" => {
// delete channels
val delChannelStatus: Seq[Int] = chans.map { ch =>
val r1 = if (events.remove(app.id, Some(ch.id))) {
info(s"Removed Event Store of the channel ID: ${ch.id}")
0
} else {
error(s"Error removing Event Store of the channel ID: ${ch.id}.")
1
}
// re-create table
val dbInit = events.init(app.id, Some(ch.id))
val r2 = if (dbInit) {
info(s"Initialized Event Store of the channel ID: ${ch.id}")
0
} else {
error(s"Unable to initialize Event Store of the channel ID: ${ch.id}.")
1
}
r1 + r2
}
if (delChannelStatus.filter(_ != 0).isEmpty) {
val r1 = if (events.remove(app.id)) {
info(s"Removed Event Store for this app ID: ${app.id}")
0
} else {
error(s"Error removing Event Store for this app.")
1
}
val dbInit = events.init(app.id)
val r2 = if (dbInit) {
info(s"Initialized Event Store for this app ID: ${app.id}.")
0
} else {
error(s"Unable to initialize Event Store for this appId: ${app.id}.")
1
}
info("Done.")
r1 + r2
} else 1
}
case _ =>
info("Aborted.")
0
}
} getOrElse {
error(s"App ${ca.app.name} does not exist. Aborting.")
1
}
events.close()
status
}
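  // Creates a new named channel for an app and initializes its Event Store.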
def channelNew(ca: ConsoleArgs): Int = {
val apps = storage.Storage.getMetaDataApps
val channels = storage.Storage.getMetaDataChannels
val events = storage.Storage.getLEvents()
val newChannel = ca.app.channel
val status = apps.getByName(ca.app.name) map { app =>
val channelMap = channels.getByAppid(app.id).map(c => (c.name, c.id)).toMap
if (channelMap.contains(newChannel)) {
error(s"Unable to create new channel.")
error(s"Channel ${newChannel} already exists.")
1
} else if (!storage.Channel.isValidName(newChannel)) {
error(s"Unable to create new channel.")
error(s"The channel name ${newChannel} is invalid.")
error(s"${storage.Channel.nameConstraint}")
1
} else {
val channelId = channels.insert(storage.Channel(
id = 0, // new id will be assigned
appid = app.id,
name = newChannel
))
channelId.map { chanId =>
info(s"Updated Channel meta-data.")
// initialize storage
val dbInit = events.init(app.id, Some(chanId))
if (dbInit) {
info(s"Initialized Event Store for the channel: ${newChannel}.")
info(s"Created new channel:")
info(s" Channel Name: ${newChannel}")
info(s" Channel ID: ${chanId}")
info(s" App ID: ${app.id}")
0
} else {
error(s"Unable to create new channel.")
error(s"Failed to initalize Event Store.")
// reverted back the meta data
try {
channels.delete(chanId)
0
} catch {
case e: Exception =>
error(s"Failed to revert back the Channel meta-data change.", e)
error(s"The channel ${newChannel} CANNOT be used!")
error(s"Please run 'pio app channel-delete ${app.name} ${newChannel}' " +
"to delete this channel!")
1
}
}
}.getOrElse {
error(s"Unable to create new channel.")
error(s"Failed to update Channel meta-data.")
1
}
}
} getOrElse {
error(s"App ${ca.app.name} does not exist. Aborting.")
1
}
events.close()
status
}
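  // Deletes a channel's Event Store and its meta-data after confirmation.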
def channelDelete(ca: ConsoleArgs): Int = {
val apps = storage.Storage.getMetaDataApps
val channels = storage.Storage.getMetaDataChannels
val events = storage.Storage.getLEvents()
val deleteChannel = ca.app.channel
val status = apps.getByName(ca.app.name) map { app =>
val channelMap = channels.getByAppid(app.id).map(c => (c.name, c.id)).toMap
if (!channelMap.contains(deleteChannel)) {
error(s"Unable to delete channel.")
error(s"Channel ${deleteChannel} doesn't exist.")
1
} else {
info(s"The following channel will be deleted. Are you sure?")
info(s" Channel Name: ${deleteChannel}")
info(s" Channel ID: ${channelMap(deleteChannel)}")
info(s" App Name: ${app.name}")
info(s" App ID: ${app.id}")
        val choice = if (ca.app.force) "YES" else readLine("Enter 'YES' to proceed: ")
choice match {
case "YES" => {
          // NOTE: remove the storage first, before removing the meta-data (in case storage removal fails)
val dbRemoved = events.remove(app.id, Some(channelMap(deleteChannel)))
if (dbRemoved) {
info(s"Removed Event Store for this channel: ${deleteChannel}")
try {
channels.delete(channelMap(deleteChannel))
info(s"Deleted channel: ${deleteChannel}.")
0
} catch {
case e: Exception =>
error(s"Unable to delete channel.", e)
error(s"Failed to update Channel meta-data.")
error(s"The channel ${deleteChannel} CANNOT be used!")
error(s"Please run 'pio app channel-delete ${app.name} ${deleteChannel}' " +
"to delete this channel again!")
1
}
} else {
error(s"Unable to delete channel.")
error(s"Error removing Event Store for this channel.")
1
}
}
case _ =>
info("Aborted.")
0
}
}
} getOrElse {
error(s"App ${ca.app.name} does not exist. Aborting.")
1
}
events.close()
status
}
}
| schon/PredictionIO | tools/src/main/scala/io/prediction/tools/console/App.scala | Scala | apache-2.0 | 18,118 |
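// Hedged usage sketch (not part of the PredictionIO sources): the commands above are
// normally reached through the CLI, e.g. `pio app channel-new <appName> <channelName>`
// and `pio app channel-delete <appName> <channelName>`, as their error messages suggest.
// Both share the same confirm-unless-forced idiom; a minimal standalone version of that
// pattern, with illustrative names only:
object ConfirmUnlessForcedSketch {
  import scala.io.StdIn.readLine

  // Returns true immediately when forced, otherwise only if the user types the literal "YES".
  def confirmed(force: Boolean): Boolean =
    force || readLine("Enter 'YES' to proceed: ") == "YES"

  def main(args: Array[String]): Unit = {
    if (confirmed(force = args.contains("--force"))) println("proceeding")
    else println("Aborted.")
  }
}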
package com.scalafi.openbook
/** Buy/sell side of an OpenBook record, parsed from its single-character flag. */
sealed trait Side
object Side {
case object Buy extends Side
case object Sell extends Side
case object NA extends Side
def apply(c: Char) = c match {
case 'B' => Buy
case 'S' => Sell
case _ if c.toByte == 0 => NA
case _ => sys.error(s"Unknown Buy/Sell side: '$c'")
}
} | ezhulenev/scala-openbook | src/main/scala/com/scalafi/openbook/Side.scala | Scala | mit | 326 |
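// Hedged usage sketch for the Side parser above (it assumes only what the file shows:
// the buy/sell flag is a single character, 'B', 'S', or a zero byte for "not available"):
object SideUsageSketch {
  import com.scalafi.openbook.Side

  def main(args: Array[String]): Unit = {
    assert(Side('B') == Side.Buy)
    assert(Side('S') == Side.Sell)
    assert(Side(0.toChar) == Side.NA)
    // Any other flag is rejected: Side('X') throws "Unknown Buy/Sell side: 'X'".
    println("all side flags parsed as expected")
  }
}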
package com.github.jpmossin.charjump
import org.junit.runner.RunWith
import org.scalatest.FunSuite
import org.scalatest.junit.JUnitRunner
@RunWith(classOf[JUnitRunner])
class MatchingPositionSearcherTest extends FunSuite {
test("All lower case letters are included as single char jumps") {
val jumpKeys = MatchingPositionSearcher.mapPositionsToJumpKeys(0 to 25)
assert(jumpKeys.values.toSet.flatten === ('a' to 'z').toSet)
}
test("All generated jump sequences are unique") {
val positions = 0 to 1000
val seqs = MatchingPositionSearcher.mapPositionsToJumpKeys(positions).values.toSet
assert(positions.size == seqs.size)
}
test("The first character of jump sequences of different length should be unique") {
val positions = 0 to 1000
val seqs = MatchingPositionSearcher.mapPositionsToJumpKeys(positions).values
val jumpSeqByLength = seqs.groupBy(_.size)
assert(jumpSeqByLength.size == 3) // just to make sure we are actually testing something here.
// Compute the first char for the sequences of length 1, 2, and 3
// These sets should all be disjoint with each other.
val firstChars = jumpSeqByLength.mapValues(seqs => seqs.map(_.head).toSet)
assert((firstChars(1) intersect firstChars(2)).isEmpty)
    assert((firstChars(1) intersect firstChars(3)).isEmpty)
assert((firstChars(2) intersect firstChars(3)).isEmpty)
}
}
| jpmossin/CharJump | src/test/scala/com/github/jpmossin/charjump/MatchingPositionSearcherTest.scala | Scala | mit | 1,396 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark
import java.io._
import java.util.concurrent.ConcurrentHashMap
import java.util.zip.{GZIPInputStream, GZIPOutputStream}
import scala.collection.mutable.{HashSet, Map}
import scala.collection.JavaConversions._
import scala.reflect.ClassTag
import org.apache.spark.rpc.{RpcEndpointRef, RpcEnv, RpcCallContext, RpcEndpoint}
import org.apache.spark.scheduler.MapStatus
import org.apache.spark.shuffle.MetadataFetchFailedException
import org.apache.spark.storage.BlockManagerId
import org.apache.spark.util._
private[spark] sealed trait MapOutputTrackerMessage
private[spark] case class GetMapOutputStatuses(shuffleId: Int)
extends MapOutputTrackerMessage
private[spark] case object StopMapOutputTracker extends MapOutputTrackerMessage
/** RpcEndpoint class for MapOutputTrackerMaster */
private[spark] class MapOutputTrackerMasterEndpoint(
override val rpcEnv: RpcEnv, tracker: MapOutputTrackerMaster, conf: SparkConf)
extends RpcEndpoint with Logging {
val maxAkkaFrameSize = AkkaUtils.maxFrameSizeBytes(conf)
override def receiveAndReply(context: RpcCallContext): PartialFunction[Any, Unit] = {
case GetMapOutputStatuses(shuffleId: Int) =>
val hostPort = context.sender.address.hostPort
logInfo("Asked to send map output locations for shuffle " + shuffleId + " to " + hostPort)
val mapOutputStatuses = tracker.getSerializedMapOutputStatuses(shuffleId)
val serializedSize = mapOutputStatuses.size
if (serializedSize > maxAkkaFrameSize) {
val msg = s"Map output statuses were $serializedSize bytes which " +
s"exceeds spark.akka.frameSize ($maxAkkaFrameSize bytes)."
/* For SPARK-1244 we'll opt for just logging an error and then sending it to the sender.
* A bigger refactoring (SPARK-1239) will ultimately remove this entire code path. */
val exception = new SparkException(msg)
logError(msg, exception)
context.sendFailure(exception)
} else {
context.reply(mapOutputStatuses)
}
case StopMapOutputTracker =>
logInfo("MapOutputTrackerMasterEndpoint stopped!")
context.reply(true)
stop()
}
}
/**
* Class that keeps track of the location of the map output of
* a stage. This is abstract because different versions of MapOutputTracker
 * (driver and executor) use different HashMap implementations to store their metadata.
*/
private[spark] abstract class MapOutputTracker(conf: SparkConf) extends Logging {
/** Set to the MapOutputTrackerMasterEndpoint living on the driver. */
var trackerEndpoint: RpcEndpointRef = _
/**
* This HashMap has different behavior for the driver and the executors.
*
* On the driver, it serves as the source of map outputs recorded from ShuffleMapTasks.
* On the executors, it simply serves as a cache, in which a miss triggers a fetch from the
* driver's corresponding HashMap.
*
* Note: because mapStatuses is accessed concurrently, subclasses should make sure it's a
* thread-safe map.
*/
protected val mapStatuses: Map[Int, Array[MapStatus]]
/**
* Incremented every time a fetch fails so that client nodes know to clear
* their cache of map output locations if this happens.
*/
protected var epoch: Long = 0
protected val epochLock = new AnyRef
/** Remembers which map output locations are currently being fetched on an executor. */
private val fetching = new HashSet[Int]
/**
* Send a message to the trackerEndpoint and get its result within a default timeout, or
* throw a SparkException if this fails.
*/
protected def askTracker[T: ClassTag](message: Any): T = {
try {
trackerEndpoint.askWithRetry[T](message)
} catch {
case e: Exception =>
logError("Error communicating with MapOutputTracker", e)
throw new SparkException("Error communicating with MapOutputTracker", e)
}
}
/** Send a one-way message to the trackerEndpoint, to which we expect it to reply with true. */
protected def sendTracker(message: Any) {
val response = askTracker[Boolean](message)
if (response != true) {
throw new SparkException(
"Error reply received from MapOutputTracker. Expecting true, got " + response.toString)
}
}
/**
* Called from executors to get the server URIs and output sizes of the map outputs of
* a given shuffle.
*/
def getServerStatuses(shuffleId: Int, reduceId: Int): Array[(BlockManagerId, Long)] = {
val statuses = mapStatuses.get(shuffleId).orNull
if (statuses == null) {
logInfo("Don't have map outputs for shuffle " + shuffleId + ", fetching them")
var fetchedStatuses: Array[MapStatus] = null
fetching.synchronized {
// Someone else is fetching it; wait for them to be done
while (fetching.contains(shuffleId)) {
try {
fetching.wait()
} catch {
case e: InterruptedException =>
}
}
// Either while we waited the fetch happened successfully, or
// someone fetched it in between the get and the fetching.synchronized.
fetchedStatuses = mapStatuses.get(shuffleId).orNull
if (fetchedStatuses == null) {
// We have to do the fetch, get others to wait for us.
fetching += shuffleId
}
}
if (fetchedStatuses == null) {
// We won the race to fetch the output locs; do so
logInfo("Doing the fetch; tracker endpoint = " + trackerEndpoint)
// This try-finally prevents hangs due to timeouts:
try {
val fetchedBytes = askTracker[Array[Byte]](GetMapOutputStatuses(shuffleId))
fetchedStatuses = MapOutputTracker.deserializeMapStatuses(fetchedBytes)
logInfo("Got the output locations")
mapStatuses.put(shuffleId, fetchedStatuses)
} finally {
fetching.synchronized {
fetching -= shuffleId
fetching.notifyAll()
}
}
}
if (fetchedStatuses != null) {
fetchedStatuses.synchronized {
return MapOutputTracker.convertMapStatuses(shuffleId, reduceId, fetchedStatuses)
}
} else {
logError("Missing all output locations for shuffle " + shuffleId)
throw new MetadataFetchFailedException(
shuffleId, reduceId, "Missing all output locations for shuffle " + shuffleId)
}
} else {
statuses.synchronized {
return MapOutputTracker.convertMapStatuses(shuffleId, reduceId, statuses)
}
}
}
/** Called to get current epoch number. */
def getEpoch: Long = {
epochLock.synchronized {
return epoch
}
}
/**
* Called from executors to update the epoch number, potentially clearing old outputs
* because of a fetch failure. Each executor task calls this with the latest epoch
* number on the driver at the time it was created.
*/
def updateEpoch(newEpoch: Long) {
epochLock.synchronized {
if (newEpoch > epoch) {
logInfo("Updating epoch to " + newEpoch + " and clearing cache")
epoch = newEpoch
mapStatuses.clear()
}
}
}
/** Unregister shuffle data. */
def unregisterShuffle(shuffleId: Int) {
mapStatuses.remove(shuffleId)
}
/** Stop the tracker. */
def stop() { }
}
/**
* MapOutputTracker for the driver. This uses TimeStampedHashMap to keep track of map
 * output information, which allows old output information to be dropped based on a TTL.
*/
private[spark] class MapOutputTrackerMaster(conf: SparkConf)
extends MapOutputTracker(conf) {
/** Cache a serialized version of the output statuses for each shuffle to send them out faster */
private var cacheEpoch = epoch
/**
* Timestamp based HashMap for storing mapStatuses and cached serialized statuses in the driver,
* so that statuses are dropped only by explicit de-registering or by TTL-based cleaning (if set).
* Other than these two scenarios, nothing should be dropped from this HashMap.
*/
protected val mapStatuses = new TimeStampedHashMap[Int, Array[MapStatus]]()
private val cachedSerializedStatuses = new TimeStampedHashMap[Int, Array[Byte]]()
// For cleaning up TimeStampedHashMaps
private val metadataCleaner =
new MetadataCleaner(MetadataCleanerType.MAP_OUTPUT_TRACKER, this.cleanup, conf)
def registerShuffle(shuffleId: Int, numMaps: Int) {
if (mapStatuses.put(shuffleId, new Array[MapStatus](numMaps)).isDefined) {
throw new IllegalArgumentException("Shuffle ID " + shuffleId + " registered twice")
}
}
def registerMapOutput(shuffleId: Int, mapId: Int, status: MapStatus) {
val array = mapStatuses(shuffleId)
array.synchronized {
array(mapId) = status
}
}
/** Register multiple map output information for the given shuffle */
def registerMapOutputs(shuffleId: Int, statuses: Array[MapStatus], changeEpoch: Boolean = false) {
mapStatuses.put(shuffleId, Array[MapStatus]() ++ statuses)
if (changeEpoch) {
incrementEpoch()
}
}
/** Unregister map output information of the given shuffle, mapper and block manager */
def unregisterMapOutput(shuffleId: Int, mapId: Int, bmAddress: BlockManagerId) {
val arrayOpt = mapStatuses.get(shuffleId)
if (arrayOpt.isDefined && arrayOpt.get != null) {
val array = arrayOpt.get
array.synchronized {
if (array(mapId) != null && array(mapId).location == bmAddress) {
array(mapId) = null
}
}
incrementEpoch()
} else {
throw new SparkException("unregisterMapOutput called for nonexistent shuffle ID")
}
}
/** Unregister shuffle data */
override def unregisterShuffle(shuffleId: Int) {
mapStatuses.remove(shuffleId)
cachedSerializedStatuses.remove(shuffleId)
}
/** Check if the given shuffle is being tracked */
def containsShuffle(shuffleId: Int): Boolean = {
cachedSerializedStatuses.contains(shuffleId) || mapStatuses.contains(shuffleId)
}
def incrementEpoch() {
epochLock.synchronized {
epoch += 1
logDebug("Increasing epoch to " + epoch)
}
}
def getSerializedMapOutputStatuses(shuffleId: Int): Array[Byte] = {
var statuses: Array[MapStatus] = null
var epochGotten: Long = -1
epochLock.synchronized {
if (epoch > cacheEpoch) {
cachedSerializedStatuses.clear()
cacheEpoch = epoch
}
cachedSerializedStatuses.get(shuffleId) match {
case Some(bytes) =>
return bytes
case None =>
statuses = mapStatuses.getOrElse(shuffleId, Array[MapStatus]())
epochGotten = epoch
}
}
// If we got here, we failed to find the serialized locations in the cache, so we pulled
// out a snapshot of the locations as "statuses"; let's serialize and return that
val bytes = MapOutputTracker.serializeMapStatuses(statuses)
logInfo("Size of output statuses for shuffle %d is %d bytes".format(shuffleId, bytes.length))
// Add them into the table only if the epoch hasn't changed while we were working
epochLock.synchronized {
if (epoch == epochGotten) {
cachedSerializedStatuses(shuffleId) = bytes
}
}
bytes
}
override def stop() {
sendTracker(StopMapOutputTracker)
mapStatuses.clear()
trackerEndpoint = null
metadataCleaner.cancel()
cachedSerializedStatuses.clear()
}
private def cleanup(cleanupTime: Long) {
mapStatuses.clearOldValues(cleanupTime)
cachedSerializedStatuses.clearOldValues(cleanupTime)
}
}
/**
* MapOutputTracker for the executors, which fetches map output information from the driver's
* MapOutputTrackerMaster.
*/
private[spark] class MapOutputTrackerWorker(conf: SparkConf) extends MapOutputTracker(conf) {
protected val mapStatuses: Map[Int, Array[MapStatus]] =
new ConcurrentHashMap[Int, Array[MapStatus]]
}
private[spark] object MapOutputTracker extends Logging {
val ENDPOINT_NAME = "MapOutputTracker"
// Serialize an array of map output locations into an efficient byte format so that we can send
// it to reduce tasks. We do this by compressing the serialized bytes using GZIP. They will
// generally be pretty compressible because many map outputs will be on the same hostname.
def serializeMapStatuses(statuses: Array[MapStatus]): Array[Byte] = {
val out = new ByteArrayOutputStream
val objOut = new ObjectOutputStream(new GZIPOutputStream(out))
Utils.tryWithSafeFinally {
// Since statuses can be modified in parallel, sync on it
statuses.synchronized {
objOut.writeObject(statuses)
}
} {
objOut.close()
}
out.toByteArray
}
// Opposite of serializeMapStatuses.
def deserializeMapStatuses(bytes: Array[Byte]): Array[MapStatus] = {
val objIn = new ObjectInputStream(new GZIPInputStream(new ByteArrayInputStream(bytes)))
Utils.tryWithSafeFinally {
objIn.readObject().asInstanceOf[Array[MapStatus]]
} {
objIn.close()
}
}
// Convert an array of MapStatuses to locations and sizes for a given reduce ID. If
// any of the statuses is null (indicating a missing location due to a failed mapper),
// throw a FetchFailedException.
private def convertMapStatuses(
shuffleId: Int,
reduceId: Int,
statuses: Array[MapStatus]): Array[(BlockManagerId, Long)] = {
assert (statuses != null)
statuses.map {
status =>
if (status == null) {
logError("Missing an output location for shuffle " + shuffleId)
throw new MetadataFetchFailedException(
shuffleId, reduceId, "Missing an output location for shuffle " + shuffleId)
} else {
(status.location, status.getSizeForBlock(reduceId))
}
}
}
}
| andrewor14/iolap | core/src/main/scala/org/apache/spark/MapOutputTracker.scala | Scala | apache-2.0 | 14,551 |
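// Hedged round-trip sketch for the GZIP-compressed map-status serialization above.
// Assumptions: this compiles inside the org.apache.spark package (MapOutputTracker,
// MapStatus and BlockManagerId are private[spark]), and the companion factories
// BlockManagerId(execId, host, port) and MapStatus(loc, uncompressedSizes) match this
// Spark version -- treat the construction as illustrative if they differ.
package org.apache.spark

import org.apache.spark.scheduler.MapStatus
import org.apache.spark.storage.BlockManagerId

object MapStatusSerializationSketch {
  def main(args: Array[String]): Unit = {
    val statuses: Array[MapStatus] = Array(
      MapStatus(BlockManagerId("exec-1", "host-a", 7337), Array(10L, 20L, 30L)),
      MapStatus(BlockManagerId("exec-2", "host-b", 7337), Array(5L, 0L, 15L)))

    val bytes = MapOutputTracker.serializeMapStatuses(statuses)
    val back  = MapOutputTracker.deserializeMapStatuses(bytes)

    // Locations round-trip exactly; block sizes may come back approximated because
    // MapStatus is free to compress them.
    assert(back.length == statuses.length)
    assert(back.map(_.location).toSeq == statuses.map(_.location).toSeq)
    println(s"serialized ${statuses.length} statuses into ${bytes.length} bytes")
  }
}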
package sysadmin
import scalikejdbc.ConnectionPool
/** Registers an in-memory H2 connection pool named 'sysadmin' for tests that mix in this trait. */
trait Connection {
Class.forName("org.h2.Driver")
ConnectionPool.add(Symbol("sysadmin"), "jdbc:h2:mem:sysadmin", "sa", "sa")
}
| skinny-framework/skinny-framework | factory-girl/src/test/scala/sysadmin/Connection.scala | Scala | mit | 184 |
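// Hedged usage sketch for the pool registered above, using stock ScalikeJDBC APIs
// (NamedDB and the sql"" interpolation); the query is deliberately trivial so it needs
// no schema in the in-memory H2 database:
import scalikejdbc._

object SysadminPoolSketch {
  def main(args: Array[String]): Unit = {
    Class.forName("org.h2.Driver")
    ConnectionPool.add(Symbol("sysadmin"), "jdbc:h2:mem:sysadmin", "sa", "sa")

    val one: Option[Int] = NamedDB(Symbol("sysadmin")) readOnly { implicit session =>
      sql"select 1".map(_.int(1)).single.apply()
    }
    println(one) // expected: Some(1)
  }
}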
package magic.gol
import akka.actor._
/** Actor that serves HTTP requests by running the route defined in GeneralRoute. */
class ServiceActor extends Actor with ActorLogging with GeneralRoute {
def actorRefFactory = context
def receive = runRoute(route)
}
object ServiceActor {
def props = Props[ServiceActor]
}
| dvallejo/spray-example | src/main/scala/magic/gol/ServiceActor.scala | Scala | apache-2.0 | 239 |
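// Hedged boot sketch for the actor above, assuming a standard spray-can 1.x setup and
// that GeneralRoute mixes in HttpService and defines `route` (as the runRoute call implies);
// the interface and port below are illustrative:
import akka.actor.ActorSystem
import akka.io.IO
import spray.can.Http
import magic.gol.ServiceActor

object BootSketch extends App {
  implicit val system = ActorSystem("gol")

  // Bind the HTTP listener to the service actor.
  val service = system.actorOf(ServiceActor.props, "service")
  IO(Http) ! Http.Bind(service, interface = "localhost", port = 8080)
}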
package net.scalax.ubw.core
/** Base trait of the pile tree; leafZero yields the zero atomic values of all its leaves. */
sealed abstract trait Pile {
self =>
type DataType
def leafZero: List[AtomicValue]
def leafZeroDataPiles: List[DataPile]
}
/** A Pile composed of several CommonPiles; its DataType is decoded from and encoded to the flat list of the children's data. */
trait PileList extends Pile {
self =>
type PileType
override type DataType
def leafZero: List[AtomicValue] = {
encodePiles.map(_.leafZero).flatten
}
def leafZeroDataPiles: List[DataPile] = {
encodePiles.map(_.leafZeroDataPiles).flatten
}
val pileEntity: PileType
def encodePiles: List[CommonPile]
def decodePileData(datas: List[Any]): DataType
def encodePileData(data: DataType): List[Any]
}
class PileListImpl[PT, DT](
override val pileEntity: PT,
encoder: List[CommonPile],
dataDecoder: List[Any] => DT,
dataEncoder: DT => List[Any]
) extends PileList {
override type PileType = PT
override type DataType = DT
override def encodePiles: List[CommonPile] = encoder
override def decodePileData(data: List[Any]): DT = dataDecoder(data)
override def encodePileData(data: DataType): List[Any] = dataEncoder(data)
}
/** A Pile with a concrete path (PathType) and a PileShape relating that path to its DataType. */
abstract trait CommonPile extends Pile {
self =>
type PathType
override type DataType
val pathPile: PathType
val fShape: PileShape[PathType, DataType]
}
/** A CommonPile with a single sub-Pile; its data is derived from the sub-Pile's data via dataFromSub. */
trait BranchPile extends CommonPile {
self =>
val subs: Pile
def dataFromSub(subDatas: Any): DataType
def leafZero: List[AtomicValue] = {
subs.leafZero
}
def leafZeroDataPiles: List[DataPile] = {
subs.leafZeroDataPiles
}
}
class BranchPileImpl[PT, DT](
override val pathPile: PT,
override val fShape: PileShape[PT, DT],
override val subs: Pile,
dataFromSubFunc: Any => DT
) extends BranchPile {
override type PathType = PT
override type DataType = DT
override def dataFromSub(subDatas: Any): DataType = dataFromSubFunc(subDatas)
}
/** A terminal Pile whose zero value comes directly from its PileShape. */
trait LeafPile extends CommonPile {
self =>
def leafZero: List[AtomicValue] = {
//DataPile.fromPile(self, fShape.zero :: Nil)._1 :: Nil
fShape.encodeData(fShape.zero)
}
def leafZeroDataPiles: List[DataPile] = {
List(
new LeafDataPileImpl(
pathPile = self.pathPile,
data = self.fShape.zero,
fShape = self.fShape
)
)
}
}
class LeafPileImpl[PT, DT](
override val pathPile: PT,
override val fShape: PileShape[PT, DT]
) extends LeafPile {
override type PathType = PT
override type DataType = DT
}
object Pile {
def apply[D](paths: AtomicPathImpl[D]): LeafPileImpl[AtomicPathImpl[D], AtomicValueImpl[D]] = {
val shape = PileShape.fpathPileShape[D]
new LeafPileImpl(paths, shape)
}
} | scalax/fsn | framework/ubw-core/src/main/scala/net/scalax/ubw/core/Pile.scala | Scala | mit | 2,672 |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest
import org.scalactic.Equality
import org.scalactic.Uniformity
import org.scalactic.Prettifier
import org.scalactic.StringNormalizations._
import SharedHelpers._
import FailureMessages.decorateToStringValue
import Matchers._
import exceptions.TestFailedException
class ListShouldContainAtLeastOneOfLogicalAndSpec extends FunSpec {
private val prettifier = Prettifier.default
val upperCaseStringEquality =
new Equality[String] {
def areEqual(a: String, b: Any): Boolean = upperCase(a) == b
}
private def upperCase(value: Any): Any =
value match {
case l: List[_] => l.map(upperCase(_))
case s: String => s.toUpperCase
case c: Char => c.toString.toUpperCase.charAt(0)
case (s1: String, s2: String) => (s1.toUpperCase, s2.toUpperCase)
case e: java.util.Map.Entry[_, _] =>
(e.getKey, e.getValue) match {
case (k: String, v: String) => Entry(k.toUpperCase, v.toUpperCase)
case _ => value
}
case _ => value
}
//ADDITIONAL//
val fileName: String = "ListShouldContainAtLeastOneOfLogicalAndSpec.scala"
describe("a List") {
val fumList: List[String] = List("fum")
val toList: List[String] = List("to")
    describe("when used with (contain atLeastOneOf (..) and contain atLeastOneOf (..))") {
it("should do nothing if valid, else throw a TFE with an appropriate error message") {
fumList should (contain atLeastOneOf ("fee", "fie", "foe", "fum") and contain atLeastOneOf("fie", "fee", "fum", "foe"))
val e1 = intercept[TestFailedException] {
fumList should (contain atLeastOneOf ("happy", "birthday", "to", "you") and contain atLeastOneOf ("fee", "fie", "foe", "fum"))
}
checkMessageStackDepth(e1, Resources.didNotContainAtLeastOneOf(decorateToStringValue(prettifier, fumList), "\\"happy\\", \\"birthday\\", \\"to\\", \\"you\\""), fileName, thisLineNumber - 2)
val e2 = intercept[TestFailedException] {
fumList should (contain atLeastOneOf ("fee", "fie", "foe", "fum") and contain atLeastOneOf ("happy", "birthday", "to", "you"))
}
checkMessageStackDepth(e2, Resources.containedAtLeastOneOf(decorateToStringValue(prettifier, fumList), "\\"fee\\", \\"fie\\", \\"foe\\", \\"fum\\"") + ", but " + Resources.didNotContainAtLeastOneOf(decorateToStringValue(prettifier, fumList), "\\"happy\\", \\"birthday\\", \\"to\\", \\"you\\""), fileName, thisLineNumber - 2)
}
it("should use the implicit Equality in scope") {
implicit val ise = upperCaseStringEquality
fumList should (contain atLeastOneOf ("FEE", "FIE", "FOE", "FUM") and contain atLeastOneOf ("FEE", "FIE", "FUM", "FOE"))
val e1 = intercept[TestFailedException] {
fumList should (contain atLeastOneOf ("fum", "foe") and contain atLeastOneOf ("FEE", "FIE", "FOE", "FUM"))
}
checkMessageStackDepth(e1, Resources.didNotContainAtLeastOneOf(decorateToStringValue(prettifier, fumList), "\\"fum\\", \\"foe\\""), fileName, thisLineNumber - 2)
val e2 = intercept[TestFailedException] {
fumList should (contain atLeastOneOf ("FEE", "FIE", "FOE", "FUM") and (contain atLeastOneOf ("fum", "foe")))
}
checkMessageStackDepth(e2, Resources.containedAtLeastOneOf(decorateToStringValue(prettifier, fumList), "\\"FEE\\", \\"FIE\\", \\"FOE\\", \\"FUM\\"") + ", but " + Resources.didNotContainAtLeastOneOf(decorateToStringValue(prettifier, fumList), "\\"fum\\", \\"foe\\""), fileName, thisLineNumber - 2)
}
it("should use an explicitly provided Equality") {
(fumList should (contain atLeastOneOf ("FEE", "FIE", "FOE", "FUM") and contain atLeastOneOf ("FEE", "FIE", "FUM", "FOE"))) (decided by upperCaseStringEquality, decided by upperCaseStringEquality)
val e1 = intercept[TestFailedException] {
(fumList should (contain atLeastOneOf ("FEE", "FIE", "FOE", "FUM") and contain atLeastOneOf ("fum", "foe"))) (decided by upperCaseStringEquality, decided by upperCaseStringEquality)
}
checkMessageStackDepth(e1, Resources.containedAtLeastOneOf(decorateToStringValue(prettifier, fumList), "\\"FEE\\", \\"FIE\\", \\"FOE\\", \\"FUM\\"") + ", but " + Resources.didNotContainAtLeastOneOf(decorateToStringValue(prettifier, fumList), "\\"fum\\", \\"foe\\""), fileName, thisLineNumber - 2)
val e2 = intercept[TestFailedException] {
(fumList should (contain atLeastOneOf ("fum", "foe") and contain atLeastOneOf ("FEE", "FIE", "FOE", "FUM"))) (decided by upperCaseStringEquality, decided by upperCaseStringEquality)
}
checkMessageStackDepth(e2, Resources.didNotContainAtLeastOneOf(decorateToStringValue(prettifier, fumList), "\\"fum\\", \\"foe\\""), fileName, thisLineNumber - 2)
(fumList should (contain atLeastOneOf (" FEE ", " FIE ", " FOE ", " FUM ") and contain atLeastOneOf (" FEE ", " FIE ", " FOE ", " FUM "))) (after being lowerCased and trimmed, after being lowerCased and trimmed)
}
it("should throw NotAllowedException with correct stack depth and message when RHS contain duplicated value") {
val e1 = intercept[exceptions.NotAllowedException] {
fumList should (contain atLeastOneOf ("fee", "fie", "foe", "fie", "fum") and contain atLeastOneOf("fie", "fee", "fum", "foe"))
}
e1.failedCodeFileName.get should be (fileName)
e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
e1.message should be (Some(Resources.atLeastOneOfDuplicate))
val e2 = intercept[exceptions.NotAllowedException] {
fumList should (contain atLeastOneOf ("fee", "fie", "foe", "fum") and contain atLeastOneOf("fee", "fie", "foe", "fie", "fum"))
}
e2.failedCodeFileName.get should be (fileName)
e2.failedCodeLineNumber.get should be (thisLineNumber - 3)
e2.message should be (Some(Resources.atLeastOneOfDuplicate))
}
}
    describe("when used with (equal (..) and contain atLeastOneOf (..))") {
it("should do nothing if valid, else throw a TFE with an appropriate error message") {
fumList should (equal (fumList) and contain atLeastOneOf("fie", "fee", "fum", "foe"))
val e1 = intercept[TestFailedException] {
fumList should (equal (toList) and contain atLeastOneOf ("fee", "fie", "foe", "fum"))
}
checkMessageStackDepth(e1, Resources.didNotEqual(decorateToStringValue(prettifier, fumList), decorateToStringValue(prettifier, toList)), fileName, thisLineNumber - 2)
val e2 = intercept[TestFailedException] {
fumList should (equal (fumList) and contain atLeastOneOf ("happy", "birthday", "to", "you"))
}
checkMessageStackDepth(e2, Resources.equaled(decorateToStringValue(prettifier, fumList), decorateToStringValue(prettifier, fumList)) + ", but " + Resources.didNotContainAtLeastOneOf(decorateToStringValue(prettifier, fumList), "\\"happy\\", \\"birthday\\", \\"to\\", \\"you\\""), fileName, thisLineNumber - 2)
}
it("should use the implicit Equality in scope") {
implicit val ise = upperCaseStringEquality
fumList should (equal (fumList) and contain atLeastOneOf ("FEE", "FIE", "FOE", "FUM"))
val e1 = intercept[TestFailedException] {
fumList should (equal (toList) and contain atLeastOneOf ("FEE", "FIE", "FOE", "FUM"))
}
checkMessageStackDepth(e1, Resources.didNotEqual(decorateToStringValue(prettifier, fumList), decorateToStringValue(prettifier, toList)), fileName, thisLineNumber - 2)
val e2 = intercept[TestFailedException] {
fumList should (equal (fumList) and (contain atLeastOneOf ("fum", "foe")))
}
checkMessageStackDepth(e2, Resources.equaled(decorateToStringValue(prettifier, fumList), decorateToStringValue(prettifier, fumList)) + ", but " + Resources.didNotContainAtLeastOneOf(decorateToStringValue(prettifier, fumList), "\\"fum\\", \\"foe\\""), fileName, thisLineNumber - 2)
}
it("should use an explicitly provided Equality") {
(fumList should (equal (fumList) and contain atLeastOneOf ("FEE", "FIE", "FOE", "FUM"))) (decided by defaultEquality, decided by upperCaseStringEquality)
val e1 = intercept[TestFailedException] {
(fumList should (equal (fumList) and contain atLeastOneOf ("fum", "foe"))) (decided by defaultEquality, decided by upperCaseStringEquality)
}
checkMessageStackDepth(e1, Resources.equaled(decorateToStringValue(prettifier, fumList), decorateToStringValue(prettifier, fumList)) + ", but " + Resources.didNotContainAtLeastOneOf(decorateToStringValue(prettifier, fumList), "\\"fum\\", \\"foe\\""), fileName, thisLineNumber - 2)
val e2 = intercept[TestFailedException] {
(fumList should (equal (toList) and contain atLeastOneOf ("FEE", "FIE", "FOE", "FUM"))) (decided by defaultEquality, decided by upperCaseStringEquality)
}
checkMessageStackDepth(e2, Resources.didNotEqual(decorateToStringValue(prettifier, fumList), decorateToStringValue(prettifier, toList)), fileName, thisLineNumber - 2)
(fumList should (equal (fumList) and contain atLeastOneOf (" FEE ", " FIE ", " FOE ", " FUM "))) (decided by defaultEquality, after being lowerCased and trimmed)
}
it("should throw NotAllowedException with correct stack depth and message when RHS contain duplicated value") {
val e1 = intercept[exceptions.NotAllowedException] {
fumList should (equal (fumList) and contain atLeastOneOf("fee", "fie", "foe", "fie", "fum"))
}
e1.failedCodeFileName.get should be (fileName)
e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
e1.message should be (Some(Resources.atLeastOneOfDuplicate))
}
}
    describe("when used with (be (..) and contain atLeastOneOf (..))") {
it("should do nothing if valid, else throw a TFE with an appropriate error message") {
fumList should (be (fumList) and contain atLeastOneOf("fie", "fee", "fum", "foe"))
val e1 = intercept[TestFailedException] {
fumList should (be (toList) and contain atLeastOneOf ("fee", "fie", "foe", "fum"))
}
checkMessageStackDepth(e1, Resources.wasNotEqualTo(decorateToStringValue(prettifier, fumList), decorateToStringValue(prettifier, toList)), fileName, thisLineNumber - 2)
val e2 = intercept[TestFailedException] {
fumList should (be (fumList) and contain atLeastOneOf ("happy", "birthday", "to", "you"))
}
checkMessageStackDepth(e2, Resources.wasEqualTo(decorateToStringValue(prettifier, fumList), decorateToStringValue(prettifier, fumList)) + ", but " + Resources.didNotContainAtLeastOneOf(decorateToStringValue(prettifier, fumList), "\\"happy\\", \\"birthday\\", \\"to\\", \\"you\\""), fileName, thisLineNumber - 2)
}
it("should use the implicit Equality in scope") {
implicit val ise = upperCaseStringEquality
fumList should (be (fumList) and contain atLeastOneOf ("FEE", "FIE", "FOE", "FUM"))
val e1 = intercept[TestFailedException] {
fumList should (be (toList) and contain atLeastOneOf ("FEE", "FIE", "FOE", "FUM"))
}
checkMessageStackDepth(e1, Resources.wasNotEqualTo(decorateToStringValue(prettifier, fumList), decorateToStringValue(prettifier, toList)), fileName, thisLineNumber - 2)
val e2 = intercept[TestFailedException] {
fumList should (be (fumList) and (contain atLeastOneOf ("fum", "foe")))
}
checkMessageStackDepth(e2, Resources.wasEqualTo(decorateToStringValue(prettifier, fumList), decorateToStringValue(prettifier, fumList)) + ", but " + Resources.didNotContainAtLeastOneOf(decorateToStringValue(prettifier, fumList), "\\"fum\\", \\"foe\\""), fileName, thisLineNumber - 2)
}
it("should use an explicitly provided Equality") {
(fumList should (be (fumList) and contain atLeastOneOf ("FEE", "FIE", "FOE", "FUM"))) (decided by upperCaseStringEquality)
val e1 = intercept[TestFailedException] {
(fumList should (be (fumList) and contain atLeastOneOf ("fum", "foe"))) (decided by upperCaseStringEquality)
}
checkMessageStackDepth(e1, Resources.wasEqualTo(decorateToStringValue(prettifier, fumList), decorateToStringValue(prettifier, fumList)) + ", but " + Resources.didNotContainAtLeastOneOf(decorateToStringValue(prettifier, fumList), "\\"fum\\", \\"foe\\""), fileName, thisLineNumber - 2)
val e2 = intercept[TestFailedException] {
(fumList should (be (toList) and contain atLeastOneOf ("FEE", "FIE", "FOE", "FUM"))) (decided by upperCaseStringEquality)
}
checkMessageStackDepth(e2, Resources.wasNotEqualTo(decorateToStringValue(prettifier, fumList), decorateToStringValue(prettifier, toList)), fileName, thisLineNumber - 2)
(fumList should (be (fumList) and contain atLeastOneOf (" FEE ", " FIE ", " FOE ", " FUM "))) (after being lowerCased and trimmed)
}
it("should throw NotAllowedException with correct stack depth and message when RHS contain duplicated value") {
val e1 = intercept[exceptions.NotAllowedException] {
fumList should (be (fumList) and contain atLeastOneOf("fee", "fie", "foe", "fie", "fum"))
}
e1.failedCodeFileName.get should be (fileName)
e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
e1.message should be (Some(Resources.atLeastOneOfDuplicate))
}
}
    describe("when used with (contain atLeastOneOf (..) and be (..))") {
it("should do nothing if valid, else throw a TFE with an appropriate error message") {
fumList should (contain atLeastOneOf("fie", "fee", "fum", "foe") and be (fumList))
val e1 = intercept[TestFailedException] {
fumList should (contain atLeastOneOf ("fee", "fie", "foe", "fum") and be (toList))
}
checkMessageStackDepth(e1, Resources.containedAtLeastOneOf(decorateToStringValue(prettifier, fumList), "\\"fee\\", \\"fie\\", \\"foe\\", \\"fum\\"") + ", but " + Resources.wasNotEqualTo(decorateToStringValue(prettifier, fumList), decorateToStringValue(prettifier, toList)), fileName, thisLineNumber - 2)
val e2 = intercept[TestFailedException] {
fumList should (contain atLeastOneOf ("happy", "birthday", "to", "you") and be (fumList))
}
checkMessageStackDepth(e2, Resources.didNotContainAtLeastOneOf(decorateToStringValue(prettifier, fumList), "\\"happy\\", \\"birthday\\", \\"to\\", \\"you\\""), fileName, thisLineNumber - 2)
}
it("should use the implicit Equality in scope") {
implicit val ise = upperCaseStringEquality
fumList should (contain atLeastOneOf ("FEE", "FIE", "FOE", "FUM") and be (fumList))
val e1 = intercept[TestFailedException] {
fumList should (contain atLeastOneOf ("fum", "foe") and be (toList))
}
checkMessageStackDepth(e1, Resources.didNotContainAtLeastOneOf(decorateToStringValue(prettifier, fumList), "\\"fum\\", \\"foe\\""), fileName, thisLineNumber - 2)
val e2 = intercept[TestFailedException] {
fumList should (contain atLeastOneOf ("fum", "foe") and (be (fumList)))
}
checkMessageStackDepth(e2, Resources.didNotContainAtLeastOneOf(decorateToStringValue(prettifier, fumList), "\\"fum\\", \\"foe\\""), fileName, thisLineNumber - 2)
}
it("should use an explicitly provided Equality") {
(fumList should (contain atLeastOneOf ("FEE", "FIE", "FOE", "FUM") and be (fumList))) (decided by upperCaseStringEquality)
val e1 = intercept[TestFailedException] {
(fumList should (contain atLeastOneOf ("fum", "foe") and be (fumList))) (decided by upperCaseStringEquality)
}
checkMessageStackDepth(e1, Resources.didNotContainAtLeastOneOf(decorateToStringValue(prettifier, fumList), "\\"fum\\", \\"foe\\""), fileName, thisLineNumber - 2)
val e2 = intercept[TestFailedException] {
(fumList should (contain atLeastOneOf ("FEE", "FIE", "FOE", "FUM") and be (toList))) (decided by upperCaseStringEquality)
}
checkMessageStackDepth(e2, Resources.containedAtLeastOneOf(decorateToStringValue(prettifier, fumList), "\\"FEE\\", \\"FIE\\", \\"FOE\\", \\"FUM\\"") + ", but " + Resources.wasNotEqualTo(decorateToStringValue(prettifier, fumList), decorateToStringValue(prettifier, toList)), fileName, thisLineNumber - 2)
(fumList should (contain atLeastOneOf (" FEE ", " FIE ", " FOE ", " FUM ") and be (fumList))) (after being lowerCased and trimmed)
}
it("should throw NotAllowedException with correct stack depth and message when RHS contain duplicated value") {
val e1 = intercept[exceptions.NotAllowedException] {
fumList should (contain atLeastOneOf("fee", "fie", "foe", "fie", "fum") and be (fumList))
}
e1.failedCodeFileName.get should be (fileName)
e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
e1.message should be (Some(Resources.atLeastOneOfDuplicate))
}
}
    describe("when used with (not contain atLeastOneOf (..) and not contain atLeastOneOf (..))") {
it("should do nothing if valid, else throw a TFE with an appropriate error message") {
fumList should (not contain atLeastOneOf ("fee", "fie", "foe", "fuu") and not contain atLeastOneOf("fie", "fee", "fuu", "foe"))
val e1 = intercept[TestFailedException] {
fumList should (not contain atLeastOneOf ("fee", "fie", "foe", "fum") and not contain atLeastOneOf ("happy", "birthday", "to", "you"))
}
checkMessageStackDepth(e1, Resources.containedAtLeastOneOf(decorateToStringValue(prettifier, fumList), "\\"fee\\", \\"fie\\", \\"foe\\", \\"fum\\""), fileName, thisLineNumber - 2)
val e2 = intercept[TestFailedException] {
fumList should (not contain atLeastOneOf ("happy", "birthday", "to", "you") and not contain atLeastOneOf ("fee", "fie", "foe", "fum"))
}
checkMessageStackDepth(e2, Resources.didNotContainAtLeastOneOf(decorateToStringValue(prettifier, fumList), "\\"happy\\", \\"birthday\\", \\"to\\", \\"you\\"") + ", but " + Resources.containedAtLeastOneOf(decorateToStringValue(prettifier, fumList), "\\"fee\\", \\"fie\\", \\"foe\\", \\"fum\\""), fileName, thisLineNumber - 2)
}
it("should use the implicit Equality in scope") {
implicit val ise = upperCaseStringEquality
fumList should (not contain atLeastOneOf ("fum", "foe") and not contain atLeastOneOf ("fum", "foe"))
val e1 = intercept[TestFailedException] {
fumList should (not contain atLeastOneOf ("FEE", "FIE", "FOE", "FUM") and not contain atLeastOneOf ("fum", "foe"))
}
checkMessageStackDepth(e1, Resources.containedAtLeastOneOf(decorateToStringValue(prettifier, fumList), "\\"FEE\\", \\"FIE\\", \\"FOE\\", \\"FUM\\""), fileName, thisLineNumber - 2)
val e2 = intercept[TestFailedException] {
fumList should (not contain atLeastOneOf ("fum", "foe") and (not contain atLeastOneOf ("FEE", "FIE", "FOE", "FUM")))
}
checkMessageStackDepth(e2, Resources.didNotContainAtLeastOneOf(decorateToStringValue(prettifier, fumList), "\\"fum\\", \\"foe\\"") + ", but " + Resources.containedAtLeastOneOf(decorateToStringValue(prettifier, fumList), "\\"FEE\\", \\"FIE\\", \\"FOE\\", \\"FUM\\""), fileName, thisLineNumber - 2)
}
it("should use an explicitly provided Equality") {
(fumList should (not contain atLeastOneOf ("fum", "foe") and not contain atLeastOneOf ("fum", "foe"))) (decided by upperCaseStringEquality, decided by upperCaseStringEquality)
val e1 = intercept[TestFailedException] {
(fumList should (not contain atLeastOneOf ("fum", "foe") and not contain atLeastOneOf ("FEE", "FIE", "FOE", "FUM"))) (decided by upperCaseStringEquality, decided by upperCaseStringEquality)
}
checkMessageStackDepth(e1, Resources.didNotContainAtLeastOneOf(decorateToStringValue(prettifier, fumList), "\\"fum\\", \\"foe\\"") + ", but " + Resources.containedAtLeastOneOf(decorateToStringValue(prettifier, fumList), "\\"FEE\\", \\"FIE\\", \\"FOE\\", \\"FUM\\""), fileName, thisLineNumber - 2)
val e2 = intercept[TestFailedException] {
(fumList should (not contain atLeastOneOf ("FEE", "FIE", "FOE", "FUM") and not contain atLeastOneOf ("fum", "foe"))) (decided by upperCaseStringEquality, decided by upperCaseStringEquality)
}
checkMessageStackDepth(e2, Resources.containedAtLeastOneOf(decorateToStringValue(prettifier, fumList), "\\"FEE\\", \\"FIE\\", \\"FOE\\", \\"FUM\\""), fileName, thisLineNumber - 2)
(fumList should (contain atLeastOneOf (" FEE ", " FIE ", " FOE ", " FUM ") and contain atLeastOneOf (" FEE ", " FIE ", " FOE ", " FUM "))) (after being lowerCased and trimmed, after being lowerCased and trimmed)
}
it("should throw NotAllowedException with correct stack depth and message when RHS contain duplicated value") {
val e1 = intercept[exceptions.NotAllowedException] {
fumList should (not contain atLeastOneOf ("fee", "fie", "foe", "fie", "fum") and not contain atLeastOneOf("fie", "fee", "fuu", "foe"))
}
e1.failedCodeFileName.get should be (fileName)
e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
e1.message should be (Some(Resources.atLeastOneOfDuplicate))
val e2 = intercept[exceptions.NotAllowedException] {
fumList should (not contain atLeastOneOf ("fie", "fee", "fuu", "foe") and not contain atLeastOneOf("fee", "fie", "foe", "fie", "fum"))
}
e2.failedCodeFileName.get should be (fileName)
e2.failedCodeLineNumber.get should be (thisLineNumber - 3)
e2.message should be (Some(Resources.atLeastOneOfDuplicate))
}
}
    describe("when used with (not equal (..) and not contain atLeastOneOf (..))") {
it("should do nothing if valid, else throw a TFE with an appropriate error message") {
fumList should (not equal (toList) and not contain atLeastOneOf("fie", "fee", "fuu", "foe"))
val e1 = intercept[TestFailedException] {
fumList should (not equal (fumList) and not contain atLeastOneOf ("happy", "birthday", "to", "you"))
}
checkMessageStackDepth(e1, Resources.equaled(decorateToStringValue(prettifier, fumList), decorateToStringValue(prettifier, fumList)), fileName, thisLineNumber - 2)
val e2 = intercept[TestFailedException] {
fumList should (not equal (toList) and not contain atLeastOneOf ("fee", "fie", "foe", "fum"))
}
checkMessageStackDepth(e2, Resources.didNotEqual(decorateToStringValue(prettifier, fumList), decorateToStringValue(prettifier, toList)) + ", but " + Resources.containedAtLeastOneOf(decorateToStringValue(prettifier, fumList), "\\"fee\\", \\"fie\\", \\"foe\\", \\"fum\\""), fileName, thisLineNumber - 2)
}
it("should use the implicit Equality in scope") {
implicit val ise = upperCaseStringEquality
fumList should (not equal (toList) and not contain atLeastOneOf ("fum", "foe"))
val e1 = intercept[TestFailedException] {
fumList should (not equal (fumList) and not contain atLeastOneOf ("fum", "foe"))
}
checkMessageStackDepth(e1, Resources.equaled(decorateToStringValue(prettifier, fumList), decorateToStringValue(prettifier, fumList)), fileName, thisLineNumber - 2)
val e2 = intercept[TestFailedException] {
fumList should (not equal (toList) and (not contain atLeastOneOf ("FEE", "FIE", "FOE", "FUM")))
}
checkMessageStackDepth(e2, Resources.didNotEqual(decorateToStringValue(prettifier, fumList), decorateToStringValue(prettifier, toList)) + ", but " + Resources.containedAtLeastOneOf(decorateToStringValue(prettifier, fumList), "\\"FEE\\", \\"FIE\\", \\"FOE\\", \\"FUM\\""), fileName, thisLineNumber - 2)
}
it("should use an explicitly provided Equality") {
(fumList should (not equal (toList) and not contain atLeastOneOf ("fum", "foe"))) (decided by defaultEquality, decided by upperCaseStringEquality)
val e1 = intercept[TestFailedException] {
(fumList should (not equal (toList) and not contain atLeastOneOf ("FEE", "FIE", "FOE", "FUM"))) (decided by defaultEquality, decided by upperCaseStringEquality)
}
checkMessageStackDepth(e1, Resources.didNotEqual(decorateToStringValue(prettifier, fumList), decorateToStringValue(prettifier, toList)) + ", but " + Resources.containedAtLeastOneOf(decorateToStringValue(prettifier, fumList), "\\"FEE\\", \\"FIE\\", \\"FOE\\", \\"FUM\\""), fileName, thisLineNumber - 2)
val e2 = intercept[TestFailedException] {
(fumList should (not equal (fumList) and not contain atLeastOneOf ("fum", "foe"))) (decided by defaultEquality, decided by upperCaseStringEquality)
}
checkMessageStackDepth(e2, Resources.equaled(decorateToStringValue(prettifier, fumList), decorateToStringValue(prettifier, fumList)), fileName, thisLineNumber - 2)
(fumList should (not contain atLeastOneOf (" FEE ", " FIE ", " FOE ", " FUU ") and not contain atLeastOneOf (" FEE ", " FIE ", " FOE ", " FUU "))) (after being lowerCased and trimmed, after being lowerCased and trimmed)
}
it("should throw NotAllowedException with correct stack depth and message when RHS contain duplicated value") {
val e1 = intercept[exceptions.NotAllowedException] {
fumList should (not equal (toList) and not contain atLeastOneOf("fee", "fie", "foe", "fie", "fum"))
}
e1.failedCodeFileName.get should be (fileName)
e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
e1.message should be (Some(Resources.atLeastOneOfDuplicate))
}
}
    describe("when used with (not be (..) and not contain atLeastOneOf (..))") {
it("should do nothing if valid, else throw a TFE with an appropriate error message") {
fumList should (not be (toList) and not contain atLeastOneOf("fie", "fee", "fuu", "foe"))
val e1 = intercept[TestFailedException] {
fumList should (not be (fumList) and not contain atLeastOneOf ("happy", "birthday", "to", "you"))
}
checkMessageStackDepth(e1, Resources.wasEqualTo(decorateToStringValue(prettifier, fumList), decorateToStringValue(prettifier, fumList)), fileName, thisLineNumber - 2)
val e2 = intercept[TestFailedException] {
fumList should (not be (toList) and not contain atLeastOneOf ("fee", "fie", "foe", "fum"))
}
checkMessageStackDepth(e2, Resources.wasNotEqualTo(decorateToStringValue(prettifier, fumList), decorateToStringValue(prettifier, toList)) + ", but " + Resources.containedAtLeastOneOf(decorateToStringValue(prettifier, fumList), "\\"fee\\", \\"fie\\", \\"foe\\", \\"fum\\""), fileName, thisLineNumber - 2)
}
it("should use the implicit Equality in scope") {
implicit val ise = upperCaseStringEquality
fumList should (not be (toList) and not contain atLeastOneOf ("fum", "foe"))
val e1 = intercept[TestFailedException] {
fumList should (not be (fumList) and not contain atLeastOneOf ("fum", "foe"))
}
checkMessageStackDepth(e1, Resources.wasEqualTo(decorateToStringValue(prettifier, fumList), decorateToStringValue(prettifier, fumList)), fileName, thisLineNumber - 2)
val e2 = intercept[TestFailedException] {
fumList should (not be (toList) and (not contain atLeastOneOf ("FEE", "FIE", "FOE", "FUM")))
}
checkMessageStackDepth(e2, Resources.wasNotEqualTo(decorateToStringValue(prettifier, fumList), decorateToStringValue(prettifier, toList)) + ", but " + Resources.containedAtLeastOneOf(decorateToStringValue(prettifier, fumList), "\\"FEE\\", \\"FIE\\", \\"FOE\\", \\"FUM\\""), fileName, thisLineNumber - 2)
}
it("should use an explicitly provided Equality") {
(fumList should (not be (toList) and not contain atLeastOneOf ("fum", "foe"))) (decided by upperCaseStringEquality)
val e1 = intercept[TestFailedException] {
(fumList should (not be (toList) and not contain atLeastOneOf ("FEE", "FIE", "FOE", "FUM"))) (decided by upperCaseStringEquality)
}
checkMessageStackDepth(e1, Resources.wasNotEqualTo(decorateToStringValue(prettifier, fumList), decorateToStringValue(prettifier, toList)) + ", but " + Resources.containedAtLeastOneOf(decorateToStringValue(prettifier, fumList), "\\"FEE\\", \\"FIE\\", \\"FOE\\", \\"FUM\\""), fileName, thisLineNumber - 2)
val e2 = intercept[TestFailedException] {
(fumList should (not be (fumList) and not contain atLeastOneOf ("fum", "foe"))) (decided by upperCaseStringEquality)
}
checkMessageStackDepth(e2, Resources.wasEqualTo(decorateToStringValue(prettifier, fumList), decorateToStringValue(prettifier, fumList)), fileName, thisLineNumber - 2)
(fumList should (not contain atLeastOneOf (" FEE ", " FIE ", " FOE ", " FUU ") and not contain atLeastOneOf (" FEE ", " FIE ", " FOE ", " FUU "))) (after being lowerCased and trimmed, after being lowerCased and trimmed)
}
it("should throw NotAllowedException with correct stack depth and message when RHS contain duplicated value") {
val e1 = intercept[exceptions.NotAllowedException] {
fumList should (not be (toList) and not contain atLeastOneOf("fee", "fie", "foe", "fie", "fum"))
}
e1.failedCodeFileName.get should be (fileName)
e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
e1.message should be (Some(Resources.atLeastOneOfDuplicate))
}
}
}
describe("collection of Lists") {
val list1s: Vector[List[Int]] = Vector(List(1), List(1), List(1))
val lists: Vector[List[Int]] = Vector(List(1), List(1), List(2))
val nils: Vector[List[Int]] = Vector(Nil, Nil, Nil)
val listsNil: Vector[List[Int]] = Vector(List(1), List(1), Nil)
val hiLists: Vector[List[String]] = Vector(List("hi"), List("hi"), List("hi"))
val toLists: Vector[List[String]] = Vector(List("to"), List("to"), List("to"))
def allErrMsg(index: Int, message: String, lineNumber: Int, left: Any): String =
"'all' inspection failed, because: \\n" +
" at index " + index + ", " + message + " (" + fileName + ":" + (lineNumber) + ") \\n" +
"in " + decorateToStringValue(prettifier, left)
    describe("when used with (contain atLeastOneOf (..) and contain atLeastOneOf (..))") {
it("should do nothing if valid, else throw a TFE with an appropriate error message") {
all (list1s) should (contain atLeastOneOf (3, 2, 1) and contain atLeastOneOf (1, 3, 4))
atLeast (2, lists) should (contain atLeastOneOf (3, 1, 5) and contain atLeastOneOf (1, 3, 4))
atMost (2, lists) should (contain atLeastOneOf (3, 2, 8) and contain atLeastOneOf (2, 3, 4))
no (lists) should (contain atLeastOneOf (3, 6, 9) and contain atLeastOneOf (3, 4, 5))
no (nils) should (contain atLeastOneOf (1, 2, 8) and contain atLeastOneOf (1, 3, 4))
no (listsNil) should (contain atLeastOneOf(3, 8, 5) and contain atLeastOneOf (3, 4, 5))
val e1 = intercept[TestFailedException] {
all (lists) should (contain atLeastOneOf (1, 6, 8) and contain atLeastOneOf (1, 3, 4))
}
checkMessageStackDepth(e1, allErrMsg(2, decorateToStringValue(prettifier, List(2)) + " did not contain at least one of (1, 6, 8)", thisLineNumber - 2, lists), fileName, thisLineNumber - 2)
val e2 = intercept[TestFailedException] {
all (lists) should (contain atLeastOneOf (1, 2, 8) and contain atLeastOneOf (1, 3, 4))
}
checkMessageStackDepth(e2, allErrMsg(2, decorateToStringValue(prettifier, List(2)) + " contained at least one of (1, 2, 8), but " + decorateToStringValue(prettifier, List(2)) + " did not contain at least one of (1, 3, 4)", thisLineNumber - 2, lists), fileName, thisLineNumber - 2)
val e3 = intercept[TestFailedException] {
all (nils) should (contain atLeastOneOf ("hi", "hello") and contain atLeastOneOf ("ho", "hey", "howdy"))
}
checkMessageStackDepth(e3, allErrMsg(0, decorateToStringValue(prettifier, Nil) + " did not contain at least one of (\\"hi\\", \\"hello\\")", thisLineNumber - 2, nils), fileName, thisLineNumber - 2)
val e4 = intercept[TestFailedException] {
all (hiLists) should (contain atLeastOneOf ("hi", "hello") and contain atLeastOneOf ("ho", "hey", "howdy"))
}
checkMessageStackDepth(e4, allErrMsg(0, decorateToStringValue(prettifier, List("hi")) + " contained at least one of (\\"hi\\", \\"hello\\"), but " + decorateToStringValue(prettifier, List("hi")) + " did not contain at least one of (\\"ho\\", \\"hey\\", \\"howdy\\")", thisLineNumber - 2, hiLists), fileName, thisLineNumber - 2)
val e5 = intercept[TestFailedException] {
all (listsNil) should (contain atLeastOneOf (1, 3, 4) and contain atLeastOneOf (1, 3, Nil))
}
checkMessageStackDepth(e5, allErrMsg(2, decorateToStringValue(prettifier, Nil) + " did not contain at least one of (1, 3, 4)", thisLineNumber - 2, listsNil), fileName, thisLineNumber - 2)
val e6 = intercept[TestFailedException] {
all (lists) should (contain atLeastOneOf (1, 2, 8) and contain atLeastOneOf (1, 3, 4))
}
checkMessageStackDepth(e6, allErrMsg(2, decorateToStringValue(prettifier, List(2)) + " contained at least one of (1, 2, 8), but " + decorateToStringValue(prettifier, List(2)) + " did not contain at least one of (1, 3, 4)", thisLineNumber - 2, lists), fileName, thisLineNumber - 2)
}
it("should use the implicit Equality in scope") {
implicit val ise = upperCaseStringEquality
all (hiLists) should (contain atLeastOneOf ("HI", "HE") and contain atLeastOneOf ("HI", "HE"))
val e1 = intercept[TestFailedException] {
all (hiLists) should (contain atLeastOneOf ("hi", "he") and contain atLeastOneOf ("HI", "HE"))
}
checkMessageStackDepth(e1, allErrMsg(0, decorateToStringValue(prettifier, List("hi")) + " did not contain at least one of (\\"hi\\", \\"he\\")", thisLineNumber - 2, hiLists), fileName, thisLineNumber - 2)
val e2 = intercept[TestFailedException] {
all (hiLists) should (contain atLeastOneOf ("HI", "HE") and contain atLeastOneOf ("hi", "he"))
}
checkMessageStackDepth(e2, allErrMsg(0, decorateToStringValue(prettifier, List("hi")) + " contained at least one of (\\"HI\\", \\"HE\\"), but " + decorateToStringValue(prettifier, List("hi")) + " did not contain at least one of (\\"hi\\", \\"he\\")", thisLineNumber - 2, hiLists), fileName, thisLineNumber - 2)
}
it("should use an explicitly provided Equality") {
(all (hiLists) should (contain atLeastOneOf ("HI", "HE") and contain atLeastOneOf ("HI", "HE"))) (decided by upperCaseStringEquality, decided by upperCaseStringEquality)
val e1 = intercept[TestFailedException] {
(all (hiLists) should (contain atLeastOneOf ("hi", "he") and contain atLeastOneOf ("HI", "HE"))) (decided by upperCaseStringEquality, decided by upperCaseStringEquality)
}
checkMessageStackDepth(e1, allErrMsg(0, decorateToStringValue(prettifier, List("hi")) + " did not contain at least one of (\\"hi\\", \\"he\\")", thisLineNumber - 2, hiLists), fileName, thisLineNumber - 2)
val e2 = intercept[TestFailedException] {
(all (hiLists) should (contain atLeastOneOf ("HI", "HE") and contain atLeastOneOf ("hi", "he"))) (decided by upperCaseStringEquality, decided by upperCaseStringEquality)
}
checkMessageStackDepth(e2, allErrMsg(0, decorateToStringValue(prettifier, List("hi")) + " contained at least one of (\\"HI\\", \\"HE\\"), but " + decorateToStringValue(prettifier, List("hi")) + " did not contain at least one of (\\"hi\\", \\"he\\")", thisLineNumber - 2, hiLists), fileName, thisLineNumber - 2)
}
it("should throw NotAllowedException with correct stack depth and message when RHS contain duplicated value") {
val e1 = intercept[exceptions.NotAllowedException] {
all (list1s) should (contain atLeastOneOf (3, 2, 2, 1) and contain atLeastOneOf (1, 3, 4))
}
e1.failedCodeFileName.get should be (fileName)
e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
e1.message should be (Some(Resources.atLeastOneOfDuplicate))
val e2 = intercept[exceptions.NotAllowedException] {
all (list1s) should (contain atLeastOneOf (1, 3, 4) and contain atLeastOneOf (3, 2, 2, 1))
}
e2.failedCodeFileName.get should be (fileName)
e2.failedCodeLineNumber.get should be (thisLineNumber - 3)
e2.message should be (Some(Resources.atLeastOneOfDuplicate))
}
}
    describe("when used with (be (..) and contain atLeastOneOf (..))") {
it("should do nothing if valid, else throw a TFE with an appropriate error message") {
all (list1s) should (be (List(1)) and contain atLeastOneOf (1, 3, 4))
atLeast (2, lists) should (be (List(1)) and contain atLeastOneOf (1, 3, 4))
atMost (2, lists) should (be (List(1)) and contain atLeastOneOf (2, 3, 4))
no (lists) should (be (List(8)) and contain atLeastOneOf (3, 4, 5))
no (nils) should (be (List(8)) and contain atLeastOneOf (1, 3, 4))
no (listsNil) should (be (List(8)) and contain atLeastOneOf (3, 4, 5))
val e1 = intercept[TestFailedException] {
all (lists) should (be (List(1)) and contain atLeastOneOf (1, 3, 4))
}
checkMessageStackDepth(e1, allErrMsg(2, decorateToStringValue(prettifier, List(2)) + " was not equal to " + decorateToStringValue(prettifier, List(1)), thisLineNumber - 2, lists), fileName, thisLineNumber - 2)
val e2 = intercept[TestFailedException] {
all (list1s) should (be (List(1)) and contain atLeastOneOf (2, 3, 8))
}
checkMessageStackDepth(e2, allErrMsg(0, decorateToStringValue(prettifier, List(1)) + " was equal to " + decorateToStringValue(prettifier, List(1)) + ", but " + decorateToStringValue(prettifier, List(1)) + " did not contain at least one of (2, 3, 8)", thisLineNumber - 2, list1s), fileName, thisLineNumber - 2)
val e3 = intercept[TestFailedException] {
all (nils) should (be (List("hey")) and contain atLeastOneOf ("ho", "hey", "howdy"))
}
checkMessageStackDepth(e3, allErrMsg(0, decorateToStringValue(prettifier, Nil) + " was not equal to " + decorateToStringValue(prettifier, List("hey")), thisLineNumber - 2, nils), fileName, thisLineNumber - 2)
val e4 = intercept[TestFailedException] {
all (hiLists) should (be (List("hi")) and contain atLeastOneOf ("ho", "hey", "howdy"))
}
checkMessageStackDepth(e4, allErrMsg(0, decorateToStringValue(prettifier, List("hi")) + " was equal to " + decorateToStringValue(prettifier, List("hi")) + ", but " + decorateToStringValue(prettifier, List("hi")) + " did not contain at least one of (\\"ho\\", \\"hey\\", \\"howdy\\")", thisLineNumber - 2, hiLists), fileName, thisLineNumber - 2)
val e5 = intercept[TestFailedException] {
all (listsNil) should (be (List(1)) and contain atLeastOneOf (1, 3, Nil))
}
checkMessageStackDepth(e5, allErrMsg(2, decorateToStringValue(prettifier, Nil) + " was not equal to " + decorateToStringValue(prettifier, List(1)), thisLineNumber - 2, listsNil), fileName, thisLineNumber - 2)
val e6 = intercept[TestFailedException] {
all (list1s) should (be (List(1)) and contain atLeastOneOf (2, 3, 8))
}
checkMessageStackDepth(e6, allErrMsg(0, decorateToStringValue(prettifier, List(1)) + " was equal to " + decorateToStringValue(prettifier, List(1)) + ", but " + decorateToStringValue(prettifier, List(1)) + " did not contain at least one of (2, 3, 8)", thisLineNumber - 2, list1s), fileName, thisLineNumber - 2)
}
it("should use the implicit Equality in scope") {
implicit val ise = upperCaseStringEquality
all (hiLists) should (be (List("hi")) and contain atLeastOneOf ("HI", "HE"))
val e1 = intercept[TestFailedException] {
all (hiLists) should (be (List("ho")) and contain atLeastOneOf ("HI", "HE"))
}
checkMessageStackDepth(e1, allErrMsg(0, decorateToStringValue(prettifier, List("hi")) + " was not equal to " + decorateToStringValue(prettifier, List("ho")), thisLineNumber - 2, hiLists), fileName, thisLineNumber - 2)
val e2 = intercept[TestFailedException] {
all (hiLists) should (be (List("hi")) and contain atLeastOneOf ("hi", "he"))
}
checkMessageStackDepth(e2, allErrMsg(0, decorateToStringValue(prettifier, List("hi")) + " was equal to " + decorateToStringValue(prettifier, List("hi")) + ", but " + decorateToStringValue(prettifier, List("hi")) + " did not contain at least one of (\\"hi\\", \\"he\\")", thisLineNumber - 2, hiLists), fileName, thisLineNumber - 2)
}
it("should use an explicitly provided Equality") {
(all (hiLists) should (be (List("hi")) and contain atLeastOneOf ("HI", "HE"))) (decided by upperCaseStringEquality)
val e1 = intercept[TestFailedException] {
(all (hiLists) should (be (List("ho")) and contain atLeastOneOf ("HI", "HE"))) (decided by upperCaseStringEquality)
}
checkMessageStackDepth(e1, allErrMsg(0, decorateToStringValue(prettifier, List("hi")) + " was not equal to " + decorateToStringValue(prettifier, List("ho")), thisLineNumber - 2, hiLists), fileName, thisLineNumber - 2)
val e2 = intercept[TestFailedException] {
(all (hiLists) should (be (List("hi")) and contain atLeastOneOf ("hi", "he"))) (decided by upperCaseStringEquality)
}
checkMessageStackDepth(e2, allErrMsg(0, decorateToStringValue(prettifier, List("hi")) + " was equal to " + decorateToStringValue(prettifier, List("hi")) + ", but " + decorateToStringValue(prettifier, List("hi")) + " did not contain at least one of (\\"hi\\", \\"he\\")", thisLineNumber - 2, hiLists), fileName, thisLineNumber - 2)
}
it("should throw NotAllowedException with correct stack depth and message when RHS contain duplicated value") {
val e1 = intercept[exceptions.NotAllowedException] {
all (list1s) should (be (List(1)) and contain atLeastOneOf (3, 2, 2, 1))
}
e1.failedCodeFileName.get should be (fileName)
e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
e1.message should be (Some(Resources.atLeastOneOfDuplicate))
}
}
describe("when used with (not contain oneOf (..) and not contain oneOf (..))") {
it("should do nothing if valid, else throw a TFE with an appropriate error message") {
all (list1s) should (not contain atLeastOneOf (3, 2, 8) and not contain atLeastOneOf (8, 3, 4))
atLeast (2, lists) should (not contain atLeastOneOf (3, 8, 5) and not contain atLeastOneOf (8, 3, 4))
atMost (2, lists) should (not contain atLeastOneOf (3, 6, 8) and contain atLeastOneOf (5, 3, 4))
no (lists) should (not contain atLeastOneOf (1, 2, 9) and not contain atLeastOneOf (2, 1, 5))
val e1 = intercept[TestFailedException] {
all (lists) should (not contain atLeastOneOf (2, 6, 8) and not contain atLeastOneOf (2, 3, 4))
}
checkMessageStackDepth(e1, allErrMsg(2, decorateToStringValue(prettifier, List(2)) + " contained at least one of (2, 6, 8)", thisLineNumber - 2, lists), fileName, thisLineNumber - 2)
val e2 = intercept[TestFailedException] {
all (lists) should (not contain atLeastOneOf (3, 6, 8) and not contain atLeastOneOf (2, 3, 4))
}
checkMessageStackDepth(e2, allErrMsg(2, decorateToStringValue(prettifier, List(2)) + " did not contain at least one of (3, 6, 8), but " + decorateToStringValue(prettifier, List(2)) + " contained at least one of (2, 3, 4)", thisLineNumber - 2, lists), fileName, thisLineNumber - 2)
val e3 = intercept[TestFailedException] {
all (hiLists) should (not contain atLeastOneOf ("hi", "hello") and not contain atLeastOneOf ("ho", "hey", "howdy"))
}
checkMessageStackDepth(e3, allErrMsg(0, decorateToStringValue(prettifier, List("hi")) + " contained at least one of (\\"hi\\", \\"hello\\")", thisLineNumber - 2, hiLists), fileName, thisLineNumber - 2)
val e4 = intercept[TestFailedException] {
all (hiLists) should (not contain atLeastOneOf ("ho", "hey", "howdy") and not contain atLeastOneOf ("hi", "hello"))
}
checkMessageStackDepth(e4, allErrMsg(0, decorateToStringValue(prettifier, List("hi")) + " did not contain at least one of (\\"ho\\", \\"hey\\", \\"howdy\\"), but " + decorateToStringValue(prettifier, List("hi")) + " contained at least one of (\\"hi\\", \\"hello\\")", thisLineNumber - 2, hiLists), fileName, thisLineNumber - 2)
}
it("should use the implicit Equality in scope") {
implicit val ise = upperCaseStringEquality
all (hiLists) should (not contain atLeastOneOf ("hi", "he") and not contain atLeastOneOf ("hi", "he"))
val e1 = intercept[TestFailedException] {
all (hiLists) should (not contain atLeastOneOf ("HI", "HE") and not contain atLeastOneOf ("hi", "he"))
}
checkMessageStackDepth(e1, allErrMsg(0, decorateToStringValue(prettifier, List("hi")) + " contained at least one of (\\"HI\\", \\"HE\\")", thisLineNumber - 2, hiLists), fileName, thisLineNumber - 2)
val e2 = intercept[TestFailedException] {
all (hiLists) should (not contain atLeastOneOf ("hi", "he") and not contain atLeastOneOf ("HI", "HE"))
}
checkMessageStackDepth(e2, allErrMsg(0, decorateToStringValue(prettifier, List("hi")) + " did not contain at least one of (\\"hi\\", \\"he\\"), but " + decorateToStringValue(prettifier, List("hi")) + " contained at least one of (\\"HI\\", \\"HE\\")", thisLineNumber - 2, hiLists), fileName, thisLineNumber - 2)
}
it("should use an explicitly provided Equality") {
(all (hiLists) should (not contain atLeastOneOf ("hi", "he") and not contain atLeastOneOf ("hi", "he"))) (decided by upperCaseStringEquality, decided by upperCaseStringEquality)
val e1 = intercept[TestFailedException] {
(all (hiLists) should (not contain atLeastOneOf ("HI", "HE") and not contain atLeastOneOf ("hi", "he"))) (decided by upperCaseStringEquality, decided by upperCaseStringEquality)
}
checkMessageStackDepth(e1, allErrMsg(0, decorateToStringValue(prettifier, List("hi")) + " contained at least one of (\\"HI\\", \\"HE\\")", thisLineNumber - 2, hiLists), fileName, thisLineNumber - 2)
val e2 = intercept[TestFailedException] {
(all (hiLists) should (not contain atLeastOneOf ("hi", "he") and not contain atLeastOneOf ("HI", "HE"))) (decided by upperCaseStringEquality, decided by upperCaseStringEquality)
}
checkMessageStackDepth(e2, allErrMsg(0, decorateToStringValue(prettifier, List("hi")) + " did not contain at least one of (\\"hi\\", \\"he\\"), but " + decorateToStringValue(prettifier, List("hi")) + " contained at least one of (\\"HI\\", \\"HE\\")", thisLineNumber - 2, hiLists), fileName, thisLineNumber - 2)
}
it("should throw NotAllowedException with correct stack depth and message when RHS contain duplicated value") {
val e1 = intercept[exceptions.NotAllowedException] {
all (list1s) should (not contain atLeastOneOf (3, 2, 2, 1) and not contain atLeastOneOf (8, 3, 4))
}
e1.failedCodeFileName.get should be (fileName)
e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
e1.message should be (Some(Resources.atLeastOneOfDuplicate))
val e2 = intercept[exceptions.NotAllowedException] {
all (list1s) should (not contain atLeastOneOf (8, 3, 4) and not contain atLeastOneOf (3, 2, 2, 1))
}
e2.failedCodeFileName.get should be (fileName)
e2.failedCodeLineNumber.get should be (thisLineNumber - 3)
e2.message should be (Some(Resources.atLeastOneOfDuplicate))
}
}
describe("when used with (not be (..) and not contain oneOf (..))") {
it("should do nothing if valid, else throw a TFE with an appropriate error message") {
all (list1s) should (not be (List(2)) and not contain atLeastOneOf (8, 3, 4))
atLeast (2, lists) should (not be (List(3)) and not contain atLeastOneOf (8, 3, 4))
atMost (2, lists) should (not be (List(3)) and contain atLeastOneOf (5, 3, 4))
no (list1s) should (not be (List(1)) and not contain atLeastOneOf (2, 1, 5))
val e1 = intercept[TestFailedException] {
all (lists) should (not be (List(2)) and not contain atLeastOneOf (2, 3, 4))
}
checkMessageStackDepth(e1, allErrMsg(2, decorateToStringValue(prettifier, List(2)) + " was equal to " + decorateToStringValue(prettifier, List(2)), thisLineNumber - 2, lists), fileName, thisLineNumber - 2)
val e2 = intercept[TestFailedException] {
all (lists) should (not be (List(3)) and not contain atLeastOneOf (2, 3, 4))
}
checkMessageStackDepth(e2, allErrMsg(2, decorateToStringValue(prettifier, List(2)) + " was not equal to " + decorateToStringValue(prettifier, List(3)) + ", but " + decorateToStringValue(prettifier, List(2)) + " contained at least one of (2, 3, 4)", thisLineNumber - 2, lists), fileName, thisLineNumber - 2)
val e3 = intercept[TestFailedException] {
all (hiLists) should (not be (List("hi")) and not contain atLeastOneOf ("ho", "hey", "howdy"))
}
checkMessageStackDepth(e3, allErrMsg(0, decorateToStringValue(prettifier, List("hi")) + " was equal to " + decorateToStringValue(prettifier, List("hi")), thisLineNumber - 2, hiLists), fileName, thisLineNumber - 2)
val e4 = intercept[TestFailedException] {
all (hiLists) should (not be (List("ho")) and not contain atLeastOneOf ("hi", "hello"))
}
checkMessageStackDepth(e4, allErrMsg(0, decorateToStringValue(prettifier, List("hi")) + " was not equal to " + decorateToStringValue(prettifier, List("ho")) + ", but " + decorateToStringValue(prettifier, List("hi")) + " contained at least one of (\\"hi\\", \\"hello\\")", thisLineNumber - 2, hiLists), fileName, thisLineNumber - 2)
}
it("should use the implicit Equality in scope") {
implicit val ise = upperCaseStringEquality
all (hiLists) should (not be (List("ho")) and not contain atLeastOneOf ("hi", "he"))
val e1 = intercept[TestFailedException] {
all (hiLists) should (not be (List("hi")) and not contain atLeastOneOf ("hi", "he"))
}
checkMessageStackDepth(e1, allErrMsg(0, decorateToStringValue(prettifier, List("hi")) + " was equal to " + decorateToStringValue(prettifier, List("hi")), thisLineNumber - 2, hiLists), fileName, thisLineNumber - 2)
val e2 = intercept[TestFailedException] {
all (hiLists) should (not be (List("ho")) and not contain atLeastOneOf ("HI", "HE"))
}
checkMessageStackDepth(e2, allErrMsg(0, decorateToStringValue(prettifier, List("hi")) + " was not equal to " + decorateToStringValue(prettifier, List("ho")) + ", but " + decorateToStringValue(prettifier, List("hi")) + " contained at least one of (\\"HI\\", \\"HE\\")", thisLineNumber - 2, hiLists), fileName, thisLineNumber - 2)
}
it("should use an explicitly provided Equality") {
(all (hiLists) should (not be (List("ho")) and not contain atLeastOneOf ("hi", "he"))) (decided by upperCaseStringEquality)
val e1 = intercept[TestFailedException] {
(all (hiLists) should (not be (List("hi")) and not contain atLeastOneOf ("hi", "he"))) (decided by upperCaseStringEquality)
}
checkMessageStackDepth(e1, allErrMsg(0, decorateToStringValue(prettifier, List("hi")) + " was equal to " + decorateToStringValue(prettifier, List("hi")), thisLineNumber - 2, hiLists), fileName, thisLineNumber - 2)
val e2 = intercept[TestFailedException] {
(all (hiLists) should (not be (List("ho")) and not contain atLeastOneOf ("HI", "HE"))) (decided by upperCaseStringEquality)
}
checkMessageStackDepth(e2, allErrMsg(0, decorateToStringValue(prettifier, List("hi")) + " was not equal to " + decorateToStringValue(prettifier, List("ho")) + ", but " + decorateToStringValue(prettifier, List("hi")) + " contained at least one of (\\"HI\\", \\"HE\\")", thisLineNumber - 2, hiLists), fileName, thisLineNumber - 2)
}
it("should throw NotAllowedException with correct stack depth and message when RHS contain duplicated value") {
val e1 = intercept[exceptions.NotAllowedException] {
all (list1s) should (not be (List(2)) and not contain atLeastOneOf (3, 2, 2, 1))
}
e1.failedCodeFileName.get should be (fileName)
e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
e1.message should be (Some(Resources.atLeastOneOfDuplicate))
}
}
}
}
| dotty-staging/scalatest | scalatest-test/src/test/scala/org/scalatest/ListShouldContainAtLeastOneOfLogicalAndSpec.scala | Scala | apache-2.0 | 54,244 |
package mimir.exec;
import java.sql._;
import mimir.sql.JDBCUtils;
import mimir.algebra._;
import mimir.algebra.Type._;
class ResultSetIterator(src: ResultSet) extends ResultIterator
{
val meta = src.getMetaData();
var extract: List[() => PrimitiveValue] =
(0 until meta.getColumnCount()).map( (i) => {
JDBCUtils.convertSqlType(meta.getColumnType(i+1)) match {
case TString =>
() => {
new StringPrimitive(src.getString(i+1))
}
case TFloat =>
() => {
new FloatPrimitive(src.getDouble(i+1))
}
case TInt =>
() => {
if(meta.getColumnName(i+1).equalsIgnoreCase("ROWID"))
new RowIdPrimitive(src.getString(i+1))
else
new IntPrimitive(src.getLong(i+1))
}
}
}).toList
var schema: List[(String,Type.T)] =
(0 until meta.getColumnCount()).map( (i) => (
meta.getColumnName(i+1),
JDBCUtils.convertSqlType(meta.getColumnType(i+1))
) ).toList
var isFirst = true;
def apply(v: Int): PrimitiveValue = {
val ret = extract(v)()
if(src.wasNull()){ return new NullPrimitive(); }
else { return ret; }
}
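  // Sketch of the lookup path above: extract(v) holds one pre-built closure per column, chosen
  // once from the JDBC column type (TString/TFloat/TInt, with a column named ROWID surfaced as
  // RowIdPrimitive); apply(v) runs that closure and then checks src.wasNull() so SQL NULLs come
  // back as NullPrimitive instead of a zero or empty value.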
def numCols: Int = extract.length
def open() = {
while(src.isBeforeFirst()){ src.next(); }
}
def getNext(): Boolean =
{
if(isFirst) { isFirst = false; }
else { src.next(); }
if(src.isAfterLast()){ return false; }
return true;
}
def close() = {
src.close();
}
def deterministicRow() = true;
def deterministicCol(v: Int) = true;
def missingRows() = false;
} | Legacy25/mimir | mimircore/src/main/scala/mimir/exec/ResultSetIterator.scala | Scala | apache-2.0 | 1,661 |
package org.clulab.dynet
import java.io.{BufferedReader, File, FileReader, PrintWriter}
import org.clulab.processors.clu.CluProcessor
import org.clulab.processors.{Document, Processor}
import org.clulab.serialization.DocumentSerializer
import org.clulab.struct.{Counter, DirectedGraph, GraphMap}
import org.slf4j.{Logger, LoggerFactory}
import scala.collection.mutable
import scala.collection.mutable.{ArrayBuffer, ListBuffer}
import scala.io.Source
import CoNLLSRLToMetal._
/**
* Reads a CoNLL-2008 formatted file (containing semantic roles) and converts it to our own Metal format
* User: mihais
* Date: 5/5/15
* Last Modified:
* 08/05/2020: Added the latest Metal format
* Update for Scala 2.12: bug #10151 workaround.
*
*/
class CoNLLSRLToMetal {
class CoNLLToken(
val word:String,
val pos:String,
val lemma:String,
val dep:(Int, String), // head, label
val pred:Int,
val frameBits:Array[String]) {
override def toString:String = word + "/" + pos + "/" + dep._1 + "/" + dep._2 + "/" + pred
}
var argConflictCount = 0
var multiPredCount = 0
var argCount = 0
var predCount = 0
def load(filePath:String):Document = {
val serFile = new File(filePath + ".ser")
if(serFile.exists()) {
// if the serialized file exists, use it
logger.debug(s"Found serialized file at ${serFile.getAbsolutePath}. Will use that.")
val documentSerializer = new DocumentSerializer
val b = new BufferedReader(new FileReader(serFile))
val doc = documentSerializer.load(b)
b.close()
doc
} else {
// the serialized file does not exist!
throw new RuntimeException(s"ERROR: Serialized file ${serFile.getAbsolutePath} does not exist! Please generate it using org.clulab.swirl2.ReaderMain.")
}
}
def read(file:File,
proc:Processor = null,
verbose:Boolean = false):Document = {
val source = Source.fromFile(file)
val sentences = new ArrayBuffer[Array[CoNLLToken]]
var sentence = new ArrayBuffer[CoNLLToken]
argConflictCount = 0
multiPredCount = 0
argCount = 0
predCount = 0
var tokenCount = 0
var sentCount = 0
var hyphCount = 0
//
// read all sentences
// also, collapse hyphenated phrases, which were brutally tokenized in CoNLL
//
for(l <- source.getLines()) {
val line = l.trim
if(line.length > 0) {
val bits = l.split("\\\\t")
        // println(s"LINE: $line")
assert(bits.size >= 14)
val token = mkToken(bits)
sentence += token
tokenCount += 1
if(token.pos == "HYPH") hyphCount += 1
} else {
// end of sentence
sentences += collapseHyphens(sentence.toArray, verbose)
sentence = new ArrayBuffer[CoNLLToken]()
sentCount += 1
}
}
source.close()
logger.debug(s"Read $tokenCount tokens, grouped in $sentCount sentences.")
logger.debug(s"Found $hyphCount hyphens.")
logger.debug(s"In hyphenated phrases, found $multiPredCount multi predicates and $argConflictCount argument conflicts.")
//
// construct the semantic roles from CoNLL tokens
//
val semDependencies = new ArrayBuffer[DirectedGraph[String]]()
for(sent <- sentences) {
semDependencies += mkSemanticDependencies(sent)
}
//
// construct one Document for the entire corpus and annotate it
//
val document = mkDocument(sentences.toArray, proc)
//
// assign the semantic roles to sentences in the created Document
//
assert(document.sentences.length == semDependencies.size)
for(i <- document.sentences.indices) {
document.sentences(i).setDependencies(GraphMap.SEMANTIC_ROLES, semDependencies(i))
}
logger.debug(s"Found a total of $predCount predicates with $argCount arguments.")
document
}
def mkDocument(sentences:Array[Array[CoNLLToken]], proc:Processor):Document = {
//
// create the document from tokens
// then, regenerate the POS tags and syntactic dependencies
//
val tokens = sentences.map(_.map(_.word).toList).toList
val doc = proc.mkDocumentFromTokens(tokens)
/*
if(USE_GOLD_SYNTAX) {
// this only works with the original tokenization. TODO: fix this
assert(USE_CONLL_TOKENIZATION)
for(i <- sentences.indices) {
val conllTokens = sentences(i)
val sent = doc.sentences(i)
sent.tags = Some(toTags(conllTokens))
println(s"Using tags: ${sent.tags.get.toList}")
sent.lemmas = Some(toLemmas(conllTokens))
}
} else {
proc.tagPartsOfSpeech(doc)
proc.lemmatize(doc)
}
*/
// Uncomment these lines if fancier features are needed!
proc.tagPartsOfSpeech(doc)
proc.lemmatize(doc)
proc.recognizeNamedEntities(doc)
/*
if(USE_GOLD_SYNTAX) {
// this only works with the original tokenization. TODO: fix this
assert(USE_CONLL_TOKENIZATION)
for(i <- sentences.indices) {
val conllTokens = sentences(i)
val sent = doc.sentences(i)
val depGraph = toDirectedGraph(conllTokens)
//println(depGraph)
// we set the gold CoNLL syntax as Stanford basic dependencies (hack)
sent.graphs += GraphMap.UNIVERSAL_BASIC -> depGraph
}
} else {
proc.parse(doc)
}
*/
doc
}
def toTags(tokens:Array[CoNLLToken]):Array[String] = tokens.map(_.pos)
def toLemmas(tokens:Array[CoNLLToken]):Array[String] = tokens.map(_.lemma)
def toDirectedGraph(tokens:Array[CoNLLToken]):DirectedGraph[String] = {
val edges = new mutable.ListBuffer[(Int, Int, String)] // head, modifier, label
for(modifier <- tokens.indices) {
val head = tokens(modifier).dep._1
if(head >= 0)
edges += Tuple3(head, modifier, tokens(modifier).dep._2)
() // workaround for bug #10151
}
DirectedGraph[String](DirectedGraph.triplesToEdges[String](edges.toList))
}
def mkSemanticDependencies(sentence:Array[CoNLLToken]):DirectedGraph[String] = {
val edges = new ListBuffer[(Int, Int, String)]
val heads = new mutable.HashSet[Int]()
val modifiers = new mutable.HashSet[Int]()
var columnOffset = -1
for(p <- sentence.indices) {
if(sentence(p).pred > 0) { // found a head
val head = p
heads += head
predCount += 1
columnOffset += sentence(p).pred // in case of multiple predicates squished in one token, use the last
for(i <- sentence.indices) {
if(sentence(i).frameBits(columnOffset) != "_") {
val modifier = i
val label = simplifyLabel(sentence(i).frameBits(columnOffset))
if(label.isDefined) {
edges += Tuple3(head, modifier, label.get)
modifiers += modifier
argCount += 1
}
}
}
}
}
DirectedGraph[String](DirectedGraph.triplesToEdges[String](edges.toList))
}
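  // Illustrative sketch (hypothetical mini-sentence, not from the corpus): with "eats" as the
  // only predicate (pred = 1) and a single frame column in frameBits, the rows line up as
  //   John   pred=0   frameBits(0)=A0
  //   eats   pred=1   frameBits(0)=_
  //   pizza  pred=0   frameBits(0)=A1
  // so mkSemanticDependencies emits the edges (eats -> John, "A0") and (eats -> pizza, "A1").
  // With several predicates, columnOffset advances by one frame column per predicate found,
  // scanning the tokens left to right.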
val KEEP_LABELS = Set("A0", "A1", "R-A0", "R-A1", "AM-TMP", "AM-LOC", "AM-MOD", "AM-NEG")
val AX_LABELS = Set("A2", "A3", "A4", "A5")
def simplifyLabel(label:String): Option[String] = {
if(! SIMPLIFY_ARG_LABELS) return Some(label)
//
// Keep: A0, A1, R-A0, R-A1, AM-TMP, AM-MNR, AM-LOC, AM-MOD, AM-ADV, AM-NEG
// Change: A2-5 => Ax
//
if(KEEP_LABELS.contains(label)) Some(label)
else if(AX_LABELS.contains(label)) Some("Ax")
else None
}
def mkToken(bits:Array[String]):CoNLLToken = {
val word = bits(1)
val pos = bits(4)
val lemma = bits(2)
val head = bits(8).toInt - 1 // CoNLL offsets start at 1; ours start at 0
val depLabel = bits(10)
val isPred = bits(13) match {
case "_" => 0
case _ => 1
}
val frameBits = bits.slice(14, bits.length)
new CoNLLToken(word, pos, lemma, Tuple2(head, depLabel), isPred, frameBits)
}
/**
* Merges tokens that were separated around dashes in CoNLL, to bring tokenization closer to the usual Treebank one
* We need this because most parsers behave horribly if hyphenated words are tokenized around dashes
*/
def collapseHyphens(origSentence:Array[CoNLLToken], verbose:Boolean):Array[CoNLLToken] = {
if(USE_CONLL_TOKENIZATION) return origSentence
val sent = new ArrayBuffer[CoNLLToken]()
var start = 0
while(start < origSentence.length) {
val end = findEnd(origSentence, start)
if(end > start + 1) {
val token = mergeTokens(origSentence, start, end, verbose)
sent += token
} else {
sent += origSentence(start)
}
start = end
}
sent.toArray
}
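  // Illustrative sketch (hypothetical tokens, only relevant when USE_CONLL_TOKENIZATION is
  // false): a phrase CoNLL-tokenized as [well, -, known], with the dash tagged HYPH, collapses
  // into the single token "well-known" (words and lemmas concatenated, POS taken from the last
  // piece). findEnd skips the HYPH plus the token after it, so longer chains such as
  // [state, -, of, -, the, -, art] are merged in one pass as well.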
def findEnd(sent:Array[CoNLLToken], start:Int):Int = {
var end = start + 1
while(end < sent.length) {
if(sent(end).pos != "HYPH") return end
else end = end + 2
}
sent.length
}
def mergeTokens(sent:Array[CoNLLToken], start:Int, end:Int, verbose:Boolean):CoNLLToken = {
val phrase = sent.slice(start, end)
val word = phrase.map(_.word).mkString("")
val pos = phrase.last.pos // this one doesn't really matter; we retag the entire data with our Processor anyway...
val lemma = phrase.map(_.lemma).mkString("")
val pred = mergePredicates(phrase, verbose)
val frameBits = mergeFrames(phrase, verbose)
if(verbose) {
//logger.debug("Merging tokens: " + phrase.mkString(" ") + " as: " + word + "/" + isPred)
}
new CoNLLToken(word, pos, lemma, sent(start).dep, pred, frameBits) // TODO: fix this, generate correct collapsed CoNLL dependencies
}
def mergePredicates(phrase:Array[CoNLLToken], verbose:Boolean):Int = {
val l = phrase.map(_.pred).sum
if(l > 0) {
if(l > 1) {
if(verbose) logger.debug("Found MULTI PREDICATE in hyphenated phrase: " + phrase.mkString(" "))
multiPredCount += 1
}
if(verbose) {
// logger.info("Found hyphenated predicate: " + phrase.mkString(" "))
}
}
l
}
def mergeFrames(phrase:Array[CoNLLToken], verbose:Boolean):Array[String] = {
val frameBits = new Array[String](phrase(0).frameBits.length)
for(i <- frameBits.indices) {
frameBits(i) = mergeFrame(phrase, i, verbose)
}
frameBits
}
def mergeFrame(phrase:Array[CoNLLToken], position:Int, verbose:Boolean):String = {
// pick the right-most argument assignment
// for example, if the tokens have: "A1 _ A0" we would pick A0
// of course, the above scenario is HIGHLY unlikely. normally, there will be a single argument, e.g.: "_ _ A0"
var arg = "_"
var count = 0
for(i <- phrase.length - 1 to 0 by -1) {
if(phrase(i).frameBits(position) != "_") {
if(arg == "_") arg = phrase(i).frameBits(position)
count += 1
}
}
if(count > 1) {
if(verbose) logger.debug("Found ARGUMENT CONFLICT " + phrase.map(_.frameBits(position)).mkString(" ") + " in hyphenated phrase: " + phrase.mkString(" "))
argConflictCount += 1
}
arg
}
}
object CoNLLSRLToMetal {
val logger: Logger = LoggerFactory.getLogger(classOf[CoNLLSRLToMetal])
val USE_CONLL_TOKENIZATION = false
val SIMPLIFY_ARG_LABELS = true
val REMOVE_SELF_LOOPS = true // do not allow self arguments for predicates
//val USE_GOLD_SYNTAX = true
def main(args: Array[String]): Unit = {
assert(args.length == 2)
Utils.initializeDyNet()
val file = new File(args(0))
val reader = new CoNLLSRLToMetal
val proc = new CluProcessor() // FastNLPProcessor()
val doc = reader.read(file, proc, verbose = true)
labelStats(doc)
moreStats(doc)
// a simplified form of the CoNLL format
//saveSimplified(doc, args(1))
val predsFile = args(1) + ".preds"
val argsFile = args(1) + ".args"
// older Metal format, with one frame per sentence
//saveMetal(doc, predsFile, argsFile)
// new Metal format, with all frames saved in the same sentence
saveMetalFull(doc, predsFile, argsFile)
}
def moreStats(document: Document): Unit = {
var moreThanTwoPreds = 0
var edgeCount = 0
var multPreds = 0
var multPredPerArgSents = 0
val argPredHisto = new Counter[Int] // how many arguments with this many predicates
for(s <- document.sentences) {
if(s.semanticRoles.get.roots.size > 1) {
moreThanTwoPreds += 1
var isMultPredArg = false
val headCounts = new Counter[Int]() // counts the number of preds for each arg
for(edge <- s.semanticRoles.get.allEdges) {
edgeCount += 1
headCounts.incrementCount(edge._2) // _1 is the pred, _2 is the arg
}
for(arg <- headCounts.keySet) {
if(headCounts.getCount(arg) > 1) {
multPreds += 1
isMultPredArg = true
}
argPredHisto.incrementCount(headCounts.getCount(arg).toInt)
}
if(isMultPredArg) multPredPerArgSents += 1
}
}
println(s"Found $moreThanTwoPreds/${document.sentences.length} sentences with more than two predicates.")
println(s"Found $multPredPerArgSents/${document.sentences.length} sentences where at least 1 arg has more than 1 predicate.")
println(s"Out of $edgeCount (pred, arg) pairs, found $multPreds arguments with more than 1 predicate.")
println(s"argPredHisto: ${argPredHisto.sorted(true).mkString(", ")}")
}
def saveMetal(doc: Document, predsFile: String, argsFile: String): Unit = {
val predsPw = new PrintWriter(predsFile)
val argsPw = new PrintWriter(argsFile)
var selfLoopCount = 0
for(sent <- doc.sentences) {
val g = sent.graphs(GraphMap.SEMANTIC_ROLES)
val heads = new Array[String](sent.words.length)
for(i <- heads.indices) heads(i) = "O"
var headPositions = new mutable.HashSet[Int]()
for(e <- g.edges) {
headPositions += e.source
heads(e.source) = "B-P"
}
//
// save predicate information
//
assert(heads.length == sent.words.length)
for(i <- heads.indices) {
predsPw.println(
sent.words(i) + "\\t" +
heads(i) + "\\t0\\t" +
sent.tags.get(i) + "\\t" +
sent.entities.get(i)
)
}
//
// save one frame for each predicate in the Metal format
//
val sortedHeadPositions = headPositions.toList.sorted
val headMap = sortedHeadPositions.zipWithIndex.toMap
val args = new Array[Array[String]](headMap.size)
for(i <- args.indices) {
args(i) = new Array[String](sent.size)
for(j <- args(i).indices) args(i)(j) = "O"
}
for(e <- g.edges) {
args(headMap(e.source))(e.destination) = e.relation
if(REMOVE_SELF_LOOPS) {
if(e.source == e.destination) {
args(headMap(e.source))(e.destination) = "O"
selfLoopCount += 1
}
}
}
// each frame saved separately
assert(headMap.size == args.length)
assert(sortedHeadPositions.size == args.length)
for(fi <- args.indices) {
val predPosition = sortedHeadPositions(fi)
val frame = args(fi)
assert(frame.length == sent.words.length)
for(i <- frame.indices) {
argsPw.println(
sent.words(i) + "\\t" +
frame(i) + "\\t" +
predPosition + "\\t" +
sent.tags.get(i) + "\\t" +
sent.entities.get(i)
)
}
argsPw.println()
}
predsPw.println()
}
predsPw.close()
argsPw.close()
if(REMOVE_SELF_LOOPS) {
logger.info(s"Removed $selfLoopCount self-argument loops.")
}
}
def saveMetalFull(doc: Document, predsFile: String, argsFile: String): Unit = {
val predsPw = new PrintWriter(predsFile)
val argsPw = new PrintWriter(argsFile)
var selfLoopCount = 0
for(sent <- doc.sentences) {
val g = sent.graphs(GraphMap.SEMANTIC_ROLES)
val heads = new Array[String](sent.words.length)
for(i <- heads.indices) heads(i) = "O"
var headPositions = new mutable.HashSet[Int]()
for(e <- g.edges) {
headPositions += e.source
heads(e.source) = "B-P"
}
//
// save predicate information
//
assert(heads.length == sent.words.length)
for(i <- heads.indices) {
predsPw.println(
sent.words(i) + "\\t" +
sent.tags.get(i) + "\\t" +
sent.entities.get(i) + "\\t" +
heads(i)
)
}
predsPw.println()
//
// save one frame for each predicate in the Metal format
//
if(headPositions.nonEmpty) {
val sortedHeadPositions = headPositions.toList.sorted
val headMap = sortedHeadPositions.zipWithIndex.toMap
val args = new Array[Array[String]](headMap.size)
for (i <- args.indices) {
args(i) = new Array[String](sent.size)
for (j <- args(i).indices) args(i)(j) = "O"
}
for (e <- g.edges) {
args(headMap(e.source))(e.destination) = e.relation
if (REMOVE_SELF_LOOPS) {
if (e.source == e.destination) {
args(headMap(e.source))(e.destination) = "O"
selfLoopCount += 1
}
}
}
// save all frames together, as separate columns
assert(headMap.size == args.length)
assert(sortedHeadPositions.size == args.length)
for (i <- sent.words.indices) {
// word, POS tag, NE label
argsPw.print(
sent.words(i) + "\\t" +
sent.tags.get(i) + "\\t" +
sent.entities.get(i)
)
// (label, head position)+
for (fi <- args.indices) {
val predPosition = sortedHeadPositions(fi)
val frame = args(fi)
argsPw.print(
"\\t" + frame(i) +
"\\t" + predPosition
)
}
argsPw.println()
}
argsPw.println()
}
}
predsPw.close()
argsPw.close()
if(REMOVE_SELF_LOOPS) {
logger.info(s"Removed $selfLoopCount self-argument loops.")
}
}
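  // Illustrative sketch of the "full" Metal layout written above (hypothetical tab-separated
  // rows): the preds file has one line per token with word / POS / NE / B-P-or-O, e.g.
  //   ate   VBD   O   B-P
  // while the args file repeats word / POS / NE and then appends one (role, predicate position)
  // pair per predicate in the sentence, e.g. with predicates at token positions 1 and 4:
  //   John   NNP   PERSON   A0   1   O   4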
def saveSimplified(doc: Document, outputFileName: String): Unit = {
val pw = new PrintWriter(outputFileName)
var selfLoopCount = 0
for(sent <- doc.sentences) {
val g = sent.graphs(GraphMap.SEMANTIC_ROLES)
val heads = new Array[Boolean](sent.words.length)
var headPositions = new mutable.HashSet[Int]()
for(e <- g.edges) {
headPositions += e.source
heads(e.source) = true
}
val headMap = headPositions.toList.sorted.zipWithIndex.toMap
val args = new Array[Array[String]](headMap.size)
for(i <- args.indices) {
args(i) = new Array[String](sent.size)
for(j <- args(i).indices) args(i)(j) = "O"
}
for(e <- g.edges) {
args(headMap(e.source))(e.destination) = e.relation
if(REMOVE_SELF_LOOPS) {
if(e.source == e.destination) {
args(headMap(e.source))(e.destination) = "O"
selfLoopCount += 1
}
}
}
for(i <- sent.words.indices) {
pw.print(sent.words(i) + "\\t" + (if(heads(i)) "B-P" else "O"))
pw.print("\\t" + sent.tags.get(i) + "\\t" + sent.entities.get(i))
for(j <- args.indices) {
pw.print("\\t" + args(j)(i))
}
pw.println()
}
pw.println()
}
pw.close()
if(REMOVE_SELF_LOOPS) {
logger.info(s"Removed $selfLoopCount self-argument loops.")
}
}
def labelStats(doc: Document): Unit = {
val labels = new Counter[String]()
for(sent <- doc.sentences) {
val g = sent.graphs(GraphMap.SEMANTIC_ROLES)
for(e <- g.allEdges) {
val l = e._3
labels += l
}
}
val pw = new PrintWriter("labels.tsv")
for(l <- labels.sorted){
pw.println(s"${l._1}\\t${l._2}")
}
pw.close()
}
}
| sistanlp/processors | main/src/main/scala/org/clulab/dynet/CoNLLSRLToMetal.scala | Scala | apache-2.0 | 19,938 |
package play.core.server.netty
import scala.language.reflectiveCalls
import org.jboss.netty.channel._
import org.jboss.netty.handler.codec.http._
import org.jboss.netty.handler.codec.http.websocketx._
import play.core._
import play.core.server.websocket.WebSocketHandshake
import play.api._
import play.api.libs.iteratee._
import play.api.libs.iteratee.Input._
import scala.concurrent.{ Future, Promise }
import scala.concurrent.stm._
import play.core.Execution.Implicits.internalContext
import org.jboss.netty.buffer.{ ChannelBuffers, ChannelBuffer }
import java.util.concurrent.atomic.AtomicInteger
private[server] trait WebSocketHandler {
import NettyFuture._
val WebSocketNormalClose = 1000
val WebSocketUnacceptable = 1003
val WebSocketMessageTooLong = 1009
/**
* The maximum number of messages allowed to be in flight. Messages can be up to 64K by default, so this number
* shouldn't be too high.
*/
private val MaxInFlight = 3
def newWebSocketInHandler[A](frameFormatter: play.api.mvc.WebSocket.FrameFormatter[A], bufferLimit: Long): (Enumerator[A], ChannelHandler) = {
val nettyFrameFormatter = frameFormatter.asInstanceOf[play.core.server.websocket.FrameFormatter[A]]
val enumerator = new WebSocketEnumerator[A]
(enumerator,
new SimpleChannelUpstreamHandler {
type FrameCreator = ChannelBuffer => WebSocketFrame
private var continuationBuffer: Option[(FrameCreator, ChannelBuffer)] = None
override def messageReceived(ctx: ChannelHandlerContext, e: MessageEvent) {
// Note, protocol violations like mixed up fragmentation are already handled upstream by the netty decoder
(e.getMessage, continuationBuffer) match {
// message too long
case (frame: ContinuationWebSocketFrame, Some((_, buffer))) if frame.getBinaryData.readableBytes() + buffer.readableBytes() > bufferLimit =>
closeWebSocket(ctx, WebSocketMessageTooLong, "Fragmented message too long, configured limit is " + bufferLimit)
// non final continuation
case (frame: ContinuationWebSocketFrame, Some((_, buffer))) if !frame.isFinalFragment =>
buffer.writeBytes(frame.getBinaryData)
// final continuation
case (frame: ContinuationWebSocketFrame, Some((creator, buffer))) =>
buffer.writeBytes(frame.getBinaryData)
continuationBuffer = None
val finalFrame = creator(buffer)
enumerator.frameReceived(ctx, El(nettyFrameFormatter.fromFrame(finalFrame)))
// fragmented text
case (frame: TextWebSocketFrame, None) if !frame.isFinalFragment && nettyFrameFormatter.fromFrame.isDefinedAt(frame) =>
val buffer = ChannelBuffers.dynamicBuffer(Math.min(frame.getBinaryData.readableBytes() * 2, bufferLimit.asInstanceOf[Int]))
buffer.writeBytes(frame.getBinaryData)
continuationBuffer = Some((b => new TextWebSocketFrame(true, frame.getRsv, buffer), buffer))
// fragmented binary
case (frame: BinaryWebSocketFrame, None) if !frame.isFinalFragment && nettyFrameFormatter.fromFrame.isDefinedAt(frame) =>
val buffer = ChannelBuffers.dynamicBuffer(Math.min(frame.getBinaryData.readableBytes() * 2, bufferLimit.asInstanceOf[Int]))
buffer.writeBytes(frame.getBinaryData)
continuationBuffer = Some((b => new BinaryWebSocketFrame(true, frame.getRsv, buffer), buffer))
// full handleable frame
case (frame: WebSocketFrame, None) if nettyFrameFormatter.fromFrame.isDefinedAt(frame) =>
enumerator.frameReceived(ctx, El(nettyFrameFormatter.fromFrame(frame)))
// client initiated close
case (frame: CloseWebSocketFrame, _) =>
closeWebSocket(ctx, frame.getStatusCode, "")
// ping!
case (frame: PingWebSocketFrame, _) =>
ctx.getChannel.write(new PongWebSocketFrame(frame.getBinaryData))
// unacceptable frame
case (frame: WebSocketFrame, _) =>
closeWebSocket(ctx, WebSocketUnacceptable, "This WebSocket does not handle frames of that type")
case _ => //
}
}
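        // Fragmentation sketch (hypothetical frame sequence): a large text message may arrive as
        //   TextWebSocketFrame(finalFragment = false) -> ContinuationWebSocketFrame(false) -> ContinuationWebSocketFrame(true)
        // The first fragment opens continuationBuffer, intermediate continuations append to it, and
        // the final continuation rebuilds one complete frame that is handed to the enumerator; if the
        // accumulated bytes ever exceed bufferLimit the socket is closed with WebSocketMessageTooLong (1009).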
override def exceptionCaught(ctx: ChannelHandlerContext, e: ExceptionEvent) {
e.getCause.printStackTrace()
e.getChannel.close()
}
override def channelDisconnected(ctx: ChannelHandlerContext, e: ChannelStateEvent) {
enumerator.frameReceived(ctx, EOF)
Play.logger.trace("disconnected socket")
}
private def closeWebSocket(ctx: ChannelHandlerContext, status: Int, reason: String): Unit = {
if (!reason.isEmpty) {
Logger.trace("Closing WebSocket because " + reason)
}
if (ctx.getChannel.isOpen) {
for {
_ <- ctx.getChannel.write(new CloseWebSocketFrame(status, reason)).toScala
_ <- ctx.getChannel.close().toScala
} yield {
enumerator.frameReceived(ctx, EOF)
}
}
}
})
}
private class WebSocketEnumerator[A] extends Enumerator[A] {
val eventuallyIteratee = Promise[Iteratee[A, Any]]()
val iterateeRef = Ref[Iteratee[A, Any]](Iteratee.flatten(eventuallyIteratee.future))
private val promise: scala.concurrent.Promise[Iteratee[A, Any]] = Promise[Iteratee[A, Any]]()
/**
* The number of in flight messages. Incremented every time we receive a message, decremented every time a
* message is finished being handled.
*/
private val inFlight = new AtomicInteger(0)
def apply[R](i: Iteratee[A, R]) = {
eventuallyIteratee.success(i)
promise.asInstanceOf[scala.concurrent.Promise[Iteratee[A, R]]].future
}
def setReadable(channel: Channel, readable: Boolean) {
if (channel.isOpen) {
channel.setReadable(readable)
}
}
def frameReceived(ctx: ChannelHandlerContext, input: Input[A]) {
val channel = ctx.getChannel
if (inFlight.incrementAndGet() >= MaxInFlight) {
setReadable(channel, false)
}
val eventuallyNext = Promise[Iteratee[A, Any]]()
val current = iterateeRef.single.swap(Iteratee.flatten(eventuallyNext.future))
val next = current.flatFold(
(a, e) => {
setReadable(channel, true)
Future.successful(current)
},
k => {
if (inFlight.decrementAndGet() < MaxInFlight) {
setReadable(channel, true)
}
val next = k(input)
next.fold {
case Step.Done(a, e) =>
promise.success(next)
if (channel.isOpen) {
for {
_ <- channel.write(new CloseWebSocketFrame(WebSocketNormalClose, "")).toScala
_ <- channel.close().toScala
} yield next
} else {
Future.successful(next)
}
case Step.Cont(_) =>
Future.successful(next)
case Step.Error(msg, e) =>
/* deal with error, maybe close the socket */
Future.successful(next)
}
},
(err, e) => {
setReadable(channel, true)
/* handle error, maybe close the socket */
Future.successful(current)
})
eventuallyNext.success(next)
}
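    // Backpressure sketch: every incoming frame bumps inFlight and, once MaxInFlight (3) frames are
    // being processed, the channel is marked unreadable; it is re-enabled as soon as the iteratee
    // consumes a frame or finishes, so a slow consumer throttles the client instead of letting
    // messages pile up in memory.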
}
def websocketHandshake[A](ctx: ChannelHandlerContext, req: HttpRequest, e: MessageEvent, bufferLimit: Long)(frameFormatter: play.api.mvc.WebSocket.FrameFormatter[A]): Enumerator[A] = {
val (enumerator, handler) = newWebSocketInHandler(frameFormatter, bufferLimit)
val p: ChannelPipeline = ctx.getChannel.getPipeline
p.replace("handler", "handler", handler)
WebSocketHandshake.shake(ctx, req, bufferLimit)
enumerator
}
def websocketable(req: HttpRequest) = new server.WebSocketable {
def check =
HttpHeaders.Values.WEBSOCKET.equalsIgnoreCase(req.getHeader(HttpHeaders.Names.UPGRADE))
def getHeader(header: String) = req.getHeader(header)
}
}
| vangav/vos_backend | play-2.2.6/framework/src/play/src/main/scala/play/core/server/netty/WebSocketHandler.scala | Scala | mit | 8,171 |
package controllers
import db.{Authorization, OrganizationsDao, VersionsDao}
import javax.inject.{Inject, Named, Singleton}
import play.api.mvc._
import play.api.libs.json._
@Singleton
class Healthchecks @Inject() (
@Named("main-actor") mainActor: akka.actor.ActorRef,
val apibuilderControllerComponents: ApibuilderControllerComponents,
organizationsDao: OrganizationsDao,
versionsDao: VersionsDao
) extends ApibuilderController {
private[this] val Result = Json.toJson(Map("status" -> "healthy"))
def getHealthcheck() = Action { _ =>
organizationsDao.findAll(Authorization.PublicOnly, limit = 1).headOption
Ok(Result)
}
def getMigrate() = Action { _ =>
val stats = versionsDao.migrate()
Ok(
Json.toJson(
Map("good" -> stats.good, "bad" -> stats.bad)
)
)
}
}
| gheine/apidoc | api/app/controllers/Healthchecks.scala | Scala | mit | 823 |
package org.openapitools.models
import io.circe._
import io.finch.circe._
import io.circe.generic.semiauto._
import io.circe.java8.time._
import org.openapitools._
import org.openapitools.models.InputStepImpllinks
import org.openapitools.models.StringParameterDefinition
import scala.collection.immutable.Seq
/**
*
* @param Underscoreclass
* @param Underscorelinks
* @param id
* @param message
* @param ok
* @param parameters
* @param submitter
*/
case class InputStepImpl(Underscoreclass: Option[String],
Underscorelinks: Option[InputStepImpllinks],
id: Option[String],
message: Option[String],
ok: Option[String],
parameters: Option[Seq[StringParameterDefinition]],
submitter: Option[String]
)
object InputStepImpl {
/**
* Creates the codec for converting InputStepImpl from and to JSON.
*/
implicit val decoder: Decoder[InputStepImpl] = deriveDecoder
implicit val encoder: ObjectEncoder[InputStepImpl] = deriveEncoder
}
| cliffano/swaggy-jenkins | clients/scala-finch/generated/src/main/scala/org/openapitools/models/InputStepImpl.scala | Scala | mit | 1,077 |
package nsmc.mongo
import java.net.InetAddress
import org.apache.spark.SparkConf
private[nsmc]
case class MongoConnectorConf(
host: String,
port: Int = MongoConnectorConf.DefaultPort,
splitIndexed: Boolean,
splitSize: Int,
directToShards: Boolean,
useShardChunks: Boolean,
user: Option[String],
password: Option[String]
) {
def getDestination() : Destination = Destination(host, port, this)
}
private[nsmc]
object MongoConnectorConf extends nsmc.Logging {
val DefaultPort = 27017
val DefaultSplitSize = 4
val ConnectionHostProperty = "spark.nsmc.connection.host"
val ConnectionPortProperty = "spark.nsmc.connection.port"
val ConnectionUserProperty = "spark.nsmc.user"
val ConnectionPasswordProperty = "spark.nsmc.password"
val PartitioningSplitIndexedProperty = "spark.nsmc.split.indexed.collections"
val PartitioningSplitSizeProperty = "spark.nsmc.split.chunk.size"
val PartitioningDirectToShardsProperty = "spark.nsmc.direct.to.shards"
val PartitioningUseShardChunksProperty = "spark.nsmc.partition.on.shard.chunks"
def apply(conf: SparkConf): MongoConnectorConf = {
val host = conf.get(ConnectionHostProperty, InetAddress.getLocalHost.getHostAddress)
val port = conf.getInt(ConnectionPortProperty, DefaultPort)
val splitIndexed = conf.getBoolean(PartitioningSplitIndexedProperty, false)
val splitSize = conf.getInt(PartitioningSplitSizeProperty, DefaultSplitSize)
val directToShards = conf.getBoolean(PartitioningDirectToShardsProperty, false)
val useShardChunks = conf.getBoolean(PartitioningUseShardChunksProperty, false)
val user = conf.getOption(ConnectionUserProperty)
val password = conf.getOption(ConnectionPasswordProperty)
val userString = user.getOrElse("<absent>")
val passwordString = if (password.isDefined) "<present>" else "<absent>"
logDebug(s"host='$host' port='$port' user='$userString' password='$passwordString'")
logDebug(s"$PartitioningSplitIndexedProperty=$splitIndexed")
logDebug(s"$PartitioningSplitSizeProperty=$splitSize")
logDebug(s"$PartitioningDirectToShardsProperty=$directToShards")
logDebug(s"$PartitioningUseShardChunksProperty=$useShardChunks")
MongoConnectorConf(host, port, splitIndexed, splitSize, directToShards, useShardChunks, user, password)
}
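  // Illustrative usage sketch (hypothetical values): the connector is configured entirely through
  // SparkConf properties, e.g.
  //   val sparkConf = new SparkConf()
  //     .set("spark.nsmc.connection.host", "mongo.example.com")
  //     .set("spark.nsmc.connection.port", "27017")
  //     .set("spark.nsmc.split.indexed.collections", "true")
  //   val connectorConf = MongoConnectorConf(sparkConf)  // anything unset falls back to the defaults above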
}
| shotishu/spark-mongodb-connector | src/main/scala/nsmc/mongo/MongoConnectorConf.scala | Scala | apache-2.0 | 2,312 |
package views.html.relation
import lila.api.Context
import lila.app.templating.Environment._
import lila.app.ui.ScalatagsTemplate._
import controllers.routes
object actions {
private val dataHoverText = data("hover-text")
def apply(
userId: lila.user.User.ID,
relation: Option[lila.relation.Relation],
followable: Boolean,
blocked: Boolean,
signup: Boolean = false
)(implicit ctx: Context) =
div(cls := "relation-actions btn-rack")(
ctx.userId map { myId =>
(myId != userId) ?? frag(
!blocked option frag(
a(
titleOrText(trans.challenge.challengeToPlay.txt()),
href := s"${routes.Lobby.home}?user=$userId#friend",
cls := "btn-rack__btn",
dataIcon := ""
),
a(
titleOrText(trans.composeMessage.txt()),
href := routes.Msg.convo(userId),
cls := "btn-rack__btn",
dataIcon := ""
)
),
relation match {
case None =>
frag(
followable && !blocked option a(
cls := "btn-rack__btn relation-button",
href := routes.Relation.follow(userId),
titleOrText(trans.follow.txt()),
dataIcon := ""
),
a(
cls := "btn-rack__btn relation-button",
href := routes.Relation.block(userId),
titleOrText(trans.block.txt()),
dataIcon := ""
)
)
case Some(true) =>
a(
dataIcon := "",
cls := "btn-rack__btn relation-button text hover-text",
href := routes.Relation.unfollow(userId),
titleOrText(trans.following.txt()),
dataHoverText := trans.unfollow.txt()
)
case Some(false) =>
a(
dataIcon := "",
cls := "btn-rack__btn relation-button text hover-text",
href := routes.Relation.unblock(userId),
titleOrText(trans.blocked.txt()),
dataHoverText := trans.unblock.txt()
)
}
)
} getOrElse {
signup option frag(
trans.youNeedAnAccountToDoThat(),
a(href := routes.Auth.login, cls := "signup")(trans.signUp())
)
}
)
}
| luanlv/lila | app/views/relation/actions.scala | Scala | mit | 2,499 |
#!/opt/local/bin/scala
########################################################### {{{1 ###########
# Copyright © 2011 Martin Krischik
############################################################################
# $Author: krischik $
# $Revision: 6690 $
# $Date: 2014-12-29 20:51:07 +0100 (Mo, 29. Dez 2014) $
# $Id: Create-IC-Launcher.scala 6690 2014-12-29 19:51:07Z krischik $
# $HeadURL: svn+ssh://[email protected]/p/uiq3/code/trunk/Java/src/main/scripts/Create-IC-Launcher.scala $
########################################################### }}}1 ###########
!#
val logger = java.util.logging.Logger.getLogger ("Create-IC-Launcher")
if (args.length != 1)
{
logger.log (
java.util.logging.Level.INFO, """
        Create-IC-Launcher must be launched in the res directory of your project and you need to
        pass exactly one parameter: The source file to be converted. The source file must not be one
        of the potential destination files. For best results the Input file should be square and larger than 144x144 pixels.""")
}
else
{
val Input_File = new
java.io.File (args (0))
/**
* Convenience method that returns a scaled instance of the
* provided `BufferedImage`.
*
* @param img the original image to be scaled
* @param targetWidth the desired width of the scaled instance,
* in pixels
* @param targetHeight the desired height of the scaled instance,
* in pixels
* @param hint one of the rendering hints that corresponds to
* ` RenderingHints.KEY_INTERPOLATION` (e.g.
* ` RenderingHints.VALUE_INTERPOLATION_NEAREST_NEIGHBOR`,
* ` RenderingHints.VALUE_INTERPOLATION_BILINEAR`,
* ` RenderingHints.VALUE_INTERPOLATION_BICUBIC`)
* @param higherQuality if true, this method will use a multi-step
* scaling technique that provides higher quality than the usual
* one-step technique (only useful in downscaling cases, where
* ` targetWidth` or ` targetHeight` is
* smaller than the original dimensions, and generally only when
* the ` BILINEAR` hint is specified)
* @return a scaled version of the original ` BufferedImage`
*/
@scala.annotation.tailrec
def getScaledInstance (
img: java.awt.image.BufferedImage,
targetWidth: Int,
targetHeight: Int,
hint: Object = java.awt.RenderingHints.VALUE_INTERPOLATION_BICUBIC,
higherQuality: Boolean = false): java.awt.image.BufferedImage =
{
val currentWidth = img.getWidth
val currentHeight = img.getHeight
if (higherQuality && (targetWidth > currentWidth || targetHeight > currentHeight))
{
throw new
java.lang.IllegalArgumentException (
"higherQuality can only be true for downscaling")
} // if
if (currentWidth == targetWidth && currentHeight == targetHeight)
{
// the scaling is finished, return the image
img
}
else
{
def Half (value: Int, min: Int): Int =
{
val retval = value / 2
if (retval < min)
{
min
}
else
{
retval
} // return
} // Half
val Type = if (img.getTransparency == java.awt.Transparency.OPAQUE)
{
java.awt.image.BufferedImage.TYPE_INT_RGB
}
else
{
java.awt.image.BufferedImage.TYPE_INT_ARGB
} // val
val Size = if (higherQuality)
{
// Use multi-step technique: start with original size, then
// scale down in multiple passes with drawImage()
// until the target size is reached
(Half (img.getWidth, targetWidth), Half (img.getHeight, targetHeight))
}
else
{
// Use one-step technique: scale directly from original
// size to target size with a single drawImage() call
(targetWidth, targetHeight)
} // val
val targetImage = new
java.awt.image.BufferedImage (
/* width => */ Size._1,
/* height => */ Size._2,
/* imageType => */ Type)
val graphics = targetImage.createGraphics ()
graphics.setRenderingHint (
/* hintKey => */ java.awt.RenderingHints.KEY_INTERPOLATION,
/* hintValue => */ hint)
graphics.drawImage (
/* img => */ img,
/* x => */ 0,
/* y => */ 0,
/* width => */ Size._1,
/* height => */ Size._2,
/* observer => */ null)
graphics.dispose ()
getScaledInstance (
img = targetImage,
targetWidth = targetWidth,
targetHeight = targetHeight,
hint = hint,
higherQuality = higherQuality)
}
} // getScaledInstance
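    // Illustrative sketch: with higherQuality = true a 512x512 source targeted at 144x144 is scaled
    // in halving steps 512 -> 256 -> 144 (Half never drops below the target), each pass re-drawn with
    // the chosen interpolation hint; with the default higherQuality = false the image is drawn once,
    // directly at the target size.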
def Resize (Resolution: String, Size: Int): Unit =
{
val Output_Path = new
java.io.File ("drawable-" + Resolution)
val Output_File = new
java.io.File (Output_Path, Input_File.getName)
logger.log (java.util.logging.Level.INFO, "Read File = {0}", Input_File)
val Input_Image = javax.imageio.ImageIO.read (Input_File)
val Character = getScaledInstance (
img = Input_Image, targetWidth = Size, targetHeight = Size)
logger.log (java.util.logging.Level.INFO, "Write File = {0}", Output_File)
javax.imageio.ImageIO.write (Character, "png", Output_File)
} // Resize
val Base = 32
(
("mdpi", Base * 1.0) ::
("hdpi", Base * 1.5) ::
("xhdpi", Base * 2.0) ::
("xxhdpi", Base * 3.0) ::
("xxxhdpi", Base * 4.0) ::
Nil
).foreach
{
Set => Resize (Set._1, Set._2.toInt)
} // foreach
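    // With Base = 32 the launcher icons come out at 32 px (mdpi), 48 px (hdpi), 64 px (xhdpi),
    // 96 px (xxhdpi) and 128 px (xxxhdpi), each written to the matching drawable-<density> directory.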
}
// vim: set nowrap tabstop=8 shiftwidth=4 softtabstop=4 expandtab :
// vim: set textwidth=0 filetype=scala foldmethod=marker nospell :
| krischik/Fit-Import | src/main/scripts/Create-IC-Menu.scala | Scala | gpl-3.0 | 5,726 |
package com.danielwestheide.test
import org.scalatest.{FlatSpec, Matchers}
class RDDBaseTest extends FlatSpec with Matchers {
behavior of "RDDBase"
it should "return calling site from outside kontextfrei package" in {
import com.danielwestheide.kontextfrei.rdd.CallSiteInfoTestHelper._
val (method, site) = rddmethod
method should be ("rddmethod")
site should be ("RDDBaseTest.scala:12")
}
}
| dwestheide/kontextfrei | core/src/test/scala/com/danielwestheide/test/RDDBaseTest.scala | Scala | apache-2.0 | 420 |
package com.artclod.mathml
import com.artclod.mathml.scalar.MathMLElem
import com.artclod.mathml.scalar.concept.Constant
import scala.xml._
case class Math(
override val prefix: String,
attributes1: MetaData,
override val scope: NamespaceBinding,
override val minimizeEmpty: Boolean,
val value: MathMLElem)
extends MathMLElem(prefix, "math", attributes1, scope, minimizeEmpty, Seq(value): _*) {
def this(value: MathMLElem) = this(MathML.h.prefix, MathML.h.attributes, MathML.h.scope, false, value)
def eval(boundVariables: Map[String, Double]) = value.eval(boundVariables)
def constant: Option[Constant] = value.c
def simplifyStep() = Math(prefix, attributes, scope, minimizeEmpty, value.s)
def variables: Set[String] = value.variables
def derivative(wrt: String) = Math(prefix, attributes, scope, minimizeEmpty, value.d(wrt).s)
override def toMathJS: String = value.toMathJS
}
object Math {
def apply(value: MathMLElem) = new Math(value)
}
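// Illustrative usage sketch (hypothetical element): wrapping a MathMLElem in Math exposes the
// evaluation and symbolic helpers defined above, e.g.
//   val m = Math(expr)                 // expr: some MathMLElem over the variable "x"
//   m.eval(Map("x" -> 3d))             // numeric evaluation with x bound to 3
//   m.variables                        // Set("x")
//   m.derivative("x")                  // a new Math holding the simplified derivative w.r.t. x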
| kristiankime/web-education-games | app/com/artclod/mathml/Math.scala | Scala | mit | 967 |
package com.github.agourlay.cornichon.http
import com.github.agourlay.cornichon.core.{ Scenario, ScenarioRunner, Session, SessionKey }
import com.github.agourlay.cornichon.dsl.SessionSteps.SessionStepBuilder
import com.github.agourlay.cornichon.testHelpers.IOSpec
import org.scalacheck.{ Gen, Properties }
import org.scalacheck.Prop._
class HttpDslProperties extends Properties("HttpDsl") with IOSpec {
private val ops = new HttpDslOps {}
property("removeFromWithHeaders handle no 'with-headers'") =
forAll(Gen.alphaStr) { header =>
ops.removeFromWithHeaders(header)(Session.newEmpty) == Right(Session.newEmpty)
}
property("save_body accepts to save any String as a body") =
forAll { input: String =>
val session = Session.newEmpty.addValuesUnsafe(HttpDsl.lastBodySessionKey.name -> input)
val saveStep = HttpDsl.save_body("new-key")
val assertStep = SessionStepBuilder(SessionKey("new-key")).is(input)
val s = Scenario("scenario with any save_body", saveStep :: assertStep :: Nil)
val t = awaitIO(ScenarioRunner.runScenario(session)(s))
t.isSuccess
}
}
| agourlay/cornichon | cornichon-core/src/test/scala/com/github/agourlay/cornichon/http/HttpDslProperties.scala | Scala | apache-2.0 | 1,123 |
package edison.cli.actions
import edison.cli.io.IO
import edison.cli.{ Config, Environment }
import edison.model.domain._
import edison.util.{ NoLogging, SmartSpec }
import org.scalamock.scalatest.MockFactory
class SampleGeneratorTest extends SmartSpec with MockFactory {
val ioMock = mock[IO]
val generator = new SampleGenerator(ioMock) with NoLogging
behavior of "SampleGenerator"
it must "write sampled point to console" in {
ioMock.writeToStdout _ expects argThat { msg: String => msg.contains("CacheSize") }
val cacheSizeParam = ParamDef("CacheSize", ParamDomainInteger(Range.inclusive(1, 100)))
val searchDomain = SearchDomain(ParamDefs(cacheSizeParam))
val project = Project("A project", searchDomain)
generator.generateSample(Environment(stub[Config], project))
}
}
| pawel-wiejacha/edison | service/src/test/scala/edison/cli/actions/SampleGeneratorTest.scala | Scala | mit | 811 |
// @GENERATOR:play-routes-compiler
// @SOURCE:D:/git/trask/glowroot/agent-parent/plugins/play-plugin/tmp-router-files/conf/routes
// @DATE:Sat Apr 09 15:57:27 PDT 2016
import play.api.routing.JavaScriptReverseRoute
import play.api.mvc.{ QueryStringBindable, PathBindable, Call, JavascriptLiteral }
import play.core.routing.{ HandlerDef, ReverseRouteContext, queryString, dynamicString }
import _root_.controllers.Assets.Asset
// @LINE:5
package controllers.javascript {
import ReverseRouteContext.empty
// @LINE:8
class ReverseAssets(_prefix: => String) {
def _defaultPrefix: String = {
if (_prefix.endsWith("/")) "" else "/"
}
// @LINE:8
def versioned: JavaScriptReverseRoute = JavaScriptReverseRoute(
"controllers.Assets.versioned",
"""
function(file1) {
return _wA({method:"GET", url:"""" + _prefix + { _defaultPrefix } + """" + "assets/" + (""" + implicitly[PathBindable[Asset]].javascriptUnbind + """)("file", file1)})
}
"""
)
}
// @LINE:9
class ReverseBadController(_prefix: => String) {
def _defaultPrefix: String = {
if (_prefix.endsWith("/")) "" else "/"
}
// @LINE:9
def bad: JavaScriptReverseRoute = JavaScriptReverseRoute(
"controllers.BadController.bad",
"""
function() {
return _wA({method:"GET", url:"""" + _prefix + { _defaultPrefix } + """" + "bad"})
}
"""
)
}
// @LINE:5
class ReverseHomeController(_prefix: => String) {
def _defaultPrefix: String = {
if (_prefix.endsWith("/")) "" else "/"
}
// @LINE:5
def index: JavaScriptReverseRoute = JavaScriptReverseRoute(
"controllers.HomeController.index",
"""
function() {
return _wA({method:"GET", url:"""" + _prefix + """"})
}
"""
)
}
// @LINE:6
class ReverseAsyncController(_prefix: => String) {
def _defaultPrefix: String = {
if (_prefix.endsWith("/")) "" else "/"
}
// @LINE:6
def message: JavaScriptReverseRoute = JavaScriptReverseRoute(
"controllers.AsyncController.message",
"""
function() {
return _wA({method:"GET", url:"""" + _prefix + { _defaultPrefix } + """" + "message"})
}
"""
)
}
// @LINE:7
class ReverseStreamController(_prefix: => String) {
def _defaultPrefix: String = {
if (_prefix.endsWith("/")) "" else "/"
}
// @LINE:7
def stream: JavaScriptReverseRoute = JavaScriptReverseRoute(
"controllers.StreamController.stream",
"""
function() {
return _wA({method:"GET", url:"""" + _prefix + { _defaultPrefix } + """" + "stream"})
}
"""
)
}
}
| trask/glowroot | agent/plugins/play-plugin/src/test/app-2.5.x-scala/scala/controllers/javascript/JavaScriptReverseRoutes.scala | Scala | apache-2.0 | 2,754 |
package scala.build
import sbt._
import sbt.complete.Parser._
import sbt.complete.Parsers._
import sbt.complete._
object ParserUtil {
def notStartingWith(parser: Parser[String], c: Char): Parser[String] = parser & not(c ~> any.*, s"value cannot start with $c.")
def concat(p: Parser[(String, String)]): Parser[String] = p.map { case (a, b) => a + b }
def Opt(a: Parser[String]) = a.?.map(_.getOrElse(""))
val StringBasicNotStartingWithDash = notStartingWith(StringBasic, '-')
val IsDirectoryFilter = new SimpleFileFilter(_.isDirectory)
val JarOrDirectoryParser = FileParser(GlobFilter("*.jar") || IsDirectoryFilter)
def FileParser(fileFilter: FileFilter, dirFilter: FileFilter = AllPassFilter, base: File = file(".")) = {
val childFilter = IsDirectoryFilter && dirFilter || fileFilter
def ensureSuffix(s: String, suffix: String) = if (s.endsWith(suffix)) s else s"$s$suffix"
def matching(prefix: String): List[String] = {
val prefixFile = new File(prefix)
val prefixIsAbsolute = prefixFile.isAbsolute
val preFile = if (prefixIsAbsolute) prefixFile else new File(base, prefix)
val basePrefix = if (prefixIsAbsolute) "" else ensureSuffix(base.getPath, "/")
def relativize(p: String) = p.stripPrefix(basePrefix)
def pathOf(f: File) = if (f.isDirectory() && !fileFilter.accept(f)) ensureSuffix(f.getPath, "/") else f.getPath
val finder = if (preFile.isDirectory()) {
preFile.glob(childFilter)
} else if (preFile.exists()) {
PathFinder(preFile).filter(fileFilter.accept)
} else {
preFile.getParentFile.glob(GlobFilter(s"${preFile.getName}*") && childFilter)
}
finder.get().toList.map(pathOf).map(relativize)
}
def displayPath = Completions.single(Completion.displayOnly("<path>"))
token(StringBasic, TokenCompletions.fixed((prefix, _) => if (prefix.isEmpty) displayPath else matching(prefix) match {
case Nil => displayPath
case xs => Completions.strict(xs.map(x => Completion.tokenDisplay(x.stripPrefix(prefix), x)).toSet)
})).filter(!_.startsWith("-"), x => x)
}
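  // Illustrative sketch (hypothetical sbt input task): JarOrDirectoryParser above is FileParser
  // restricted to *.jar files or directories, so a task definition can tab-complete paths, e.g.
  //   val classpathArg = (Space ~> JarOrDirectoryParser).+
  // Until a prefix is typed the completion shows the "<path>" placeholder; afterwards candidates
  // are globbed relative to the base directory (defaulting to ".").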
}
| lrytz/scala | project/ParserUtil.scala | Scala | apache-2.0 | 2,183 |
/*
* Copyright 2011-2022 GatlingCorp (https://gatling.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gatling.core.action
import io.gatling.AkkaSpec
import io.gatling.commons.util.DefaultClock
import io.gatling.core.EmptySession
import io.gatling.core.stats.StatsEngine
class RendezVousSpec extends AkkaSpec {
private val clock = new DefaultClock
"RendezVous" should "block the specified number of sessions until they have all reached it" in {
val rendezVous = RendezVous(3, system, mock[StatsEngine], clock, new ActorDelegatingAction("next", self))
rendezVous ! emptySession
expectNoMessage(remainingOrDefault)
rendezVous ! emptySession
expectNoMessage(remainingOrDefault)
rendezVous ! emptySession
expectMsgAllOf(emptySession, emptySession, emptySession)
rendezVous ! emptySession
expectMsg(emptySession)
}
}
| gatling/gatling | gatling-core/src/test/scala/io/gatling/core/action/RendezVousSpec.scala | Scala | apache-2.0 | 1,389 |
package rere.ql.queries
import java.util.UUID
import io.circe.{Json, JsonObject}
import org.scalatest.FlatSpec
import rere.ql.types._
class ValueQueriesTest extends FlatSpec with ReqlMatchers {
import rere.ql.queries.all.r
import rere.ql.queries.values._
behavior of "ValueQueries"
it should "convert basic types to ReQL types" in {
r.expr(null) shouldBe subtypeOf [ReqlNull] and serializedTo("null")
r.expr(true) shouldBe subtypeOf [ReqlBoolean] and serializedTo("true")
r.expr(false) shouldBe subtypeOf [ReqlBoolean] and serializedTo("false")
r.expr(123456) shouldBe subtypeOf [ReqlInteger] and serializedTo("123456")
r.expr(123654L) shouldBe subtypeOf [ReqlInteger] and serializedTo("123654")
r.expr(BigInt(345987L)) shouldBe subtypeOf [ReqlInteger] and serializedTo("345987")
r.expr(123.456) shouldBe subtypeOf [ReqlFloat] and serializedTo("123.456")
r.expr(Double.NaN) shouldBe subtypeOf [ReqlFloat] and serializedTo("null")
r.expr(Double.PositiveInfinity) shouldBe subtypeOf [ReqlFloat] and serializedTo("null")
r.expr(Double.NegativeInfinity) shouldBe subtypeOf [ReqlFloat] and serializedTo("null")
r.expr(BigDecimal(123.456)) shouldBe subtypeOf [ReqlFloat] and serializedTo("123.456")
r.expr("test \\"string\\"") shouldBe subtypeOf [ReqlString] and serializedTo(""""test \\"string\\""""")
r.expr(UUID.fromString("e0c568f6-a901-4ebd-a373-53908b28c2f8")) shouldBe
subtypeOf [ReqlUUID] and serializedTo(""""e0c568f6-a901-4ebd-a373-53908b28c2f8"""")
r.expr(Seq(r.expr(null), r.expr(true), r.expr(4815162342L), r.expr("yep"))) shouldBe
subtypeOf [ReqlArray[ReqlDatum]] and serializedTo("""[2,[null,true,4815162342,"yep"]]""")
r.expr(List(Json.fromString("test"))) shouldBe
subtypeOf [ReqlArray[ReqlJson]] and serializedTo("""[2,["test"]]""")
r.expr(JsonObject.fromIterable(Seq(
"code" -> Json.fromValues(Seq(
Json.fromInt(1), Json.fromInt(2), Json.fromInt(3)
))
))) shouldBe subtypeOf [ReqlJsonObject] and serializedTo("""{"code":[2,[1,2,3]]}""")
r.expr(JsonObject.fromIterable(Seq(
"code" -> Json.fromInt(123),
"name" -> Json.fromValues(Seq(Json.fromInt(1), Json.fromInt(2), Json.fromInt(3)))
))) shouldBe subtypeOf [ReqlJsonObject] and serializedTo("""{"code":123,"name":[2,[1,2,3]]}""")
r.expr(Map(
"code" -> r.expr(123),
"name" -> r.expr(Seq(r.expr(1), r.expr(2), r.expr(3)))
)) shouldBe subtypeOf [ReqlObject] and serializedTo("""{"code":123,"name":[2,[1,2,3]]}""")
r.expr(Map(
"co\\"de" -> r.expr(123),
"na\\\\me" -> r.expr(Seq(r.expr(1), r.expr(2), r.expr(3)))
)) shouldBe subtypeOf [ReqlObject] and serializedTo("""{"co\\"de":123,"na\\\\me":[2,[1,2,3]]}""")
r.expr(Map(
"code" -> r.expr(123),
"name" -> r.expr(Seq(r.expr(1), r.expr(JsonObject.fromMap(Map(
"test" -> Json.fromValues(Seq(Json.fromInt(1), Json.fromInt(2), Json.fromInt(3)))
))), r.expr(3)))
)) shouldBe
subtypeOf [ReqlObject] and
serializedTo("""{"code":123,"name":[2,[1,{"test":[2,[1,2,3]]},3]]}""")
r.expr(Map(
"test" -> r.expr(Seq(r.expr(1), r.expr(2))),
"test2" -> r.expr(Seq(r.expr(3), r.expr(Map(
"test3" -> r.expr(Seq(r.expr(4), r.expr(Seq(r.expr(5), r.expr(6)))))
))))
)) shouldBe
subtypeOf [ReqlObject] and
serializedTo("""{"test":[2,[1,2]],"test2":[2,[3,{"test3":[2,[4,[2,[5,6]]]]}]]}""")
r.expr(
Json.fromValues(Seq(Json.fromInt(123), Json.fromString("te\\"st"), Json.fromBoolean(false)))
) shouldBe subtypeOf [ReqlJson] and serializedTo("""[2,[123,"te\\"st",false]]""")
}
it should "implicitly convert some types to ReQL types" in {
import io.circe.literal._
r.expr(Seq(1, 2, 3): Seq[ReqlInteger]) shouldBe
subtypeOf [ReqlArray[ReqlInteger]] and serializedTo("[2,[1,2,3]]")
r.expr(Map("te\\"st" -> "test value"): Map[String, ReqlValue]) shouldBe
subtypeOf [ReqlObject] and serializedTo("""{"te\\"st":"test value"}""")
forceConversion(json"""{"abc": [1, [2, {"bcd": null}]]}""") shouldBe
subtypeOf[ReqlJson] and serializedTo("""{"abc":[2,[1,[2,[2,{"bcd":null}]]]]}""")
(json"""{"abc": [1, [2, {"bcd": null}]]}""": ReqlJson) shouldBe
subtypeOf[ReqlJson] and serializedTo("""{"abc":[2,[1,[2,[2,{"bcd":null}]]]]}""")
}
}
| pbaun/rere | modules/ql/src/test/scala/rere/ql/queries/ValueQueriesTest.scala | Scala | apache-2.0 | 4,344 |
package be.ac.umons.sdd2.util
import java.awt.Color
import scala.collection.mutable.Queue
import scala.io.Source
/**
* Created by florentdelgrange on 13/02/16.
*/
object SegmentLoader {
def matchColor(color: Color): String ={
color match {
case Color.BLUE => "Blue"
case Color.RED => "Red"
case Color.ORANGE => "Orange"
case Color.YELLOW => "Yellow"
case Color.BLACK => "Black"
case Color.GREEN => "Green"
case Color.GRAY => "Gray"
case Color.MAGENTA => "Magenta"
case _ => color.toString
}
}
/**
* Match a string with the linked color.
* @param string the string of the color
* @return the corresponding color
*/
def getColor(string : String): Color ={
string match {
case "Bleu" => Color.BLUE
case "Rouge" => Color.RED
case "Orange" => Color.ORANGE
case "Jaune" => Color.YELLOW
case "Noir" => Color.BLACK
case "Violet" => new Color(255, 0, 255)
case "Marron" => new Color(161,120,41)
case "Vert" => Color.GREEN
case "Gris" => Color.GRAY
case "Rose" => Color.MAGENTA
}
}
/**
   * This function loads all the segments of the file and returns an Array
* that contains all these segments.
* @param fileName the name of the file ; example : rectangle/rectangleHuge
* (you have to provide the path like this :
* type_of_scene/scene without the *.txt extension)
   * @return an Array with all the instantiated segments
*/
def loadSegment(fileName: String): Array[Segment] = {
var i = 0
var segments = new Array[Segment](0)
val stringBuilder = new StringBuilder("assets/Scènes/")
stringBuilder ++= fileName; stringBuilder ++= ".txt"
for(line <- Source.fromFile(stringBuilder.substring(0)).getLines())
if(i == 0){
segments = new Array[Segment](line.split(" ")(3).toInt)
i+=1
}
else{
val lineArray = line.split(" ")
        // i counts lines including the header, so the first data line (i == 1) goes to index 0.
        segments(i - 1) = new Segment((lineArray(0).toDouble, lineArray(1).toDouble),
(lineArray(2).toDouble, lineArray(3).toDouble), getColor(lineArray(4)))
i+=1
}
segments
}
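  // Illustrative sketch of the scene file layout, inferred from the parsing code above rather
  // than from the original project documentation (token names are assumptions):
  //   <marker> <maxX> <maxY> <segmentCount>   -- header line
  //   <x1> <y1> <x2> <y2> <couleur>           -- one line per segment, e.g. "0.0 0.0 10.0 5.0 Rouge"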
/**
   * This function loads all the segments of the file into a Queue.
* @param fileName the name of the file ; example : rectangle/rectangleHuge
* (you have to provide the path like this :
* type_of_scene/scene without the *.txt extension)
* @return a Queue with all the segments loaded in order of the file and
* the max values of the coordinates in absolute values.
*/
def loadSegmentInQueue(fileName: String): (Queue[Segment],Double,Double) = {
var maxX: Double = 0
var maxY: Double = 0
var firstIteration = true
var segments = Queue[Segment]()
val stringBuilder = new StringBuilder("/assets/Scènes/")
stringBuilder ++= fileName
stringBuilder ++= ".txt"
val in = getClass.getResourceAsStream(stringBuilder.substring(0))
for(line <- Source.fromInputStream(in).getLines())
if(firstIteration) {
firstIteration = false
val lineArray = line.split(" ")
maxX = lineArray(1).toDouble
maxY = lineArray(2).toDouble
}
else {
val lineArray = line.split(" ")
val segment = new Segment((lineArray(0).toDouble, lineArray(1).toDouble),
(lineArray(2).toDouble, lineArray(3).toDouble), getColor(lineArray(4)))
segments += segment
}
(segments,maxX,maxY)
}
}
| radioGiorgio/SDD2 | src/main/scala/be/ac/umons/sdd2/util/SegmentLoader.scala | Scala | gpl-3.0 | 3,559 |
package io.rout.example.benchmark
import io.rout._
import io.rout.circe._
import io.rout.generic.decoding._
import io.routs._
import io.circe.generic.auto._
object Benchmark extends App {
val jsonPayload = binaryBody.asJson[Payload]
val paramsPayload: ReqRead[Payload] = derive[Payload].fromParams
val payloadParams = post(Root / "params").sync(paramsPayload) { payload =>
Created(payload.toString)
}
val payloadParamsAuth = post(Root / "params" / "auth").filter[AuthedReq].sync(paramsPayload) { (auth, payload) =>
Created(payload.toString)
}
val payloadParamsPathAuth =
post(Root / "params" / "auth" / Match[String]).filter[AuthedReq].sync(paramsPayload) { (auth, string, payload) =>
Created(payload.toString)
}
val payloadJsonAuth = post(Root / "json" / "auth").filter[AuthedReq].sync(jsonPayload) { (auth, payload) =>
Created(payload)
}
val payloadJsonPathAuth =
post(Root / "json" / "auth" / Match[String]).filter[AuthedReq].sync(jsonPayload) { (auth, string, payload) =>
Created(payload)
}
val payloadJson = post(Root / "json").sync(jsonPayload)(p => Ok(p))
val rOut = mkRoutes(Seq(
payloadParams,
payloadJson,
AuthFilter.auth andThen payloadParamsAuth,
AuthFilter.auth andThen payloadJsonAuth,
AuthFilter.auth andThen payloadParamsPathAuth,
AuthFilter.auth andThen payloadJsonPathAuth
)).withNotFound("path was not found")
serve(rOut.service)
}
| teodimoff/rOut | examples/src/io/rout/benchmark/Benchmark.scala | Scala | apache-2.0 | 1,452 |
package org.jetbrains.plugins.scala.lang.psi.impl.statements.params
import com.intellij.lang.ASTNode
import com.intellij.psi._
import com.intellij.psi.impl.light.JavaIdentifier
import org.jetbrains.plugins.scala.extensions.{ObjectExt, ifReadAllowed}
import org.jetbrains.plugins.scala.lang.lexer.ScalaTokenTypes
import org.jetbrains.plugins.scala.lang.parser.ScalaElementTypes
import org.jetbrains.plugins.scala.lang.psi.ScalaStubBasedElementImpl
import org.jetbrains.plugins.scala.lang.psi.api.ScalaElementVisitor
import org.jetbrains.plugins.scala.lang.psi.api.base.ScLiteral
import org.jetbrains.plugins.scala.lang.psi.api.base.types.ScTypeElement
import org.jetbrains.plugins.scala.lang.psi.api.expr._
import org.jetbrains.plugins.scala.lang.psi.api.statements.params._
import org.jetbrains.plugins.scala.lang.psi.stubs._
import org.jetbrains.plugins.scala.lang.psi.stubs.elements.signatures.ScParamElementType
import org.jetbrains.plugins.scala.lang.psi.types._
import org.jetbrains.plugins.scala.lang.psi.types.api.Nothing
import org.jetbrains.plugins.scala.lang.psi.types.result._
/**
* @author Alexander Podkhalyuzin
*/
class ScParameterImpl protected (stub: ScParameterStub, nodeType: ScParamElementType[_ <: ScParameter], node: ASTNode)
extends ScalaStubBasedElementImpl(stub, nodeType, node) with ScParameter {
def this(node: ASTNode) = this(null, null, node)
def this(stub: ScParameterStub) = this(stub, ScalaElementTypes.PARAM, null)
override def toString: String = "Parameter: " + ifReadAllowed(name)("")
override def getTextOffset: Int = nameId.getTextRange.getStartOffset
def isCallByNameParameter: Boolean = byStubOrPsi(_.isCallByNameParameter)(paramType.exists(_.isCallByNameParameter))
override def getNameIdentifier: PsiIdentifier = new JavaIdentifier(getManager,nameId)
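  // Looks for a `deprecatedName` annotation on this parameter; if its single argument is a
  // symbol literal (e.g. 'oldName), returns the symbol text without the leading quote.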
def deprecatedName: Option[String] = byStubOrPsi(_.deprecatedName) {
annotations.find(_.typeElement.getText.contains("deprecatedName")) match {
case Some(deprecationAnnotation) =>
deprecationAnnotation.constructor.args.flatMap {
case args =>
val exprs = args.exprs
if (exprs.length != 1) None
else {
exprs(0) match {
case literal: ScLiteral if literal.getNode.getFirstChildNode != null &&
literal.getNode.getFirstChildNode.getElementType == ScalaTokenTypes.tSYMBOL =>
val literalText = literal.getText
if (literalText.length < 2) None
else Some(literalText.substring(1))
case _ => None
}
}
}
case None => None
}
}
def nameId: PsiElement = {
val id = findChildByType[PsiElement](ScalaTokenTypes.tIDENTIFIER)
if (id == null) findChildByType[PsiElement](ScalaTokenTypes.tUNDER) else id
}
def getTypeElement = null
def typeElement: Option[ScTypeElement] = byPsiOrStub(paramType.flatMap(_.typeElement.toOption))(_.typeElement)
def `type`(): TypeResult = {
def success(t: ScType): TypeResult = Right(t)
//todo: this is very error prone way to calc type, when usually we need real parameter type
getStub match {
case null =>
typeElement match {
case None if baseDefaultParam =>
getActualDefaultExpression match {
case Some(t) => success(t.`type`().getOrNothing)
case None => success(Nothing)
}
case None => expectedParamType.map(_.unpackedType) match {
case Some(t) => success(t)
case None => success(Nothing)
}
case Some(e) => success(e.`type`().getOrAny)
}
case paramStub =>
paramStub.typeText match {
case None if paramStub.getParentStub != null && paramStub.getParentStub.getParentStub != null &&
paramStub.getParentStub.getParentStub.getParentStub.isInstanceOf[ScFunctionStub] =>
Failure("Cannot infer type")
case None => Failure("Wrong Stub problem") //shouldn't be
case Some(_: String) => paramStub.typeElement match {
case Some(te) => te.`type`()
case None => Failure("Wrong type element")
}
}
}
}
def baseDefaultParam: Boolean = byStubOrPsi(_.isDefaultParameter)(findChildByType(ScalaTokenTypes.tASSIGN) != null)
def isRepeatedParameter: Boolean = byStubOrPsi(_.isRepeated)(paramType.exists(_.isRepeatedParameter))
def getActualDefaultExpression: Option[ScExpression] = byPsiOrStub(findChild(classOf[ScExpression]))(_.bodyExpression)
override def accept(visitor: ScalaElementVisitor) {
visitor.visitParameter(this)
}
override def accept(visitor: PsiElementVisitor) {
visitor match {
case s: ScalaElementVisitor => s.visitParameter(this)
case _ => super.accept(visitor)
}
}
}
| gtache/intellij-lsp | intellij-lsp-dotty/src/org/jetbrains/plugins/scala/lang/psi/impl/statements/params/ScParameterImpl.scala | Scala | apache-2.0 | 4,861 |
/*
* Copyright (C) 2013 Alcatel-Lucent.
*
* See the NOTICE file distributed with this work for additional
* information regarding copyright ownership.
* Licensed to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package mbench
package properties
import java.util.{ Properties => JProps }
/**
* Class representing application properties.
*
* An instance of this class looks up the value of a property either in system properties passed on the
* command line or in its associated property file if it is not found. If ultimately the file does
* not exist or an entry for that property is missing in this file, then it falls back on the
* default value specified when the property was declared (see [[mbench.properties.Property]]).
*
 * Note that the name of system properties passed on the command line must be prefixed by the name of
* the file associated to a Properties object. For example, if a integer property `foo` is associated to a
* properties file `bar.properties`, then the value of `foo` can be passed on the command line using
* the syntax `-Dbar.foo=4`.
*
* Properties are created using the factory methods declared in the companion object of this class.
*/
class Properties private (fileName: String, props: JProps) {
private lazy val prefix = {
val index = fileName.indexOf(".properties")
if (index > 0)
fileName.substring(0, index) + "."
else ""
}
/**
* Get the value of a property.
*
* @param p the property.
* @return its value retrieved either from system properties, a property file or its default value
* depending on where it is found first.
*/
def get[T](p: Property[T]): T = {
if (!prefix.isEmpty) {
val v = System.getProperty(prefix + p.name)
if (v != null)
p.valueFromString(v) match {
case Left(err) => System.err.println(fileName + ":" + err)
case Right(value) => return value
}
}
val s = props.getProperty(p.name)
if (s == null)
return p.defaultValue
    p.valueFromString(s).fold(err => { System.err.println(fileName + ":" + err); p.defaultValue }, identity)
}
/**
* Get the value of a property.
*
* @param name the property name.
* @param defaultValue the default value of the property.
* @return its value retrieved either from system properties, a property file or its default value.
*/
def get[T: ClassTag](name: String, defaultValue: T): T =
get(Property[T](name, null, defaultValue))
}
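// Illustrative usage sketch (not part of the original file; the file and property names below
// are made up for the example):
//   val props = Properties.load("bench.properties")
//   val threads = props.get("threads", 4)
//   // passing -Dbench.threads=8 on the command line overrides both the file and the default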
/**
* Factory object for properties.
*/
object Properties {
/**
* Load a property file.
*
   * If the file does not exist, the method returns an empty property object as if the file
* existed but did not declare any property.
*
* @param fileName the file name in which to search for a property in case it is not specified
* as system properties.
*/
def load(fileName: String): Properties = {
val alternateFileName = System.getProperty(fileName)
val file = if (alternateFileName == null) new java.io.File(fileName) else {
val file = new java.io.File(alternateFileName)
if (file.exists()) file
else {
System.err.println(alternateFileName + " not found, reverting to " + fileName)
new java.io.File(fileName)
}
}
new Properties(file.getName(), load(file))
}
private[this] def load(file: java.io.File): JProps = {
val props = new JProps()
if (file.exists) {
val in = new java.io.FileInputStream(file);
props.load(in)
in.close()
}
props
}
} | sbocq/mbench | mbench/src/main/scala/mbench/properties/Properties.scala | Scala | apache-2.0 | 4,020 |
/*
* Copyright © 2014 TU Berlin ([email protected])
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.emmalanguage
import org.apache.log4j.Logger
import org.apache.spark.sql.SparkSession
import java.nio.file.Paths
trait SparkAware {
Logger.getLogger("org.apache.spark").setLevel(org.apache.log4j.Level.WARN)
protected trait SparkConfig {
val appName: String
val master: String
val warehouseDir: String
}
protected val defaultSparkConfig = new SparkConfig {
val appName = this.getClass.getSimpleName
val master = "local[*]"
val warehouseDir = Paths.get(sys.props("java.io.tmpdir"), "spark-warehouse").toUri.toString
}
protected lazy val defaultSparkSession =
sparkSession(defaultSparkConfig)
protected def sparkSession(c: SparkConfig): SparkSession = SparkSession.builder()
.appName(c.appName)
.master(c.master)
.config("spark.sql.warehouse.dir", c.warehouseDir)
.getOrCreate()
protected def withDefaultSparkSession[T](f: SparkSession => T): T =
f(defaultSparkSession)
}
| emmalanguage/emma | emma-spark/src/main/scala/org/emmalanguage/SparkAware.scala | Scala | apache-2.0 | 1,574 |
// scalac: -Xlint:constant -Xfatal-warnings
object Test {
val fails = 1 + 2 / (3 - 2 - 1)
}
| scala/scala | test/files/neg/constant-warning.scala | Scala | apache-2.0 | 94 |
/*
* Copyright (c) 2014-2021 by The Monix Project Developers.
* See the project homepage at: https://monix.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monix.reactive.internal.operators
import cats.effect.IO
import monix.execution.atomic.Atomic
import monix.reactive.{BaseTestSuite, Observable, OverflowStrategy}
import scala.util.Success
object PublishSelectorSuite extends BaseTestSuite {
implicit val os: OverflowStrategy[Nothing] = OverflowStrategy.Default
test("publishSelector sanity test") { implicit s =>
val isStarted = Atomic(0)
val f = Observable
.range(0, 1000)
.doOnStartF(_ => IO(isStarted.increment()))
.publishSelector { source =>
Observable(source, source, source).merge
}
.sumL[Long]
.runToFuture
s.tick()
assertEquals(f.value, Some(Success(500 * 999 * 3)))
assertEquals(isStarted.get(), 1)
}
test("treating Stop event") { implicit s =>
val isStarted = Atomic(0)
val isCanceled = Atomic(false)
val f = Observable
.range(0, 10000)
.doOnStartF(_ => IO(isStarted.increment()))
.doOnSubscriptionCancelF(() => isCanceled.set(true))
.publishSelector { source =>
source.map(_ => 1)
}
.take(2000L)
.sumL
.runToFuture
s.tick()
assertEquals(f.value, Some(Success(2000)))
assertEquals(isStarted.get(), 1)
assert(isCanceled.get(), "isCanceled")
}
}
| monixio/monix | monix-reactive/shared/src/test/scala/monix/reactive/internal/operators/PublishSelectorSuite.scala | Scala | apache-2.0 | 1,951 |
package ml.combust.mleap.bundle.ops.feature
import ml.combust.bundle.BundleContext
import ml.combust.bundle.dsl._
import ml.combust.bundle.op.OpModel
import ml.combust.mleap.bundle.ops.MleapOp
import ml.combust.mleap.core.feature.NormalizerModel
import ml.combust.mleap.runtime.MleapContext
import ml.combust.mleap.runtime.transformer.feature.Normalizer
/**
* Created by hollinwilkins on 9/24/16.
*/
class NormalizerOp extends MleapOp[Normalizer, NormalizerModel] {
override val Model: OpModel[MleapContext, NormalizerModel] = new OpModel[MleapContext, NormalizerModel] {
override val klazz: Class[NormalizerModel] = classOf[NormalizerModel]
override def opName: String = Bundle.BuiltinOps.feature.normalizer
override def store(model: Model, obj: NormalizerModel)
(implicit context: BundleContext[MleapContext]): Model = {
model.withValue("p_norm", Value.double(obj.pNorm)).
withValue("input_size", Value.int(obj.inputSize))
}
override def load(model: Model)
(implicit context: BundleContext[MleapContext]): NormalizerModel = {
NormalizerModel(pNorm = model.value("p_norm").getDouble,
inputSize = model.value("input_size").getInt)
}
}
override def model(node: Normalizer): NormalizerModel = node.model
}
| combust-ml/mleap | mleap-runtime/src/main/scala/ml/combust/mleap/bundle/ops/feature/NormalizerOp.scala | Scala | apache-2.0 | 1,313 |
/**
* Created by peter_v on 22/02/15.
*/
package base
import org.junit.runner.RunWith
import org.scalatest.FunSuite
import org.scalatest.junit.JUnitRunner
import common._
@RunWith(classOf[JUnitRunner])
class PredicateObjectSuite extends FunSuite {
test("PredicateObject equality") {
val predicateObjectFoo1 = PredicateObject(
predicate = "rdf:type",
objectValue = "Foo",
objectType = "s"
)
val predicateObjectFoo2 = PredicateObject(
predicate = "rdf:type",
objectValue = "Foo",
objectType = "s"
)
val predicateObjectBar = PredicateObject(
predicate = "rdf:type",
objectValue = "Bar",
objectType = "s"
)
assertResult(true) { predicateObjectFoo1 == predicateObjectFoo1 }
assertResult(true) { predicateObjectFoo1 == predicateObjectFoo2 }
assertResult(true) { predicateObjectFoo2 == predicateObjectFoo1 }
assertResult(false) { predicateObjectFoo1 == predicateObjectBar }
assertResult(false) { predicateObjectBar == predicateObjectFoo1 }
assertResult(false) { predicateObjectFoo1 == "Foo" }
}
test("PredicateObject hashCode") {
val predicateObjectFoo1 = PredicateObject(
predicate = "rdf:type",
objectValue = "Foo",
objectType = "s"
)
val predicateObjectFoo2 = PredicateObject(
predicate = "rdf:type",
objectValue = "Foo",
objectType = "s"
)
val predicateObjectBar = PredicateObject(
predicate = "rdf:type",
objectValue = "Bar",
objectType = "s"
)
assertResult(true) { predicateObjectFoo1.hashCode == predicateObjectFoo1.hashCode }
assertResult(true) { predicateObjectFoo1.hashCode == predicateObjectFoo2.hashCode }
assertResult(false) { predicateObjectFoo1.hashCode == predicateObjectBar.hashCode }
}
test("PredicateObject can be created without explicit objectType (defaults to string)") {
val predicateObject = PredicateObject(
predicate = "rdf:type",
objectValue = "Foo"
)
assertResult(PredicateObject("rdf:type", "Foo", "s")) { predicateObject }
}
test("PredicateObject fails with empty predicate") {
intercept[IllegalArgumentException] {
PredicateObject("", "Foo", "s")
}
}
test("PredicateObject fails with empty object for an integer") {
intercept[IllegalArgumentException] {
PredicateObject("amd:ping", "", "i")
}
}
test("PredicateObject fails with invalid object for an integer") {
intercept[IllegalArgumentException] {
PredicateObject("amd:ping", "abc", "i")
}
}
test("PredicateObject fails with empty object for a decimal") {
intercept[IllegalArgumentException] {
PredicateObject("amd:dec", "", "d")
}
}
test("PredicateObject fails with invalid object for a decimal") {
intercept[IllegalArgumentException] {
PredicateObject("amd:dec", "abc.def", "d")
}
}
test("PredicateObject fails with empty object for a reference") {
intercept[IllegalArgumentException] {
PredicateObject("amd:ref", "", "r")
}
}
test("PredicateObject fails with invalid object for a reference") {
intercept[IllegalArgumentException] {
PredicateObject("amd:ref", "abcd-too-short", "r")
}
}
test("PredicateObject succeeds with a time that at least has proper date format") {
PredicateObject("amd:time", "2016-04-03", "t")
PredicateObject("amd:time", "2016-04-03T13:20:53Z", "t")
PredicateObject("amd:time", "2016-04-03T13:20:53.123Z", "t")
PredicateObject("amd:time", "2016-04-03T13:20:53.123456789Z", "t")
}
test("PredicateObject fails with invalid object for a time") {
intercept[IllegalArgumentException] {
PredicateObject("amd:time", "61-02-03", "t")
}
}
test("PredicateObject succeeds with a uri (any format)") {
PredicateObject("amd:uri", "", "u")
PredicateObject("amd:uri", "http://foobar.org", "u")
PredicateObject("amd:uri", "http://foo.org/bar?x=4&y=7", "u")
PredicateObject("amd:uri", "a83hjcx$%#^&", "u")
}
test("PredicateObject succeeds with a boolean") {
PredicateObject("amd:boolean", "true", "b")
PredicateObject("amd:boolean", "false", "b")
}
test("PredicateObject fails with invalid object for a boolean") {
intercept[IllegalArgumentException] {
PredicateObject("amd:boolean", "61-02-03", "b")
}
}
test("PredicateObject fails with undefined objectType") {
intercept[IllegalArgumentException] {
PredicateObject("amd:xyz", "abc", "z")
}
}
test("PredicateObject fails with a not yet implemented objectType") {
intercept[IllegalArgumentException] {
PredicateObject("amd:float", "1.56", "f")
}
}
test("PredicateObject can be created with empty object when a string") {
val testPredicateObject = PredicateObject(
predicate = "amd:bar",
objectValue = ""
)
assertResult(PredicateObject("amd:bar", "", "s")) { testPredicateObject }
}
test("PredicateObject with empty object be split to same length as non-empty object") {
val testPredicateObject = PredicateObject(
predicate = "amd:bar",
objectValue = "bar"
)
val testPredicateObjectEmpty = PredicateObject(
predicate = "amd:bar",
objectValue = ""
)
assertResult(6) { testPredicateObject.toString.split(separator, 6).length }
assertResult(6) { testPredicateObjectEmpty.toString.split(separator, 6).length }
}
test("PredicateObject can be created with at, from, to timestamps") {
val testPredicateObject = PredicateObject(
predicate = "amd:bar",
objectValue = "",
factsAtOption = Some("2014-11-21T23:59:36.123456789Z"),
from = OptionalTimestamp("2013-01-01T00:00:00Z"),
to = OptionalTimestamp("2015-12-31T23:59:59.999Z")
)
assertResult("2014-11-21T23:59:36.123456789Z") { testPredicateObject.at.toString }
assertResult("2013-01-01T00:00:00Z") { testPredicateObject.from.toString }
assertResult("2015-12-31T23:59:59.999Z") { testPredicateObject.to.toString }
}
test("PredicateObject can be created with at timestamp") {
PredicateObject(
predicate = "amd:bar",
objectValue = "",
factsAtOption = Some("2014-11-21T23:59:36.123456789Z")
)
}
test("PredicateObject can be created with from timestamp") {
PredicateObject(
predicate = "amd:bar",
objectValue = "",
from = OptionalTimestamp("2014-11-21T23:59:36.123456789Z")
)
}
test("PredicateObject can be created with to timestamp") {
PredicateObject(
predicate = "amd:bar",
objectValue = "",
to = OptionalTimestamp("2014-11-21T23:59:36.123456789Z")
)
}
test("PredicateObject has by default today in the at timestamp") {
val testPredicateObject = PredicateObject(
predicate = "amd:bar",
objectValue = ""
)
assert(testPredicateObject.at.get.toString.matches("""^\\d{4}-\\d\\d-\\d\\d$"""))
}
test("PredicateObject is invalid when created with `to` earlier than `from` timestamps") {
intercept[IllegalArgumentException] {
val testPredicateObject = PredicateObject(
predicate = "amd:bar",
objectValue = "",
from = OptionalTimestamp("2016-01-01T00:00:00Z"),
to = OptionalTimestamp("2015-12-31T23:59:59.999Z")
)
}
}
test("toString returns CSV style result") {
val testPredicateObject = PredicateObject(
predicate = "amd:bar",
objectValue = "bar",
factsAtOption = Some("2014-11-21T23:59:36.123456789Z"),
from = OptionalTimestamp("2013-01-01T00:00:00Z"),
to = OptionalTimestamp("2015-12-31T23:59:59.999Z")
)
val expected = "2014-11-21T23:59:36.123456789Z;2013-01-01T00:00:00Z;2015-12-31T23:59:59.999Z;amd:bar;s;bar"
assertResult(expected){ testPredicateObject.toString }
}
test("toString returns CSV style result with an at timestamps") {
val testPredicateObject = PredicateObject(
predicate = "amd:bar",
objectValue = "bar"
)
val expected = """2\\d\\d\\d-\\d\\d-\\d\\d;;;amd:bar;s;bar"""
assert(testPredicateObject.toString.matches(expected))
}
}
| petervandenabeele/AllMyData | src/test/scala/base/PredicateObjectSuite.scala | Scala | mit | 8,113 |
package eu.gruchala
import scala.collection.mutable
// In general, the distance between two points in the plane is the square root of the sum
// of the squared differences of their coordinates; here `dist` is the distance of the point
// from the origin, computed over the three coordinates x, y and z.
case class Point(x: Int, y: Int, z: Int) {
lazy val dist: Double = math.sqrt(List(x, y, z).map(_.toDouble).map(math.pow(_, 2)).sum)
}
object Points {
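  // Returns the k points closest to the origin among those produced by `input`: keeps a working
  // set of at most k points and, once full, evicts the farthest point whenever a closer one arrives.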
def closest(input: Iterator[Point], k: Int): List[Point] = {
val mem: mutable.Set[Point] = mutable.Set.empty
while (input.hasNext) {
val current = input.next()
val largerElems = mem.filter(_.dist > current.dist)
val hasLargerElems = largerElems.nonEmpty
val shouldAppend = k > mem.size
val hasNoSpace = mem.size >= k
if (hasLargerElems && hasNoSpace) {
val maxPoint = largerElems.max(Ordering.by[Point, Double](_.dist))
mem -= maxPoint
}
if (shouldAppend || hasLargerElems) mem += current
}
mem.toList
}
}
| leszekgruchala/scala-exercises | src/main/scala/eu/gruchala/Points.scala | Scala | apache-2.0 | 924 |
/*
* Copyright (c) 2014 Contributor. All rights reserved.
*/
package org.scalaide.debug.internal.ui.actions
import org.eclipse.core.resources.IProject
import org.eclipse.ui.IWorkbenchPage
import org.scalaide.debug.internal.ui.ExpressionEvaluatorView
import org.scalaide.ui.internal.actions.RunSelection
/**
* Thank to it it's possible to directly evaluate in evaluator text selected in Scala editor
*/
class RunSelectionInExpressionEvaluator extends RunSelection {
override def doWithSelection(project: IProject, activePage: IWorkbenchPage, text: String): Unit =
ExpressionEvaluatorView.evaluate(project, activePage, text)
} | andrey-ilinykh/scala-ide | org.scala-ide.sdt.debug.expression/src/org/scalaide/debug/internal/ui/actions/RunSelectionInExpressionEvaluator.scala | Scala | bsd-3-clause | 636 |
/*
* Copyright 2011-2019 Asakusa Framework Team.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.asakusafw.spark.runtime.fragment
import org.apache.hadoop.io.Writable
import com.asakusafw.runtime.flow.{ ArrayListBuffer, FileMapListBuffer, ListBuffer }
import com.asakusafw.runtime.model.DataModel
abstract class OutputFragment[T <: DataModel[T] with Writable](bufferSize: Int)
extends Fragment[T] {
def this() = this(-1)
def newDataModel(): T
private[this] val buf: ListBuffer[T] = {
val buf =
if (bufferSize >= 0) {
new FileMapListBuffer[T](bufferSize)
} else {
new ArrayListBuffer[T]()
}
buf.begin()
buf
}
override def doReset(): Unit = {
buf.shrink()
buf.begin()
}
override def doAdd(result: T): Unit = {
if (buf.isExpandRequired()) {
buf.expand(newDataModel())
}
buf.advance().copyFrom(result)
}
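  // Iterates over the buffered data models; once the iterator is exhausted the buffer is
  // reset so that the fragment can be reused.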
def iterator: Iterator[T] = {
buf.end()
val iter = buf.iterator()
new Iterator[T] {
private[this] var hasnext = true
override def hasNext: Boolean = {
if (hasnext) {
if (!iter.hasNext) {
hasnext = false
doReset()
}
}
hasnext
}
override def next(): T =
if (hasNext) {
iter.next()
} else {
Iterator.empty.next()
}
}
}
}
| ashigeru/asakusafw-spark | runtime/src/main/scala/com/asakusafw/spark/runtime/fragment/OutputFragment.scala | Scala | apache-2.0 | 1,905 |
/**
* Copyright (c) 2016 Intel Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.trustedanalytics.sparktk.frame.internal.ops.statistics.correlation
import org.apache.commons.lang.StringUtils
import org.trustedanalytics.sparktk.frame.internal.{ FrameState, FrameSummarization, BaseFrame }
trait CorrelationSummarization extends BaseFrame {
/**
* Calculate correlation for two columns of the current frame.
*
* @note This method applies only to columns containing numerical data.
*
* @param columnNameA The name of the column to compute the correlation.
* @param columnNameB The name of the column to compute the correlation.
* @return Pearson correlation coefficient of the two columns.
*/
def correlation(columnNameA: String,
columnNameB: String): Double = {
execute(Correlation(columnNameA, columnNameB))
}
}
case class Correlation(columnNameA: String, columnNameB: String) extends FrameSummarization[Double] {
lazy val dataColumnNames = List(columnNameA, columnNameB)
require(dataColumnNames.forall(StringUtils.isNotEmpty(_)), "data column names cannot be null or empty.")
override def work(state: FrameState): Double = {
state.schema.validateColumnsExist(dataColumnNames)
// Calculate correlation
CorrelationFunctions.correlation(state, dataColumnNames)
}
}
| dmsuehir/spark-tk | sparktk-core/src/main/scala/org/trustedanalytics/sparktk/frame/internal/ops/statistics/correlation/Correlation.scala | Scala | apache-2.0 | 1,967 |
/**
* Copyright 2014-2015 Martin Cooper
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.github.martincooper.datatable.DataViewSpecs
import com.github.martincooper.datatable.{ DataColumn, DataTable }
import org.scalatest.{ FlatSpec, Matchers }
class DataViewToDataTableSpec extends FlatSpec with Matchers {
private def buildTestTable(): DataTable = {
val dataColOne = new DataColumn[Int]("ColOne", (0 to 99) map { i => i })
val dataColTwo = new DataColumn[String]("ColTwo", (0 to 99) map { i => "Value : " + i })
val dataColThree = new DataColumn[Boolean]("ColThree", (0 to 99) map { i => i % 2 == 0 })
DataTable("TestTable", Seq(dataColOne, dataColTwo, dataColThree)).get
}
"A new DataView" can "be converted to a DataTable" in {
val dataTable = buildTestTable()
val dataView = dataTable.toDataView
val newDataTable = dataView.toDataTable
newDataTable.name should be("TestTable")
newDataTable.rowCount should be(100)
newDataTable.columns.length should be(3)
}
it can "be converted to a DataTable from a filtered set" in {
val dataTable = buildTestTable()
val filteredDataView = dataTable.filter(row => row.as[Int]("ColOne") > 49)
val newDataTable = filteredDataView.toDataTable
newDataTable.name should be("TestTable")
newDataTable.rowCount should be(50)
newDataTable.columns.length should be(3)
}
it can "be converted to a DataTable from multiple filtered sets" in {
val dataTable = buildTestTable()
val filteredDataOne = dataTable.filter(row => row.as[Int]("ColOne") > 49)
val filteredDataTwo = filteredDataOne.filter(row => !row.as[Boolean]("ColThree"))
val filteredDataThree = filteredDataTwo.filter(row => row.as[String]("ColTwo").endsWith("5"))
val newDataTable = filteredDataThree.toDataTable
newDataTable.name should be("TestTable")
newDataTable.rowCount should be(5)
newDataTable.columns.length should be(3)
newDataTable.columns(0).data should be(Seq(55, 65, 75, 85, 95))
}
} | martincooper/scala-datatable | src/test/scala/com/github/martincooper/datatable/DataViewSpecs/DataViewToDataTableSpec.scala | Scala | apache-2.0 | 2,545 |
/*
* Copyright (c) 2016 dawid.melewski
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal in
* the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
* the Software, and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
* FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
* COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
* IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package actyxpoweruseralert.services
import java.util.concurrent.ConcurrentHashMap
import actyxpoweruseralert.model._
import com.typesafe.scalalogging.LazyLogging
import scala.collection.convert.decorateAsScala._
import scala.concurrent.{ ExecutionContext, Future }
trait MachineInfoLogStorageService {
def save(machineId: MachineId, machine: MachineInfo): Future[Unit]
def getInfoLogs(machineId: MachineId): Future[List[MachineInfo]]
}
class InMemoryMachinesLogStorageService(implicit val ex: ExecutionContext)
extends MachineInfoLogStorageService
with LazyLogging {
private var machines =
new ConcurrentHashMap[MachineId, List[MachineInfo]]().asScala
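  // Stores the log entry for the machine, replacing any previously stored entry that has the
  // same timestamp.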
override def save(machineId: MachineId, machine: MachineInfo): Future[Unit] =
Future {
val logs = machines
.getOrElse(machineId, List.empty)
.filter(_.timestamp != machine.timestamp)
machines += machineId -> (machine :: logs)
()
}
override def getInfoLogs(machineId: MachineId): Future[List[MachineInfo]] =
Future {
machines.getOrElse(machineId, List.empty)
}
}
| meloniasty/ActyxPowerUserAlert | src/main/scala/actyxpoweruseralert/services/MachineInfoLogStorageService.scala | Scala | mit | 2,219 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.datasources.orc
import java.util.Locale
import org.apache.orc.OrcConf.COMPRESS
import org.apache.spark.sql.catalyst.util.CaseInsensitiveMap
import org.apache.spark.sql.internal.SQLConf
/**
* Options for the ORC data source.
*/
class OrcOptions(
@transient private val parameters: CaseInsensitiveMap[String],
@transient private val sqlConf: SQLConf)
extends Serializable {
import OrcOptions._
def this(parameters: Map[String, String], sqlConf: SQLConf) =
this(CaseInsensitiveMap(parameters), sqlConf)
/**
* Compression codec to use.
* Acceptable values are defined in [[shortOrcCompressionCodecNames]].
*/
val compressionCodec: String = {
// `compression`, `orc.compress`(i.e., OrcConf.COMPRESS), and `spark.sql.orc.compression.codec`
// are in order of precedence from highest to lowest.
val orcCompressionConf = parameters.get(COMPRESS.getAttribute)
val codecName = parameters
.get("compression")
.orElse(orcCompressionConf)
.getOrElse(sqlConf.orcCompressionCodec)
.toLowerCase(Locale.ROOT)
if (!shortOrcCompressionCodecNames.contains(codecName)) {
val availableCodecs = shortOrcCompressionCodecNames.keys.map(_.toLowerCase(Locale.ROOT))
throw new IllegalArgumentException(s"Codec [$codecName] " +
s"is not available. Available codecs are ${availableCodecs.mkString(", ")}.")
}
shortOrcCompressionCodecNames(codecName)
}
}
object OrcOptions {
// The ORC compression short names
private val shortOrcCompressionCodecNames = Map(
"none" -> "NONE",
"uncompressed" -> "NONE",
"snappy" -> "SNAPPY",
"zlib" -> "ZLIB",
"lzo" -> "LZO")
}
| cin/spark | sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/orc/OrcOptions.scala | Scala | apache-2.0 | 2,510 |
/*
* Copyright (c) 2014-2015 by its authors. Some rights reserved.
* See the project homepage at: http://www.monifu.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monifu.reactive.internals.operators
import monifu.concurrent.Scheduler
import monifu.reactive.subjects.ReplaySubject
import monifu.reactive.{Ack, Observable, Observer, Subject}
import scala.concurrent.Future
private[reactive] object repeat {
/**
* Implementation for [[Observable.repeat]].
*/
def elements[T](source: Observable[T]): Observable[T] = {
// recursive function - subscribes the observer again when
// onComplete happens
def loop(subject: Subject[T, T], observer: Observer[T])(implicit s: Scheduler): Unit =
subject.onSubscribe(new Observer[T] {
def onNext(elem: T) = {
observer.onNext(elem)
}
def onError(ex: Throwable) =
observer.onError(ex)
def onComplete(): Unit =
loop(subject, observer)
})
Observable.create { subscriber =>
import subscriber.{scheduler => s}
val subject = ReplaySubject[T]()
loop(subject, subscriber)
source.onSubscribe(new Observer[T] {
def onNext(elem: T): Future[Ack] = {
subject.onNext(elem)
}
def onError(ex: Throwable): Unit = {
subject.onError(ex)
}
def onComplete(): Unit = {
subject.onComplete()
}
})
}
}
/** Implementation for [[monifu.reactive.Observable.repeatTask]] */
def task[T](t: => T): Observable[T] = {
Observable.fromIterator(new TaskIterator[T](t))
}
private final class TaskIterator[T](t: => T) extends Iterator[T] {
val hasNext = true
def next(): T = t
}
}
| sergius/monifu | monifu/shared/src/main/scala/monifu/reactive/internals/operators/repeat.scala | Scala | apache-2.0 | 2,254 |
/*
* Copyright (C) FuseSource, Inc.
* http://fusesource.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.fusesource.fabric.monitor.plugins.jmx
import java.util.ArrayList
import java.util.List
import org.fusesource.fabric.monitor.api.DataSourceDTO
/**
* Represents a tree of data source values
*
* @author <a href="http://hiramchirino.com">Hiram Chirino</a>
*/
class DataSourceGroup(var id: String=null, var name: String=null ) {
var description: String = null
var children: List[DataSourceGroup] = new ArrayList[DataSourceGroup]
var data_sources: List[DataSourceDTO] = new ArrayList[DataSourceDTO]
def dump(indent: Int, concise: Boolean) {
import collection.JavaConversions._
def print_indent(indent: Int) = for (i <- 0.to(indent)) {
print(" ")
}
print_indent(indent)
println(if (concise) id else this)
val new_indent = indent + 1
for (child <- children) {
child.dump(new_indent, concise)
}
for (ds <- data_sources) {
print_indent(new_indent)
println(if (concise) ds.id else ds)
}
}
}
| janstey/fuse | sandbox/fabric-monitor/src/main/scala/org/fusesource/fabric/monitor/plugins/jmx/DataSourceGroup.scala | Scala | apache-2.0 | 1,602 |
/*
* Copyright 2013 Maurício Linhares
*
* Maurício Linhares licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.github.mauricio.async.db.postgresql.util
import java.nio.charset.Charset
import java.security.MessageDigest
object PasswordHelper {
private final val Lookup = Array[Byte]('0', '1', '2', '3', '4', '5', '6', '7', '8', '9',
'a', 'b', 'c', 'd', 'e', 'f')
private def bytesToHex( bytes : Array[Byte], hex : Array[Byte], offset : Int ) {
var pos = offset
var i = 0
while ( i < 16 ) {
val c = bytes(i) & 0xff
var j = c >> 4
hex(pos) = Lookup(j)
pos += 1
j = (c & 0xf)
hex(pos) = Lookup(j)
pos += 1
i += 1
}
}
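  // Computes the PostgreSQL MD5 password hash: the hex digest of md5(password + user) is
  // concatenated with the salt, hashed again, and the result is prefixed with "md5".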
def encode( userText : String, passwordText : String, salt : Array[Byte], charset : Charset ) : Array[Byte] = {
val user = userText.getBytes(charset)
val password = passwordText.getBytes(charset)
val md = MessageDigest.getInstance("MD5")
md.update(password)
md.update(user)
val tempDigest = md.digest()
val hexDigest = new Array[Byte](35)
bytesToHex(tempDigest, hexDigest, 0)
md.update(hexDigest, 0, 32)
md.update(salt)
val passDigest = md.digest()
bytesToHex(passDigest, hexDigest, 3)
hexDigest(0) = 'm'
hexDigest(1) = 'd'
hexDigest(2) = '5'
hexDigest
}
}
| outbrain/postgresql-async | postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/util/PasswordHelper.scala | Scala | apache-2.0 | 1,871 |
package com.blrest.endpoint
import akka.actor.Actor
import com.codahale.metrics.MetricRegistry
import com.blrest.dao.{TagDao, ImageDirectoryDao}
import org.json4s.DefaultFormats
import spray.routing.RejectionHandler
/**
* Created by ccarrier for bl-rest.
* at 5:54 PM on 12/17/13
*/
trait MasterInjector extends Actor with ImageDirectoryEndpoint with TagEndpoint {
val imageDirectoryDao: ImageDirectoryDao
val tagDao: TagDao
val json4sJacksonFormats = DefaultFormats
def actorRefFactory = context
def receive = runRoute(imageDirectoryRoute ~ tagRoute)
}
| ctcarrier/bl-rest | src/main/scala/com/blrest/endpoint/MasterInjector.scala | Scala | mit | 575 |
package org.typelevel.tagged
import org.scalacheck.Prop._
import org.scalacheck.Properties
import org.typelevel.tagged.TestUtils._
import org.typelevel.tagged.tag1._
class Tag1DisassembledAnyVal {
val base: Int = 1
val tagged: Int @@ SomeTag = tag(1)
val untagged: Int = untag(tagged)
val arrayWithTagged: Array[Int @@ SomeTag] = Array(tag(1), tag(2))
}
class Tag1DisassembledAnyRef {
val base: String = "1"
val tagged: String @@ SomeTag = tag("1")
val untagged: String = untag(tagged)
val arrayWithTagged: Array[String @@ SomeTag] = Array(tag("1"), tag("2"))
}
object Tag1SpecJvm extends Properties("tag1") {
property("javap.AnyVal") = secure {
val actual = javapOutput(new Tag1DisassembledAnyVal)
val expected =
"""Compiled from "Tag1SpecJvm.scala"
|public class org.typelevel.tagged.Tag1DisassembledAnyVal {
| public int base();
| public java.lang.Object tagged();
| public int untagged();
| public java.lang.Object[] arrayWithTagged();
| public org.typelevel.tagged.Tag1DisassembledAnyVal();
|}
""".stripMargin.trim
actual ?= expected
}
property("javap.AnyRef") = secure {
val actual = javapOutput(new Tag1DisassembledAnyRef)
val expected =
"""Compiled from "Tag1SpecJvm.scala"
|public class org.typelevel.tagged.Tag1DisassembledAnyRef {
| public java.lang.String base();
| public java.lang.String tagged();
| public java.lang.String untagged();
| public java.lang.String[] arrayWithTagged();
| public org.typelevel.tagged.Tag1DisassembledAnyRef();
|}
""".stripMargin.trim
actual ?= expected
}
}
| fthomas/tagged | core/.jvm/src/test/scala/org/typelevel/tagged/Tag1SpecJvm.scala | Scala | apache-2.0 | 1,700 |
package com.aberdyne.graphchat
object ProtocolManager {
def list(db: DatabaseInterface): List[Protocol] = { Nil }
} | Kenishi/GraphChat | src/main/com/aberdyne/graphchat/ProtocolManager.scala | Scala | apache-2.0 | 117 |
/***********************************************************************
* Copyright (c) 2013-2017 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.accumulo.iterators
import java.util.{Collection => jCollection, Map => jMap}
import com.typesafe.scalalogging.LazyLogging
import org.apache.accumulo.core.client.IteratorSetting
import org.apache.accumulo.core.data.{ByteSequence, Key, Range, Value}
import org.apache.accumulo.core.iterators.{IteratorEnvironment, SortedKeyValueIterator}
import org.geotools.factory.Hints
import org.geotools.filter.text.ecql.ECQL
import org.locationtech.geomesa.accumulo.AccumuloFeatureIndexType
import org.locationtech.geomesa.accumulo.index.AccumuloFeatureIndex
import org.locationtech.geomesa.features.SerializationOption.SerializationOptions
import org.locationtech.geomesa.features.kryo.KryoBufferSimpleFeature
import org.locationtech.geomesa.index.iterators.{IteratorCache, SamplingIterator}
import org.locationtech.geomesa.utils.geotools.SimpleFeatureTypes
import org.opengis.feature.simple.{SimpleFeature, SimpleFeatureType}
import org.opengis.filter.Filter
import scala.util.control.NonFatal
/**
* Iterator that operates on kryo encoded values. It will:
* a) evaluate the feature against an optional filter
* b) apply an optional transform
*
* Uses lazy evaluation of attributes and binary transforms when possible.
*/
class KryoLazyFilterTransformIterator extends
SortedKeyValueIterator[Key, Value] with SamplingIterator with LazyLogging {
import KryoLazyFilterTransformIterator._
var source: SortedKeyValueIterator[Key, Value] = _
val topValue: Value = new Value()
var sft: SimpleFeatureType = _
var filter: (SimpleFeature) => Boolean = _
var reusableSf: KryoBufferSimpleFeature = _
var setId: () => Unit = _
var hasTransform: Boolean = _
override def init(src: SortedKeyValueIterator[Key, Value],
options: jMap[String, String],
env: IteratorEnvironment): Unit = {
this.source = src
val spec = options.get(SFT_OPT)
sft = IteratorCache.sft(spec)
val index = try { AccumuloFeatureIndex.index(options.get(INDEX_OPT)) } catch {
case NonFatal(_) => throw new RuntimeException(s"Index option not configured correctly: ${options.get(INDEX_OPT)}")
}
// noinspection ScalaDeprecation
val kryoOptions = if (index.serializedWithId) SerializationOptions.none else SerializationOptions.withoutId
reusableSf = IteratorCache.serializer(spec, kryoOptions).getReusableFeature
val transform = Option(options.get(TRANSFORM_DEFINITIONS_OPT))
val transformSchema = Option(options.get(TRANSFORM_SCHEMA_OPT))
for { t <- transform; ts <- transformSchema } {
reusableSf.setTransforms(t, IteratorCache.sft(ts))
}
hasTransform = transform.isDefined
val cql = Option(options.get(CQL_OPT)).map(IteratorCache.filter(sft, spec, _))
// TODO: can we optimize the configuration of sampling
val sampling = sample(options)
filter = (cql, sampling) match {
case (None, None) => (_) => true
case (Some(c), None) => c.evaluate
case (None, Some(s)) => s
case (Some(c), Some(s)) => (sf) => c.evaluate(sf) && s(sf)
}
// noinspection ScalaDeprecation
setId = if (index.serializedWithId || cql.isEmpty) { () => {} } else {
val getFromRow = index.getIdFromRow(sft)
() => {
val row = source.getTopKey.getRow()
reusableSf.setId(getFromRow(row.getBytes, 0, row.getLength))
}
}
}
override def seek(range: Range, columnFamilies: jCollection[ByteSequence], inclusive: Boolean): Unit = {
source.seek(range, columnFamilies, inclusive)
findTop()
}
override def next(): Unit = {
source.next()
findTop()
}
override def hasTop: Boolean = source.hasTop
override def getTopKey: Key = source.getTopKey
override def getTopValue: Value =
if (hasTransform) {
topValue.set(reusableSf.transform())
topValue
} else {
source.getTopValue
}
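  // Advances the source until the current serialized feature passes the configured filter
  // (and sampling), or the source is exhausted.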
def findTop(): Unit = {
var found = false
while (!found && source.hasTop) {
reusableSf.setBuffer(source.getTopValue.get())
setId()
if (filter(reusableSf)) {
found = true
} else {
source.next()
}
}
}
override def deepCopy(env: IteratorEnvironment): SortedKeyValueIterator[Key, Value] = {
val iter = new KryoLazyFilterTransformIterator
iter.source = source.deepCopy(env)
iter.sft = sft
iter.filter = filter
iter.reusableSf = reusableSf.copy()
iter.hasTransform = hasTransform
iter
}
}
object KryoLazyFilterTransformIterator {
import org.locationtech.geomesa.index.conf.QueryHints.RichHints
val SFT_OPT = "sft"
val INDEX_OPT = "index"
val CQL_OPT = "cql"
val TRANSFORM_SCHEMA_OPT = "tsft"
val TRANSFORM_DEFINITIONS_OPT = "tdefs"
val DefaultPriority = 25
def configure(sft: SimpleFeatureType,
index: AccumuloFeatureIndexType,
filter: Option[Filter],
hints: Hints): Option[IteratorSetting] =
configure(sft, index, filter, hints.getTransform, hints.getSampling)
def configure(sft: SimpleFeatureType,
index: AccumuloFeatureIndexType,
filter: Option[Filter],
transform: Option[(String, SimpleFeatureType)],
sampling: Option[(Float, Option[String])],
priority: Int = DefaultPriority): Option[IteratorSetting] = {
if (filter.isDefined || transform.isDefined || sampling.isDefined) {
val is = new IteratorSetting(priority, "filter-transform-iter", classOf[KryoLazyFilterTransformIterator])
is.addOption(SFT_OPT, SimpleFeatureTypes.encodeType(sft, includeUserData = true))
is.addOption(INDEX_OPT, index.identifier)
filter.foreach(f => is.addOption(CQL_OPT, ECQL.toCQL(f)))
transform.foreach { case (tdef, tsft) =>
is.addOption(TRANSFORM_DEFINITIONS_OPT, tdef)
is.addOption(TRANSFORM_SCHEMA_OPT, SimpleFeatureTypes.encodeType(tsft))
}
sampling.foreach(SamplingIterator.configure(sft, _).foreach { case (k, v) => is.addOption(k, v) })
Some(is)
} else {
None
}
}
} | ronq/geomesa | geomesa-accumulo/geomesa-accumulo-datastore/src/main/scala/org/locationtech/geomesa/accumulo/iterators/KryoLazyFilterTransformIterator.scala | Scala | apache-2.0 | 6,624 |
// lchannels - session programming in Scala
// Copyright (c) 2016, Alceste Scalas and Imperial College London
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// * Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// * Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
/** @author Alceste Scalas <[email protected]> */
package lchannels.util
import scala.concurrent.ExecutionContext
import scala.concurrent.duration.Duration
import lchannels._
///////////////////////////////////////////////////////////////////////////////
// Session type (from output endpoint):
// rec X . !Datum(T). X
///////////////////////////////////////////////////////////////////////////////
private case class Datum[T](value: T)(val cont: In[Datum[T]])
/** Interface for FIFO queues. */
trait Fifo[T] {
/** Return and remove the value at the head of the queue.
*
* This method blocks if the queue is empty,
* until a value can be retrieved, or the given timeout expires.
*
* @param timeout Maximum wait time when the queue is empty.
*
* @throws java.util.concurrent.TimeoutException if `timeout` expires
* @throws java.util.concurrent.InterruptedException if thread is interrupted
*/
def read(implicit timeout: Duration): T
/** Append a value to the queue.
*
* @param value Value to be appended.
*/
def write(value: T): Unit
}
/** Simple implementation of FIFO queue.
*
* @param factory Used internally to create [[In]]/[[Out]] instances.
*/
protected class FifoImpl[T](factory: () => (In[Datum[T]], Out[Datum[T]]))
extends Fifo[T] {
private[this] var (in, out) = factory()
private[this] val inLock = new Object() // Guards "in" above
private[this] val outLock = new Object() // Guards "out" above
def read(implicit timeout: Duration): T = inLock.synchronized {
val res = in.receive
in = res.cont
res.value
}
def write(value: T): Unit = outLock.synchronized {
out = out !! Datum[T](value)_
}
}
/** Simple FIFO queue. */
object Fifo {
/** Return an empty FIFO, internally based on [[LocalChannel]]s. */
def apply[T](): Fifo[T] = {
new FifoImpl[T](LocalChannel.factory[Datum[T]])
}
/** Return an empty FIFO, internally based on [[QueueChannel]]s
*
* @param ec Execution context for [[QueueChannel]] creation.
*/
def apply[T](ec: ExecutionContext): Fifo[T] = {
new FifoImpl[T](() => QueueChannel.factory[Datum[T]]()(ec))
}
}
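/* Hedged usage sketch (not part of the original source): exercises the Fifo API
 * above with the default LocalChannel-backed factory. Single-threaded, and the
 * writes happen before the reads, so no blocking occurs. */
private object FifoUsageSketch {
  import scala.concurrent.duration._
  def demo(): Int = {
    implicit val timeout: Duration = 5.seconds
    val q = Fifo[Int]()       // empty queue backed by LocalChannels
    q.write(1); q.write(2)    // append two values
    q.read + q.read           // read them back in FIFO order => 3
  }
}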
| scribble/scribble.github.io | src/main/jbake/assets/docs/lchannels/lchannels/src/main/scala/lchannels/util/Fifo.scala | Scala | apache-2.0 | 3,619 |
package slick.basic
import scala.language.{higherKinds, implicitConversions, existentials}
import slick.ast._
import slick.compiler.QueryCompiler
import slick.dbio._
import slick.lifted._
import slick.util.GlobalConfig
import com.typesafe.config.Config
/** The basic functionality that has to be implemented by all profiles. */
trait BasicProfile extends BasicActionComponent { self: BasicProfile =>
/** The external interface of this profile which defines the API. */
@deprecated("Use the Profile object directly instead of calling `.profile` on it", "3.2")
val profile: BasicProfile = this
/** The back-end type required by this profile */
type Backend <: BasicBackend
/** The back-end implementation for this profile */
val backend: Backend
/** The capabilities supported by this profile. This can be used to query at
* runtime whether a specific feature is supported. */
final val capabilities: Set[Capability] = computeCapabilities
/** Compute the capabilities. This should be overridden in subclasses as needed. */
protected def computeCapabilities: Set[Capability] = Set.empty
/** The type of a schema description (DDL) */
type SchemaDescription <: SchemaDescriptionDef
/** A schema description contains the SQL statements for creating and
* dropping database entities. Schema descriptions can be combined for
* creating or dropping multiple entities together, even if they have
* circular dependencies. */
trait SchemaDescriptionDef {
def ++(other: SchemaDescription): SchemaDescription
}
trait API extends Aliases with ExtensionMethodConversions {
type Database = Backend#Database
val Database = backend.Database
type Session = Backend#Session
type SlickException = slick.SlickException
implicit val slickProfile: self.type = self
@deprecated("User `slickProfile` instead of `slickDriver`", "3.2")
val slickDriver: self.type = slickProfile
implicit final def anyToShapedValue[T, U](value: T)(implicit shape: Shape[_ <: FlatShapeLevel, T, U, _]): ShapedValue[T, U] =
new ShapedValue[T, U](value, shape)
implicit def repQueryActionExtensionMethods[U](rep: Rep[U]): QueryActionExtensionMethods[U, NoStream] =
createQueryActionExtensionMethods[U, NoStream](queryCompiler.run(rep.toNode).tree, ())
implicit def streamableQueryActionExtensionMethods[U, C[_]](q: Query[_,U, C]): StreamingQueryActionExtensionMethods[C[U], U] =
createStreamingQueryActionExtensionMethods[C[U], U](queryCompiler.run(q.toNode).tree, ())
implicit def runnableCompiledQueryActionExtensionMethods[RU](c: RunnableCompiled[_, RU]): QueryActionExtensionMethods[RU, NoStream] =
createQueryActionExtensionMethods[RU, NoStream](c.compiledQuery, c.param)
implicit def streamableCompiledQueryActionExtensionMethods[RU, EU](c: StreamableCompiled[_, RU, EU]): StreamingQueryActionExtensionMethods[RU, EU] =
createStreamingQueryActionExtensionMethods[RU, EU](c.compiledQuery, c.param)
// Applying a CompiledFunction always results in only a RunnableCompiled, not a StreamableCompiled, so we need this:
implicit def streamableAppliedCompiledFunctionActionExtensionMethods[R, RU, EU, C[_]](c: AppliedCompiledFunction[_, Query[R, EU, C], RU]): StreamingQueryActionExtensionMethods[RU, EU] =
createStreamingQueryActionExtensionMethods[RU, EU](c.compiledQuery, c.param)
implicit def recordQueryActionExtensionMethods[M, R](q: M)(implicit shape: Shape[_ <: FlatShapeLevel, M, R, _]): QueryActionExtensionMethods[R, NoStream] =
createQueryActionExtensionMethods[R, NoStream](queryCompiler.run(shape.toNode(q)).tree, ())
}
/** The API for using the query language with a single import
* statement. This provides the profile's implicits, the Database API
* and commonly used query language types and objects. */
val api: API
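  // Hedged illustration (not in the original source): a concrete profile such as
  // slick.jdbc.H2Profile exposes this `api` value, so user code typically brings the
  // whole query language into scope with a single import, e.g.
  //
  //   import slick.jdbc.H2Profile.api._
  //   val action = someTableQuery.filter(_.id === 42L).result
  //   db.run(action)
  //
  // `someTableQuery` and `db` are assumed names from user code; only the import
  // pattern and the Action-based execution style are the point here.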
/** The compiler used for queries */
def queryCompiler: QueryCompiler
/** The compiler used for updates */
def updateCompiler: QueryCompiler
/** The compiler used for deleting data */
def deleteCompiler: QueryCompiler
/** The compiler used for inserting data */
def insertCompiler: QueryCompiler
/** The type of a (partially) compiled AST for Insert operations. Unlike
* querying or deleting, inserts may require different compilation results
* which should be computed lazily. */
type CompiledInsert
  /** (Partially) compile an AST for insert operations */
def compileInsert(n: Node): CompiledInsert
/* internal: */
/** The configuration for this profile, loaded via [[loadProfileConfig]]. */
protected[this] final lazy val profileConfig: Config = loadProfileConfig
/** Load the configuration for this profile. This can be overridden in user-defined
* subclasses to load different configurations.
*
* The default implementation does a breadth-first search in the supertype hierarchy of the
* runtime class until it finds a class or trait with a name matching "slick.[...]Profile"
   * and then uses this name as a path in the application config. If no configuration
* exists at this path, an empty Config object is returned. */
protected[this] def loadProfileConfig: Config = {
def findConfigName(classes: Vector[Class[_]]): Option[String] =
classes.iterator.map { cl =>
val n = cl.getName
if(n.startsWith("slick.") && n.endsWith("Profile")) Some(n) else None
}.find(_.isDefined).getOrElse {
val parents = classes.flatMap { cl => Option(cl.getSuperclass) ++: cl.getInterfaces.toVector }
if(parents.isEmpty) None else findConfigName(parents)
}
GlobalConfig.profileConfig(findConfigName(Vector(getClass)).get)
}
override def toString = {
val n = getClass.getName
if(n.startsWith("slick.") && n.endsWith("Profile$")) n
else super.toString
}
}
trait BasicActionComponent { self: BasicProfile =>
type ProfileAction[+R, +S <: NoStream, -E <: Effect] <: BasicAction[R, S, E]
type StreamingProfileAction[+R, +T, -E <: Effect] <: BasicStreamingAction[R, T, E] with ProfileAction[R, Streaming[T], E]
@deprecated("Use `ProfileAction` instead of `DriverAction`", "3.2")
final type DriverAction[+R, +S <: NoStream, -E <: Effect] = ProfileAction[R, S, E]
@deprecated("Use `StreamingProfileAction` instead of `StreamingDriverAction`", "3.2")
final type StreamingDriverAction[+R, +T, -E <: Effect] = StreamingProfileAction[R, T, E]
//////////////////////////////////////////////////////////// Query Actions
type QueryActionExtensionMethods[R, S <: NoStream] <: QueryActionExtensionMethodsImpl[R, S]
type StreamingQueryActionExtensionMethods[R, T] <: StreamingQueryActionExtensionMethodsImpl[R, T]
def createQueryActionExtensionMethods[R, S <: NoStream](tree: Node, param: Any): QueryActionExtensionMethods[R, S]
def createStreamingQueryActionExtensionMethods[R, T](tree: Node, param: Any): StreamingQueryActionExtensionMethods[R, T]
trait QueryActionExtensionMethodsImpl[R, S <: NoStream] {
/** An Action that runs this query. */
def result: ProfileAction[R, S, Effect.Read]
}
trait StreamingQueryActionExtensionMethodsImpl[R, T] extends QueryActionExtensionMethodsImpl[R, Streaming[T]] {
def result: StreamingProfileAction[R, T, Effect.Read]
}
}
trait BasicAction[+R, +S <: NoStream, -E <: Effect] extends DatabaseAction[R, S, E] {
type ResultAction[+R, +S <: NoStream, -E <: Effect] <: BasicAction[R, S, E]
}
trait BasicStreamingAction[+R, +T, -E <: Effect] extends BasicAction[R, Streaming[T], E] {
/** Create an Action that returns only the first value of this stream of data. The Action will
* fail if the stream is empty. Only available on streaming Actions. */
def head: ResultAction[T, NoStream, E]
/** Create an Action that returns only the first value of this stream of data as an `Option`.
* Only available on streaming Actions. */
def headOption: ResultAction[Option[T], NoStream, E]
}
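// Hedged illustration (not in the original source): with a concrete profile the
// streaming actions above are normally reached through `result`, e.g.
//
//   db.run(someQuery.result.headOption)   // Option of the first row, or None
//   db.run(someQuery.result.head)         // first row; the action fails if the stream is empty
//
// `db` and `someQuery` are assumed names from user code.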
trait FixedBasicAction[+R, +S <: NoStream, -E <: Effect] extends BasicAction[R, S, E] {
type ResultAction[+R, +S <: NoStream, -E <: Effect] = BasicAction[R, S, E]
}
trait FixedBasicStreamingAction[+R, +T, -E <: Effect] extends BasicStreamingAction[R, T, E] with FixedBasicAction[R, Streaming[T], E]
| Radsaggi/slick | slick/src/main/scala/slick/basic/BasicProfile.scala | Scala | bsd-2-clause | 8,280 |
/*
* Copyright (c) 2011-16 Miles Sabin
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package shapeless
import scala.language.experimental.macros
import scala.annotation.tailrec
import scala.reflect.macros.whitebox
/**
* Base trait for type level natural numbers.
*
* @author Miles Sabin
*/
trait Nat {
type N <: Nat
}
/**
* Encoding of successor.
*
* @author Miles Sabin
*/
case class Succ[P <: Nat]() extends Nat {
type N = Succ[P]
}
/**
* Encoding of zero.
*
* @author Miles Sabin
*/
class _0 extends Nat with Serializable {
type N = _0
}
/**
* Type level encoding of the natural numbers.
*
* @author Miles Sabin
*/
object Nat extends Nats {
import ops.nat._
import syntax.NatOps
implicit def apply(i: Int): Nat = macro NatMacros.materializeWidened
/** The natural number 0 */
type _0 = shapeless._0
val _0: _0 = new _0
def toInt[N <: Nat](implicit toIntN : ToInt[N]) = toIntN()
def toInt(n : Nat)(implicit toIntN : ToInt[n.N]) = toIntN()
implicit def natOps[N <: Nat](n : N) : NatOps[N] = new NatOps(n)
}
@macrocompat.bundle
class NatMacros(val c: whitebox.Context) extends NatMacroDefns {
import c.universe._
def materializeWidened(i: Tree): Tree =
i match {
case NatLiteral(n) => mkNatValue(n)
case _ =>
c.abort(c.enclosingPosition, s"Expression $i does not evaluate to a non-negative Int literal")
}
}
@macrocompat.bundle
trait NatMacroDefns {
val c: whitebox.Context
import c.universe._
object NatLiteral {
def unapply(i: Tree): Option[Int] =
i match {
case Literal(Constant(n: Int)) if n >= 0 => Some(n)
case _ => None
}
}
def mkNatTpt(i: Int): Tree = {
val succSym = typeOf[Succ[_]].typeConstructor.typeSymbol
val _0Sym = typeOf[_0].typeSymbol
@tailrec
def loop(i: Int, acc: Tree): Tree = {
if(i == 0) acc
else loop(i-1, AppliedTypeTree(Ident(succSym), List(acc)))
}
loop(i, Ident(_0Sym))
}
def mkNatTpe(i: Int): Type = {
val succTpe = typeOf[Succ[_]].typeConstructor
val _0Tpe = typeOf[_0]
@tailrec
def loop(i: Int, acc: Type): Type = {
if(i == 0) acc
else loop(i-1, appliedType(succTpe, acc))
}
loop(i, _0Tpe)
}
def mkNatValue(i: Int): Tree =
q""" new ${mkNatTpt(i)} """
}
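// Hedged usage sketch (not part of the original file): the implicit macro
// `Nat.apply` above lifts non-negative Int literals to type-level naturals, and
// `Nat.toInt` reflects them back to values via the `ToInt` type class, e.g.
//
//   val three: Nat = 3                     // materialized as Succ[Succ[Succ[_0]]]
//   Nat.toInt[Succ[Succ[_0]]]              // == 2
//
// (Shown as comments because a macro cannot be expanded in the same compilation
// unit that defines it.)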
| rorygraves/perf_tester | corpus/shapeless/src/main/scala/shapeless/nat.scala | Scala | apache-2.0 | 2,822 |
package com.nidkil.downloader.manager
import java.io.File
import java.net.URL
import org.scalatest.FunSpec
import org.scalatest.Matchers
import org.scalatest.Tag
import com.nidkil.downloader.cleaner.DefaultCleaner
import com.nidkil.downloader.datatypes.Download
import com.nidkil.downloader.merger.DefaultMerger
import com.nidkil.downloader.splitter.DefaultSplitter
import com.nidkil.downloader.utils.Checksum
import com.nidkil.downloader.utils.Timer
import com.nidkil.downloader.utils.UrlUtils
import com.nidkil.downloader.merger.Merger
import org.apache.commons.io.FileUtils
class DownloadManagerTest extends FunSpec with Matchers {
def curDir = new java.io.File(".").getCanonicalPath
describe("A DownloadManager") {
val splitter = new DefaultSplitter()
val merger = new DefaultMerger()
val cleanup = new DefaultCleaner()
val manager = new DefaultDownloadManager(splitter, merger, cleanup)
it("should download the specified file as chunks, merge the chunks and validate the merged file size", Tag("integration")) {
val timer = new Timer()
timer.start
val url = new URL("http://download.thinkbroadband.com/20MB.zip")
val id = Checksum.calculate(url.toString)
val downloadDir = new File(curDir, s"download")
val workDir = new File(downloadDir, id)
val destFile = new File(downloadDir, UrlUtils.extractFilename(url))
val download = new Download(id, url, destFile, workDir)
manager.execute(download, DefaultSplitter.ratioMinMaxStrategy)
info("destination file must exist")
assert(destFile.exists)
info("working directory must not exist")
assert(!workDir.exists)
FileUtils.forceDelete(destFile)
timer.stop
info(s"Exectime: ${timer.execTime()}")
}
it("should download the specified file as chunks, merge the chunks and validate the merged file checksum", Tag("integration")) {
val timer = new Timer()
timer.start
val url = new URL("http://apache.hippo.nl/tomcat/tomcat-7/v7.0.56/bin/apache-tomcat-7.0.56.tar.gz")
val id = Checksum.calculate(url.toString)
val downloadDir = new File(curDir, s"download")
val workDir = new File(downloadDir, id)
val destFile = new File(downloadDir, UrlUtils.extractFilename(url))
val download = new Download(Checksum.calculate(url.toString), url, destFile, workDir, "2887d0e3ca18bdca63004a0388c99775")
manager.execute(download, DefaultSplitter.ratioMaxStrategy)
info("destination file must exist")
assert(destFile.exists)
info("working directory must not exist")
assert(!workDir.exists)
FileUtils.forceDelete(destFile)
timer.stop
info(s"Exectime: ${timer.execTime()}")
}
it("should throw an exception if the checksum does not match", Tag("integration")) {
val timer = new Timer()
timer.start
val url = new URL("http://apache.hippo.nl/tomcat/tomcat-7/v7.0.56/bin/apache-tomcat-7.0.56-deployer.zip")
val id = Checksum.calculate(url.toString)
val downloadDir = new File(curDir, s"download")
val workDir = new File(downloadDir, id)
val destFile = new File(downloadDir, UrlUtils.extractFilename(url))
val download = new Download(Checksum.calculate(url.toString), url, destFile, workDir, "b3215c06647bc550406a9c8ccc378756")
intercept[DownloadManagerException] {
manager.execute(download, DefaultSplitter.ratioStrategy)
}
FileUtils.forceDelete(workDir)
timer.stop
info(s"Exectime: ${timer.execTime()}")
}
}
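  // Hedged note (not in the original file): these specs are tagged "integration" and hit
  // live download URLs, so they are usually run selectively, e.g. from sbt with something
  // like `testOnly com.nidkil.downloader.manager.DownloadManagerTest -- -n integration`
  // (ScalaTest's -n flag selects tests by tag; the exact invocation depends on the build).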
}
| nidkil/scala-downloader | src/test/scala/com/nidkil/downloader/manager/DownloadManagerTest.scala | Scala | apache-2.0 | 3,579 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.carbondata.examples
import java.io.File
import org.apache.spark.sql.{CarbonEnv, SaveMode, SparkSession}
import org.apache.carbondata.core.constants.CarbonCommonConstants
import org.apache.carbondata.core.metadata.CarbonTableIdentifier
import org.apache.carbondata.core.util.CarbonProperties
import org.apache.carbondata.examples.util.ExampleUtils
/**
 * This example shows how to create an external table with a location.
*/
object ExternalTableExample {
def main(args: Array[String]) {
val spark = ExampleUtils.createSparkSession("ExternalTableExample")
exampleBody(spark)
spark.close()
}
def exampleBody(spark : SparkSession): Unit = {
CarbonProperties.getInstance()
.addProperty(CarbonCommonConstants.CARBON_DATE_FORMAT, "yyyy/MM/dd")
// Create origin_table
spark.sql("DROP TABLE IF EXISTS origin_table")
spark.sql(
s"""
| CREATE TABLE origin_table(
| shortField SHORT,
| intField INT,
| bigintField LONG,
| doubleField DOUBLE,
| stringField STRING,
| timestampField TIMESTAMP,
| decimalField DECIMAL(18,2),
| dateField DATE,
| charField CHAR(5),
| floatField FLOAT
| )
| STORED AS carbondata
""".stripMargin)
val rootPath = new File(this.getClass.getResource("/").getPath
+ "../../../..").getCanonicalPath
val path = s"$rootPath/examples/spark/src/main/resources/data.csv"
    // load 4 times, each load has 10 rows of data
// scalastyle:off
(1 to 4).foreach(_ => spark.sql(
s"""
| LOAD DATA LOCAL INPATH '$path'
| INTO TABLE origin_table
| OPTIONS('HEADER'='true', 'COMPLEX_DELIMITER_LEVEL_1'='#')
""".stripMargin))
// scalastyle:on
// 40 rows
spark.sql("SELECT count(*) FROM origin_table").show()
val origin_table_path = CarbonEnv.getTablePath(Some("default"), "origin_table")(spark)
// Create external_table
spark.sql("DROP TABLE IF EXISTS external_table")
spark.sql("CREATE EXTERNAL TABLE external_table STORED AS carbondata" +
s" LOCATION '$origin_table_path'")
spark.sql("SELECT count(*) FROM external_table").show()
// Load 2 times again
(1 to 2).foreach(_ => spark.sql(
s"""
| LOAD DATA LOCAL INPATH '$path'
| INTO TABLE origin_table
| OPTIONS('HEADER'='true', 'COMPLEX_DELIMITER_LEVEL_1'='#')
""".stripMargin))
spark.sql("SELECT count(*) FROM external_table").show()
// Drop tables
spark.sql("DROP TABLE IF EXISTS origin_table")
spark.sql("DROP TABLE IF EXISTS external_table")
}
}
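// Hedged note (not in the original file): like the other CarbonData examples, this one is
// normally launched through spark-submit against the assembled examples jar, e.g. roughly
//   spark-submit --class org.apache.carbondata.examples.ExternalTableExample <examples-jar>
// where <examples-jar> stands for the locally built artifact; exact names vary by build.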
| zzcclp/carbondata | examples/spark/src/main/scala/org/apache/carbondata/examples/ExternalTableExample.scala | Scala | apache-2.0 | 3,521 |
/**
* Created by cuz on 6/6/17.
*/
object ScalacOptions {
val all = Seq(
"-deprecation", // Emit warning and location for usages of deprecated APIs.
"-encoding", "utf-8", // Specify character encoding used by source files.
"-explaintypes", // Explain type errors in more detail.
"-feature", // Emit warning and location for usages of features that should be imported explicitly.
"-language:existentials", // Existential types (besides wildcard types) can be written and inferred
"-language:experimental.macros", // Allow macro definition (besides implementation and application)
"-language:higherKinds", // Allow higher-kinded types
"-language:implicitConversions", // Allow definition of implicit functions called views
"-unchecked", // Enable additional warnings where generated code depends on assumptions.
"-Xcheckinit", // Wrap field accessors to throw an exception on uninitialized access.
"-Xfatal-warnings", // Fail the compilation if there are any warnings.
"-Xfuture", // Turn on future language features.
"-Xlint:adapted-args", // Warn if an argument list is modified to match the receiver.
"-Xlint:by-name-right-associative", // By-name parameter of right associative operator.
"-Xlint:constant", // Evaluation of a constant arithmetic expression results in an error.
"-Xlint:delayedinit-select", // Selecting member of DelayedInit.
"-Xlint:doc-detached", // A Scaladoc comment appears to be detached from its element.
"-Xlint:inaccessible", // Warn about inaccessible types in method signatures.
"-Xlint:infer-any", // Warn when a type argument is inferred to be `Any`.
"-Xlint:missing-interpolator", // A string literal appears to be missing an interpolator id.
"-Xlint:nullary-override", // Warn when non-nullary `def f()' overrides nullary `def f'.
"-Xlint:nullary-unit", // Warn when nullary methods return Unit.
"-Xlint:option-implicit", // Option.apply used implicit view.
"-Xlint:package-object-classes", // Class or object defined in package object.
"-Xlint:poly-implicit-overload", // Parameterized overloaded implicit methods are not visible as view bounds.
"-Xlint:private-shadow", // A private field (or class parameter) shadows a superclass field.
"-Xlint:stars-align", // Pattern sequence wildcard must align with sequence component.
"-Xlint:type-parameter-shadow", // A local type parameter shadows a type already in scope.
"-Xlint:unsound-match", // Pattern match may not be typesafe.
"-Yno-adapted-args", // Do not adapt an argument list (either by inserting () or creating a tuple) to match the receiver.
"-Ypartial-unification", // Enable partial unification in type constructor inference
"-Ywarn-dead-code", // Warn when dead code is identified.
"-Ywarn-extra-implicit", // Warn when more than one implicit parameter section is defined.
"-Ywarn-inaccessible", // Warn about inaccessible types in method signatures.
"-Ywarn-infer-any", // Warn when a type argument is inferred to be `Any`.
"-Ywarn-nullary-override", // Warn when non-nullary `def f()' overrides nullary `def f'.
"-Ywarn-nullary-unit", // Warn when nullary methods return Unit.
"-Ywarn-numeric-widen", // Warn when numerics are widened.
"-Ywarn-unused:implicits", // Warn if an implicit parameter is unused.
"-Ywarn-unused:imports", // Warn if an import selector is not referenced.
"-Ywarn-unused:locals", // Warn if a local definition is unused.
"-Ywarn-unused:params", // Warn if a value parameter is unused.
"-Ywarn-unused:patvars", // Warn if a variable bound in a pattern is unused.
"-Ywarn-unused:privates", // Warn if a private member is unused.
"-Ywarn-value-discard" // Warn when non-Unit expression results are unused.
)
val tlsOption = Seq(
"-Yinduction-heuristics", // speeds up the compilation of inductive implicit resolution
"-Ykind-polymorphism", // type and method definitions with type parameters of arbitrary kinds
"-Yliteral-types", // literals can appear in type position
"-Xstrict-patmat-analysis", // more accurate reporting of failures of match exhaustivity
"-Xlint:strict-unsealed-patmat" // warn on inexhaustive matches against unsealed traits
)
}
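// Hedged usage sketch (not part of the original file): since this lives under project/,
// the option sets are typically wired into build.sbt roughly as
//
//   scalacOptions ++= ScalacOptions.all
//   // scalacOptions ++= ScalacOptions.tlsOption   // only when compiling with Typelevel Scala
//
// `scalacOptions` is the standard sbt setting key.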
| cuzfrog/scala_sbt_template | project/ScalacOptions.scala | Scala | apache-2.0 | 4,285 |
/*
* Copyright 2007-2011 WorldWide Conferencing, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.liftweb
package builtin
package snippet
import scala.xml._
import net.liftweb.http._
import net.liftweb.util._
import net.liftweb.common._
import Helpers._
/**
* This object is the default handler for the <lift:form> tag, which
* is used to perform AJAX submission of form contents. If the "onsubmit"
* attribute is set on this tag, then the contents there will be run prior
* to the actual AJAX call. If a "postsubmit" attribute is present on the
* tag, then its contents will be executed after successful submission of
* the form.
*/
object Form extends DispatchSnippet {
def dispatch : DispatchIt = {
case "render" => render _
case "ajax" => render _
case "post" => post _
}
/**
* Add the post method and postback (current URL) as action.
* If the multipart attribute is specified, set the enctype
* as "multipart/form-data"
*/
def post(kids: NodeSeq): NodeSeq = {
// yeah it's ugly, but I'm not sure
// we could do it reliably with pattern matching
// dpp Oct 29, 2010
val ret: Elem =
if (kids.length == 1 &&
kids(0).isInstanceOf[Elem] &&
(kids(0).prefix eq null) &&
kids(0).label == "form") {
val e = kids(0).asInstanceOf[Elem]
val meta =
new UnprefixedAttribute("method", "post",
new UnprefixedAttribute(
"action", S.uri,
e.attributes.filter {
case up: UnprefixedAttribute =>
up.key != "method" && up.key != "action"
case x => true
}))
new Elem(null, "form", meta , e.scope, e.child :_*)
} else {
<form method="post" action={S.uri}>{kids}</form>
}
S.attr("multipart") match {
case Full(x) if Helpers.toBoolean(x) => ret % ("enctype" -> "multipart/form-data")
case _ => ret
}
}
def render(kids: NodeSeq) : NodeSeq = {
// yeah it's ugly, but I'm not sure
// we could do it reliably with pattern matching
// dpp Oct 29, 2010
if (kids.length == 1 &&
kids(0).isInstanceOf[Elem] &&
(kids(0).prefix eq null) &&
kids(0).label == "form") {
new Elem(null, "form", addAjaxForm , TopScope, kids(0).child :_*)
} else {
Elem(null, "form", addAjaxForm, TopScope, kids : _*)
}
}
private def addAjaxForm: MetaData = {
val id = Helpers.nextFuncName
val attr = S.currentAttrsToMetaData(name => name != "id" && name != "onsubmit" && name != "action")
val pre = S.attr.~("onsubmit").map(_.text + ";") getOrElse ""
val post = S.attr.~("postsubmit").map("function() { " + _.text + "; }")
val ajax: String = pre + SHtml.makeAjaxCall(LiftRules.jsArtifacts.serialize(id), AjaxContext.js(post)).toJsCmd + ";" + "return false;"
new UnprefixedAttribute("id", Text(id),
new UnprefixedAttribute("action", Text("javascript://"),
new UnprefixedAttribute("onsubmit", Text(ajax), attr)))
}
}
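// Hedged usage sketch (not part of the original source): in a Lift template the snippet
// above is typically invoked as markup, e.g.
//
//   <lift:form onsubmit="prepare();" postsubmit="done();">
//     <input type="text" name="who" /> <button type="submit">Go</button>
//   </lift:form>
//
// `prepare()` / `done()` are hypothetical JavaScript hooks; `render` above rewrites the
// form so that it submits via an AJAX call instead of a normal page post.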
| pbrant/framework | web/webkit/src/main/scala/net/liftweb/builtin/snippet/Form.scala | Scala | apache-2.0 | 3,878 |