code (stringlengths 5..1M) | repo_name (stringlengths 5..109) | path (stringlengths 6..208) | language (stringclasses, 1 value) | license (stringclasses, 15 values) | size (int64, 5..1M)
---|---|---|---|---|---
// Copyright 2019 EPFL DATA Lab (data.epfl.ch)
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package squid
package ir
import utils._
import CollectionUtils.TraversableOnceHelper
import utils.meta.{RuntimeUniverseHelpers => ruh}
import ruh.{srum, sru}
import squid.lang.Base
import squid.lang.IntermediateBase
import squid.quasi.ModularEmbedding
import sru.{internal => srui}
import sru.{Type => ScalaType}
import scala.reflect.runtime.universe.TypeTag
import scala.collection.mutable
object ScalaTyping {
sealed trait TypeHole[A <: String] // TODO make type hole traits extend this
}
import ScalaTyping._
trait ScalaTyping extends Base with TraceDebug {
self: IntermediateBase => // for 'repType' TODO rm
/** It can be useful to be able to retrieve a ClassTag from a CodeType, especially when dealing with Arrays. */
implicit class ScalaTypingCodeTypeOps[T](self: CodeType[T]) {
import scala.reflect.{classTag, ClassTag}
def classTag: ClassTag[T] = ClassTag[T](runtimeClass)
def runtimeClass: Class[T] = sru.rootMirror.runtimeClass(self.rep.tpe).asInstanceOf[Class[T]]
def classTagCode: ClosedCode[ClassTag[T]] = {
implicit val T = self
import Predef.{Const => _, _}
code"ClassTag[T](${Const(runtimeClass)})"
}
}
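// Hypothetical usage of the above (assuming some `CodeType[Array[Int]]` instance `T` is in scope):
//   T.classTag : ClassTag[Array[Int]] -- handy e.g. when instantiating Arrays from generated code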
type TypSymbol = sru.TypeSymbol
type TypParam = sru.FreeTypeSymbol
//type TypeRep = ScalaType
implicit class TypeRep(val tpe: ScalaType) {
override def toString = sru.show(tpe)
override def equals(that: Any) = that.isInstanceOf[TypeRep] && that.asInstanceOf[TypeRep].tpe =:= tpe
override def hashCode: Int = tpe.hashCode
}
object TypeRep { def unapply(t: TypeRep) = Some(t.tpe) }
implicit def toScala(tr: TypeRep): ScalaType = tr.tpe // TODO rm
class ExtractedType(val variance: Variance, val lb: ScalaType, val ub: ScalaType)
extends TypeRep(if (variance == Contravariant) ub else lb)
// ^ Note: sole purpose of `variance` field is to make the choice a little smarter and usually more appropriate...
// though choosing `ub` or `lb` here is kind of arbitrary; it means that when extracting a type that is soundly
// allowed to range within T0..T1, we pick T0 or T1 as the representative when a single type ends up being used.
// The intuition is that this is the 'most precise' type satisfying the constraint – similar to what Scala's type
// inference does (which sometimes has Nothing or Any inferred, depending on variance of where the type occurs).
// As an example, in: `Some(0) match { case code"Some($v:$t)" => ... }`, `t` is inferred as `+Int..Any`; here the
// intuitive thing to do when asked to materialize it (as in: pick a single type) is to make it `Int`, not `Any`.
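// Dually, a Contravariant hole materializes as its upper bound `ub` (cf. the `extends` clause above):
// in argument (contravariant) position, the largest allowed type is the 'most precise' representative.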
{
assert(lb <:< ub, s"! $lb <:< $ub")
override lazy val toString = variance.symbol * variance.asInt.abs +
(if (lb =:= ub) s"$ub"
else if (lb <:< ruh.Nothing) s"..$ub"
else if (ruh.Any <:< ub) s"$lb.."
else s"$lb..$ub")
}
object ExtractedType {
def apply(variance: Variance, lb: ScalaType, ub: ScalaType) = new ExtractedType(variance,lb,ub)
def apply(vari: Variance, tpe: ScalaType): ExtractedType = vari match {
case Invariant => ExtractedType(vari,tpe,tpe)
case Covariant => ExtractedType(vari,tpe,ruh.Any)
case Contravariant => ExtractedType(vari,ruh.Nothing,tpe)
}
def ifNonEmpty(variance: Variance, lb: ScalaType, ub: ScalaType) = ExtractedType(variance, lb, ub) optionIf (lb <:< ub)
def unapply(tp: TypeRep): Some[(Variance, ScalaType, ScalaType)] = Some(tp match {
case et: ExtractedType => (et.variance, et.lb, et.ub)
case TypeRep(tp) => (Invariant, tp, tp)
})
}
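/** Merges two extracted type intervals into a single interval satisfying both constraints, if one exists.
* For example, merging `Int..Any` with `Nothing..AnyVal` yields `Int..AnyVal` (second fast path below);
* in the general case we take the `lub` of the lower bounds and the `glb` of the upper bounds,
* and fail if the resulting interval is empty. */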
override def mergeTypes(a: TypeRep, b: TypeRep): Option[ExtractedType] = (a,b) match {
case (ExtractedType(av,a0,a1), ExtractedType(bv,b0,b1)) =>
val v = if (av == bv) av else Invariant
if ((a0 <:< b0) && (b0 <:< a1)) Some(ExtractedType(v,b0,a1))
else if ((b0 <:< a0) && (a0 <:< b1)) Some(ExtractedType(v,a0,b1))
// ^ Note: two cases above are kind of redundant with the one below, but they're probably a bit faster/simpler to compute
else ExtractedType.ifNonEmpty(v, sru.lub(a0::b0::Nil), sru.glb(a1::b1::Nil))
}
def uninterpretedType[A: TypeTag]: TypeRep = sru.typeTag[A].tpe
//def typeApp(self: Rep, typ: TypSymbol, targs: List[TypeRep]): TypeRep = sru.internal.typeRef(repType(self), typ, targs map (_ tpe))
def typeApp(self: TypeRep, typ: TypSymbol, targs: List[TypeRep]): TypeRep =
sru.internal.typeRef(self, typ, targs map (_ tpe))
def staticTypeApp(typ: TypSymbol, targs: List[TypeRep]): TypeRep = {
if (typ.isStatic)
sru.internal.typeRef(typ.owner.asType.toType, typ, targs map (_ tpe))
else {
assert(typ.isParameter)
typ.toType
}
}
def valType(self: TypeRep, valName: String): TypeRep =
sru.internal.singleType(self, self.tpe.member(sru.TermName(valName)))
def constType(value: Any, underlying: TypeRep): TypeRep = constType(value)
def constType(value: Any): TypeRep = sru.internal.constantType(sru.Constant(value))
def typeHole(name: String): TypeRep = TypeHoleRep(name)
def typeParam(name: String): TypParam =
sru.internal.newFreeType(name, sru.Flag.PARAM)
def typLeq(a: TypeRep, b: TypeRep): Boolean = a <:< b
def weakTypLeq(a: TypeRep, b: TypeRep, va: Variance = Covariant): Boolean = extractType(a, b, va).isDefined
def lambdaType(paramTyps: List[TypeRep], ret: TypeRep): TypeRep =
ruh.FunctionType(paramTyps map (_ tpe): _*)(ret)
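/** Type holes are encoded as ordinary Scala types of the form `ScalaTyping.TypeHole["name"]`:
* a TypeRef to the `TypeHole` trait applied to a constant (literal string) type.
* E.g. `typeHole("t")` yields a type that prints roughly as `TypeHole[String("t")]`. */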
object TypeHoleRep {
import sru._
def apply(name: String) =
srui.typeRef(typeOf[ScalaTyping.type], symbolOf[TypeHole[_]], srui.constantType(Constant(name)) :: Nil)
//def unapply(tp: TypeRep) = tp.tpe match {
def unapply(tp: Type) = tp match {
case ht @ TypeRef(_, sym, arg :: Nil) if sym == symbolOf[TypeHole[_]] =>
//debug(arg,arg.getClass)
arg match {
case ConstantType(sru.Constant(name: String)) =>
Some(name)
case _ =>
System.err.println(s"Warning: hole type `$ht` has been widened.")
None
}
case _ => None
}
}
def hasHoles(tp: TypeRep) = tp exists { case TypeHoleRep(_) => true case _ => false }
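/** Matches the (possibly hole-containing) pattern type `self` against the scrutinee type `other`
* under variance `va`, returning the extracted hole bindings on success (cf. `Extract`). */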
def extractType(self: TypeRep, other: TypeRep, va: Variance): Option[Extract] = {
import sru._
import ruh._
debug(s"$va Match $other with $self")
nestDbg { self.tpe -> other.tpe match {
// TODO thisType, constantType
case TypeHoleRep(name) -> _ => Some(Map(), Map(name -> ExtractedType(va, other)), Map())
case RefinedType(ps0, scp0) -> PossibleRefinement(ps1, scp1) =>
if (ps0.size != ps1.size) return None
val ps = (ps0.iterator zipAnd ps1){extractType(_,_,va)}
val syms1 = mutable.HashMap(scp1 map (s => s.name -> s) toSeq: _*)
val scp = scp0.iterator flatMap { s0 =>
val s1 = syms1 getOrElse (s0.name, return None)
if (s0.alternatives.size != s1.alternatives.size) return None
(s0.alternatives zipAnd s1.alternatives) {
case (s0,s1) => extractType(s0.typeSignature, s1.typeSignature, va)
}
}
mergeAll(ps ++ scp)
//case TypeRef(tp0, sum0, targs0) -> xtyp =>
case typ -> xtyp => // FIXME: is it okay to do this here? we should probably ensure typ is a TypeRef...
val targs = typ.typeArgs
//if (xtyp.typeSymbol.isParameter) return None alsoDo debug(s"Cannot match parameter type `$xtyp`.")
//// ^ Sometimes we get this kind of types because of type-tag-based uninterpretedType
// ^ We now use scala-reflect "free types" with the PARAM flag to encode type parameters
if (xtyp =:= ruh.Null) return None alsoDo debug(s"Cannot match type `Null`.") // TODO actually allow this match (but soundly)
def erron = None alsoDo debug(s"Method `baseType` returned an erroneous type.")
if (va == Contravariant && xtyp.typeSymbol != typ.typeSymbol) {
val baseTargs = typ.baseType(xtyp.typeSymbol) match {
case NoType =>
debug(s"$va $typ is not an instance of ${xtyp.typeSymbol}")
if (typ <:< xtyp) {
debug(s"... but $typ is still somehow a subtype of ${xtyp}")
//assert(Any <:< xtyp,
// s"$typ <:< $xtyp but !($xtyp >:> Any) and ${xtyp.typeSymbol} is not a base type of $typ")
assert(typ <:< ruh.Nothing,
s"$typ <:< $xtyp but !($typ <:< Nothing) and ${xtyp.typeSymbol} is not a base type of $typ")
Stream continually None
}
else return None
case base if base |> isErroneous => return erron
case base =>
debug(s"$va $typ is an instance of ${xtyp.typeSymbol} as '$base'")
base.typeArgs.toStream.map(Some.apply)
}
if (isDebugEnabled) {
val ets = baseTargs zip xtyp.typeArgs zip (xtyp.typeSymbol.asType.typeParams map (Variance of _.asType) map (_ * va))
if (ets nonEmpty) debug(s"$va Extr Targs(contra case): "+(ets mkString " "))
}
assert(!baseTargs.hasDefiniteSize || xtyp.typeArgs.size == baseTargs.size)
val extr = (baseTargs zip xtyp.typeArgs zip xtyp.typeSymbol.asType.typeParams) map {
case ((a,b), p) =>
val newVa = (Variance of p.asType) * va
extractType(a.fold[TypeRep](ExtractedType(newVa,ruh.Nothing,ruh.Any))(TypeRep), b, newVa) getOrElse (return None) }
val extr2 = targs zip typ.typeSymbol.asType.typeParams map {case(ta,tp) => /*Variance.of(tp.asType) match {
//case Covariant | Invariant => extractType(ta, Nothing, Contravariant)
case Covariant | Invariant => extractType(ta, Nothing, Covariant)
case Contravariant => extractType(ta, Any, Contravariant)
}*/
//extractType(ta, Nothing, Covariant)
extractType(ta, Any, Contravariant) // FIXME should really be the interval 'LB..HB'
} map (_ getOrElse (return None)) //filter (_ => false)
Some((extr ++ extr2).foldLeft[Extract](Map(), Map(), Map()){case(acc,a) => merge(acc,a) getOrElse (return None)})
} else if (va == Invariant && xtyp.typeSymbol != typ.typeSymbol) {
debug(s"${xtyp.typeSymbol} and ${typ.typeSymbol} cannot be matched invariantly")
None
} else { // Here, either we're in covariant case, or the typeSymbols match (or both)
val base = xtyp.baseType(typ.typeSymbol).orElse(NoType)
// ^ this `.orElse(NoType)` seems to semantically do nothing; in fact, it will convert a NoType in the wrong
// universe to a NoType in our current universe so that the comparison below `base == NoType` works correctly.
// The reason we sometimes get types from different universes (a.k.a. alien types) is too gruesome to be explained here.
val baseTargs = if (base == NoType) {
debug(s"$xtyp not an instance of ${typ.typeSymbol}")
if (xtyp <:< typ) {
debug(s"... but $xtyp is still somehow a subtype of ${typ}")
assert((xtyp <:< ruh.Nothing) || (xtyp <:< ruh.Null),
s"$xtyp <:< $typ but !($xtyp <:< Nothing) and ${typ.typeSymbol} is not a base type of $xtyp")
Stream continually None
}
else return None
}
else if (base |> isErroneous) return erron
else base.typeArgs.toStream.map(Some.apply)
if (isDebugEnabled) {
val ets = targs zip baseTargs zip (typ.typeSymbol.asType.typeParams map (Variance of _.asType) map (_ * va))
if (ets nonEmpty) debug(s"$va Extr Targs: " + (ets mkString " "))
}
assert(!baseTargs.hasDefiniteSize || targs.size == baseTargs.size, s"$baseTargs $targs")
val extr = (targs zip baseTargs zip typ.typeSymbol.asType.typeParams) map {
case ((a,b), p) =>
val newVa = (Variance of p.asType) * va
extractType(a, b.fold[TypeRep](ExtractedType(newVa,ruh.Nothing,ruh.Any))(TypeRep), newVa) getOrElse (return None) }
//dbg("EXTR",extr)
Some(extr.foldLeft[Extract](Map(), Map(), Map()){case(acc,a) => merge(acc,a) getOrElse (return None)})
}
}}
}
/** Note: will _not_ try to special-case type holes (which are encoded as normal Scala types...)
* This is because reinterpreted types do not usually come from extractor terms -- however, this assumption might turn out to be wrong at some point */
def reinterpretType(tr: TypeRep, newBase: Base): newBase.TypeRep = {
val modEmb = new ModularEmbedding[sru.type,newBase.type](sru, newBase, debug = x => debug(x))
modEmb.liftType(tr.tpe)
}
def nullValue[T: CodeType]: ClosedCode[T] = {
val tp = implicitly[CodeType[T]].rep.tpe |>=? {
case TypeHoleRep(name) => throw new IllegalArgumentException("Type hole has no known nullValue.")
}
Code(const(
if (tp <:< sru.typeOf[Unit]) NULL_UNIT
else if (tp <:< sru.typeOf[Bool]) NULL_BOOL
else if (tp <:< sru.typeOf[Char]) NULL_CHAR
else if (tp <:< sru.typeOf[Byte]) NULL_BYTE
else if (tp <:< sru.typeOf[Short]) NULL_SHORT
else if (tp <:< sru.typeOf[Int]) NULL_INT
else if (tp <:< sru.typeOf[Long]) NULL_LONG
else if (tp <:< sru.typeOf[Float]) NULL_FLOAT
else if (tp <:< sru.typeOf[Double]) NULL_DOUBLE
else {
val N = sru.typeOf[Null]
assert(N <:< tp || tp <:< N, // second case is for the stupid literal type Null(null)
s"$tp is not nullable nor .")
NULL_NULL
}
))
}
private var NULL_UNIT: Unit = _
private var NULL_BOOL: Bool = _
private var NULL_CHAR: Char = _
private var NULL_BYTE: Byte = _
private var NULL_SHORT: Short = _
private var NULL_INT: Int = _
private var NULL_LONG: Long = _
private var NULL_FLOAT: Float = _
private var NULL_DOUBLE: Double = _
private var NULL_NULL: Null = _
}
| epfldata/squid | src/main/scala/squid/ir/ScalaTyping.scala | Scala | apache-2.0 | 14,932 |
package es.upm.fi.oeg.morph.tc
import collection.JavaConversions._
class D006Test extends R2RMLTest("D006-1table1primarykey1column1row") {
"TC0006a" should "gen 1 in NG" in{
val dg=generate("R2RMLTC0006a")
dg.getGraph(dg.listGraphNodes.toList.head).size should be (1)
}
} | jpcik/morph | morph-r2rml-tc/src/test/scala/es/upm/fi/oeg/morph/tc/D006Test.scala | Scala | apache-2.0 | 282 |
package nexus.typelevel
import nexus._
import org.scalatest._
import shapeless._
/**
* @author Tongfei Chen
*/
class ToHListFromHListTest extends FunSuite {
class a; val a = new a
class b; val b = new b
class c; val c = new c
def toHList[A, Ah <: HList](a: A)(implicit t: ToHList.Aux[A, Ah]): Ah = t(a)
def fromHList[A, Ah <: HList](ah: Ah)(implicit t: FromHList.Aux[Ah, A]): A = t(ah)
test("ToHList should convert types to their canonical HList representation") {
assert(toHList(()) == HNil)
assert(toHList(1 -> 2) == 1 :: 2 :: HNil)
assert(toHList("a", "b", "c") == "a" :: "b" :: "c" :: HNil)
}
test("FromHList should convert HLists to their corresponding human-readable singleton/tuple type") {
assert(fromHList($) == ())
assert(fromHList(a :: $) == a)
assert(fromHList(b :: HNil) == b)
assert(fromHList(a :: b :: $) == (a, b))
}
}
| ctongfei/nexus | tensor/src/test/scala/nexus/typelevel/ToHListFromHListTest.scala | Scala | mit | 895 |
import pl.project13.scala.sbt.JmhPlugin
import sbt._
import sbt.Keys._
import sbtassembly.AssemblyKeys._
import sbtdocker.DockerKeys._
import sbtunidoc.Plugin._
import scoverage.ScoverageKeys._
object LinkerdBuild extends Base {
import Base._
import Grpc._
val Bundle = config("bundle")
val Dcos = config("dcos") extend Bundle
val LowMem = config("lowmem") extend Bundle
val configCore = projectDir("config")
.withTwitterLibs(Deps.finagle("core"))
.withLibs(Deps.jackson)
.withLib(Deps.jacksonYaml)
val consul = projectDir("consul")
.dependsOn(configCore)
.withTwitterLib(Deps.finagle("http"))
.withLibs(Deps.jackson)
.withTests()
val etcd = projectDir("etcd")
.withTwitterLib(Deps.finagle("http"))
.withLibs(Deps.jackson ++ Deps.jodaTime)
.withTests().withIntegration()
lazy val k8s = projectDir("k8s")
.dependsOn(Namer.core)
.withTwitterLib(Deps.finagle("http"))
.withLibs(Deps.jackson)
.withTests()
val marathon = projectDir("marathon")
.withTwitterLib(Deps.finagle("http"))
.withLibs(Deps.jackson)
.withTests()
object Router {
val core = projectDir("router/core")
.dependsOn(Finagle.buoyantCore)
.withTwitterLib(Deps.finagle("core"))
.withTests()
.withE2e()
.settings(coverageExcludedPackages := ".*XXX_.*")
val h2 = projectDir("router/h2")
.dependsOn(core, Finagle.h2)
.withTests()
.withE2e()
val http = projectDir("router/http")
.dependsOn(core)
.withTwitterLibs(Deps.finagle("http"))
.withTests()
.withE2e()
val mux = projectDir("router/mux")
.dependsOn(core)
.withTwitterLib(Deps.finagle("mux"))
.withE2e()
val thriftIdl = projectDir("router/thrift-idl")
.withTwitterLib(Deps.finagle("thrift"))
.settings(coverageExcludedPackages := ".*thriftscala.*")
val thrift = projectDir("router/thrift")
.withTwitterLib(Deps.finagle("thrift"))
.withTests()
.withE2e()
.dependsOn(
core,
thriftIdl % "test,e2e"
)
val all = aggregateDir("router", core, h2, http, mux, thrift)
}
object Mesh {
val core = projectDir("mesh/core")
.dependsOn(Grpc.runtime)
.withGrpc
val all = aggregateDir("mesh", core)
}
object Namer {
val core = projectDir("namer/core")
.dependsOn(configCore)
.withLib(Deps.jacksonCore)
.withTests()
val consul = projectDir("namer/consul")
.dependsOn(LinkerdBuild.consul, core)
.withTests()
val curator = projectDir("namer/curator")
.dependsOn(core)
.withLibs(Deps.curatorFramework, Deps.curatorClient, Deps.curatorDiscovery)
.withTests()
val fs = projectDir("namer/fs")
.dependsOn(core % "compile->compile;test->test")
.withTests()
val k8s = projectDir("namer/k8s")
.dependsOn(LinkerdBuild.k8s, core)
.withTests()
val marathon = projectDir("namer/marathon")
.dependsOn(LinkerdBuild.marathon, core)
.withLib(Deps.jwt)
.withTests()
val serversets = projectDir("namer/serversets")
.withTwitterLib(Deps.finagle("serversets").exclude("org.slf4j", "slf4j-jdk14"))
.withTests()
.dependsOn(core % "compile->compile;test->test")
val zkLeader = projectDir("namer/zk-leader")
.dependsOn(core)
.withLib(Deps.zkCandidate)
.withTests()
val all = aggregateDir("namer", core, consul, curator, fs, k8s, marathon, serversets, zkLeader)
}
val admin = projectDir("admin")
.dependsOn(configCore, Namer.core)
.withTwitterLib(Deps.twitterServer)
.withTwitterLib(Deps.finagle("stats"))
.withTests()
object Telemetry {
val core = projectDir("telemetry/core")
.dependsOn(configCore)
.withTwitterLib(Deps.finagle("core"))
.withTwitterLib(Deps.finagle("stats"))
.withTests()
val adminMetricsExport = projectDir("telemetry/admin-metrics-export")
.dependsOn(LinkerdBuild.admin, core)
.withLib(Deps.jacksonCore)
.withTests()
val prometheus = projectDir("telemetry/prometheus")
.dependsOn(LinkerdBuild.admin, core)
.withTwitterLibs(Deps.finagle("core"), Deps.finagle("stats"))
.withTests()
val statsd = projectDir("telemetry/statsd")
.dependsOn(core, Router.core)
.withLib(Deps.statsd)
.withTests()
val tracelog = projectDir("telemetry/tracelog")
.dependsOn(core, Router.core)
.withTests()
val recentRequests = projectDir("telemetry/recent-requests")
.dependsOn(admin, core, Router.core)
val zipkin = projectDir("telemetry/zipkin")
.withTwitterLibs(Deps.finagle("zipkin-core"), Deps.finagle("zipkin"))
.dependsOn(core, Router.core)
.withTests()
val all = aggregateDir("telemetry", adminMetricsExport, core, prometheus, recentRequests, statsd, tracelog, zipkin)
}
val ConfigFileRE = """^(.*)\.yaml$""".r
val execScriptJvmOptions =
"""|DEFAULT_JVM_OPTIONS="-Djava.net.preferIPv4Stack=true \\
| -Dsun.net.inetaddr.ttl=60 \\
| -Xms${JVM_HEAP_MIN:-32M} \\
| -Xmx${JVM_HEAP_MAX:-1024M} \\
| -XX:+AggressiveOpts \\
| -XX:+UseConcMarkSweepGC \\
| -XX:+CMSParallelRemarkEnabled \\
| -XX:+CMSClassUnloadingEnabled \\
| -XX:+ScavengeBeforeFullGC \\
| -XX:+CMSScavengeBeforeRemark \\
| -XX:+UseCMSInitiatingOccupancyOnly \\
| -XX:CMSInitiatingOccupancyFraction=70 \\
| -XX:-TieredCompilation \\
| -XX:+UseStringDeduplication \\
| -Dcom.twitter.util.events.sinkEnabled=false \\
| ${LOCAL_JVM_OPTIONS:-} "
|""".stripMargin
object Namerd {
val core = projectDir("namerd/core")
.dependsOn(
admin,
configCore,
Namer.core,
Namer.fs % "test",
Telemetry.core,
Telemetry.adminMetricsExport
)
.withTests()
object Storage {
val consul = projectDir("namerd/storage/consul")
.dependsOn(core)
.dependsOn(LinkerdBuild.consul)
.withTests()
val etcd = projectDir("namerd/storage/etcd")
.dependsOn(core, LinkerdBuild.etcd % "integration->integration;compile->compile")
.withTests()
.withIntegration()
val inMemory = projectDir("namerd/storage/in-memory")
.dependsOn(core % "test->test;compile->compile")
.withTests()
val k8s = projectDir("namerd/storage/k8s")
.dependsOn(core)
.dependsOn(LinkerdBuild.k8s)
.withTests()
val zk = projectDir("namerd/storage/zk")
.dependsOn(core)
.withTwitterLib(Deps.finagle("serversets").exclude("org.slf4j", "slf4j-jdk14"))
.withTests()
val all = aggregateDir("namerd/storage", consul, etcd, inMemory, k8s, zk)
}
object Iface {
val controlHttp = projectDir("namerd/iface/control-http")
.withTwitterLib(Deps.finagle("http"))
.withTests().dependsOn(
core % "test->test;compile->compile",
Storage.inMemory % "test"
)
val interpreterThriftIdl = projectDir("namerd/iface/interpreter-thrift-idl")
.withTwitterLib(Deps.finagle("thrift"))
.settings(coverageExcludedPackages := ".*thriftscala.*")
val interpreterThrift = projectDir("namerd/iface/interpreter-thrift")
.dependsOn(core, interpreterThriftIdl)
.withLib(Deps.guava)
.withTwitterLibs(Deps.finagle("thrift"), Deps.finagle("thriftmux"))
.withTests()
val mesh = projectDir("namerd/iface/mesh")
.dependsOn(core, Mesh.core)
val all = aggregateDir(
"namerd/iface",
controlHttp, interpreterThriftIdl, interpreterThrift, mesh
)
}
val main = projectDir("namerd/main")
.dependsOn(core, admin, configCore)
.withBuildProperties("io/buoyant/namerd")
.settings(coverageExcludedPackages := ".*")
/**
* An assembly-running script that adds the namerd plugin directory
* to the classpath if it exists.
*/
val execScript = (
"""|#!/bin/sh
|
|jars="$0"
|if [ -n "$NAMERD_HOME" ] && [ -d $NAMERD_HOME/plugins ]; then
| for jar in $NAMERD_HOME/plugins/*.jar ; do
| jars="$jars:$jar"
| done
|fi
|""" +
execScriptJvmOptions +
"""|exec "${JAVA_HOME:-/usr}/bin/java" -XX:+PrintCommandLineFlags \\
| ${JVM_OPTIONS:-$DEFAULT_JVM_OPTIONS} -cp $jars -server \\
| io.buoyant.namerd.Main "$@"
|"""
).stripMargin
val BundleSettings = Defaults.configSettings ++ appPackagingSettings ++ Seq(
mainClass := Some("io.buoyant.namerd.Main"),
assemblyExecScript := execScript.split("\n").toSeq,
dockerEnvPrefix := "NAMERD_",
unmanagedBase := baseDirectory.value / "plugins",
assemblyJarName in assembly := s"${name.value}-${version.value}-exec",
dockerTag := version.value
)
val BundleProjects = Seq[ProjectReference](
core, main, Namer.fs, Storage.inMemory, Router.http,
Iface.controlHttp, Iface.interpreterThrift, Iface.mesh,
Namer.consul, Namer.k8s, Namer.marathon, Namer.serversets, Namer.zkLeader,
Iface.mesh,
Interpreter.perHost, Interpreter.k8s,
Storage.etcd, Storage.inMemory, Storage.k8s, Storage.zk, Storage.consul,
Telemetry.adminMetricsExport
)
val LowMemSettings = BundleSettings ++ Seq(
dockerJavaImage := "buoyantio/debian-32-bit",
dockerTag := s"${version.value}-32b",
assemblyJarName in assembly := s"${name.value}-${version.value}-32b-exec"
)
/**
* A DCOS-specific assembly-running script that:
* 1) adds the namerd plugin directory to the classpath if it exists
* 2) bootstraps zookeeper with a default path and dtabs
* 3) boots namerd
*/
val dcosExecScript = (
"""|#!/bin/bash
|
|jars="$0"
|if [ -n "$NAMERD_HOME" ] && [ -d $NAMERD_HOME/plugins ]; then
| for jar in $NAMERD_HOME/plugins/*.jar ; do
| jars="$jars:$jar"
| done
|fi
|""" +
execScriptJvmOptions +
"""|if read -t 0; then
| CONFIG_INPUT=`cat`
|fi
|
|echo $CONFIG_INPUT | \
|${JAVA_HOME:-/usr}/bin/java -XX:+PrintCommandLineFlags \
|${JVM_OPTIONS:-$DEFAULT_JVM_OPTIONS} -cp $jars -server \
|io.buoyant.namerd.DcosBootstrap "$@"
|
|echo $CONFIG_INPUT | \
|${JAVA_HOME:-/usr}/bin/java -XX:+PrintCommandLineFlags \
|${JVM_OPTIONS:-$DEFAULT_JVM_OPTIONS} -cp $jars -server \
|io.buoyant.namerd.Main "$@"
|
|exit
|"""
).stripMargin
val dcosBootstrap = projectDir("namerd/dcos-bootstrap")
.dependsOn(core, admin, configCore, Storage.zk)
val DcosSettings = BundleSettings ++ Seq(
assemblyExecScript := dcosExecScript.split("\n").toSeq,
dockerTag := s"${version.value}-dcos",
assemblyJarName in assembly := s"${name.value}-${version.value}-dcos-exec"
)
val all = aggregateDir("namerd",
core, dcosBootstrap, main, Storage.all, Interpreter.all, Iface.all)
.configs(Bundle, Dcos, LowMem)
// Bundle includes all of the supported features:
.configDependsOn(Bundle)(BundleProjects: _*)
.settings(inConfig(Bundle)(BundleSettings))
.configDependsOn(LowMem)(BundleProjects: _*)
.settings(inConfig(LowMem)(LowMemSettings))
.configDependsOn(Dcos)(dcosBootstrap)
.settings(inConfig(Dcos)(DcosSettings))
.settings(
assembly <<= assembly in Bundle,
docker <<= docker in Bundle,
dockerBuildAndPush <<= dockerBuildAndPush in Bundle,
dockerPush <<= dockerPush in Bundle
)
// Find example configurations by searching the examples directory for config files.
val exampleConfigs = file("namerd/examples").list().toSeq.collect {
case ConfigFileRE(name) => config(name) -> exampleConfig(name)
}
def exampleConfig(name: String): Configuration = Bundle
val examples = projectDir("namerd/examples")
.withExamples(Namerd.all, exampleConfigs)
.configDependsOn(Test)(BundleProjects: _*)
.settings(publishArtifact := false)
.withTests()
}
object Interpreter {
val fs = projectDir("interpreter/fs")
.withTests()
.dependsOn(Namer.core, Namer.fs)
val namerd = projectDir("interpreter/namerd")
.withTests()
.dependsOn(
Namer.core,
Namerd.Iface.interpreterThrift,
Namerd.Iface.controlHttp,
Router.core)
val mesh = projectDir("interpreter/mesh")
.withTests()
.dependsOn(Namer.core, Mesh.core, Grpc.runtime)
val subnet = projectDir("interpreter/subnet")
.dependsOn(Namer.core)
.withTests()
val perHost = projectDir("interpreter/per-host")
.dependsOn(Namer.core, subnet)
.withTests()
val k8s = projectDir("interpreter/k8s")
.dependsOn(Namer.core, LinkerdBuild.k8s, perHost, subnet)
.withTests()
val all = aggregateDir("interpreter", fs, k8s, mesh, namerd, perHost, subnet)
}
object Linkerd {
val core = projectDir("linkerd/core")
.dependsOn(
configCore,
LinkerdBuild.admin,
Telemetry.core % "compile->compile;test->test",
Telemetry.adminMetricsExport,
Namer.core % "compile->compile;test->test",
Router.core
)
.withLib(Deps.jacksonCore)
.withGrpc
.withTests()
.withE2e()
.configWithLibs(Test)(Deps.jacksonDatabind, Deps.jacksonYaml)
val tls = projectDir("linkerd/tls")
.dependsOn(core)
.withTests()
val failureAccrual = projectDir("linkerd/failure-accrual")
.dependsOn(core)
.withTests()
object Protocol {
val h2 = projectDir("linkerd/protocol/h2")
.dependsOn(core, Router.h2, k8s)
.withTests()
.withTwitterLibs(Deps.finagle("netty4"))
val http = projectDir("linkerd/protocol/http")
.withTests().withE2e().withIntegration()
.withTwitterLibs(Deps.finagle("netty4-http"))
.dependsOn(
core % "compile->compile;e2e->test;integration->test",
k8s,
tls % "integration",
Namer.fs % "integration",
Router.http)
val mux = projectDir("linkerd/protocol/mux")
.dependsOn(core, Router.mux)
val thrift = projectDir("linkerd/protocol/thrift")
.dependsOn(core, Router.thrift % "compile->compile;test->test;e2e->e2e")
.withTests()
.withE2e()
val benchmark = projectDir("linkerd/protocol/benchmark")
.dependsOn(http, testUtil)
.enablePlugins(JmhPlugin)
.settings(publishArtifact := false)
.withTwitterLib(Deps.twitterUtil("benchmark"))
val all = aggregateDir("linkerd/protocol", benchmark, h2, http, mux, thrift)
}
object Announcer {
val serversets = projectDir("linkerd/announcer/serversets")
.withTwitterLib(Deps.finagle("serversets").exclude("org.slf4j", "slf4j-jdk14"))
.dependsOn(core)
val all = aggregateDir("linkerd/announcer", serversets)
}
val admin = projectDir("linkerd/admin")
.withTwitterLib(Deps.twitterServer)
.withTests()
.dependsOn(core % "compile->compile;test->test")
.dependsOn(LinkerdBuild.admin, Namer.core)
.dependsOn(Protocol.thrift % "test")
val main = projectDir("linkerd/main")
.dependsOn(admin, configCore, core)
.withTwitterLib(Deps.twitterServer)
.withLibs(Deps.jacksonCore, Deps.jacksonDatabind, Deps.jacksonYaml)
.withBuildProperties("io/buoyant/linkerd")
.settings(coverageExcludedPackages := ".*")
/*
* linkerd packaging configurations.
*
* linkerd is configured to be assembled into an executable and may
* be assembled into a dockerfile.
*/
/**
* An assembly-running script that adds the linkerd plugin directory
* to the classpath if it exists.
*/
val execScript = (
"""|#!/bin/sh
|
|jars="$0"
|if [ -n "$L5D_HOME" ] && [ -d $L5D_HOME/plugins ]; then
| for jar in $L5D_HOME/plugins/*.jar ; do
| jars="$jars:$jar"
| done
|fi
|""" +
execScriptJvmOptions +
"""|exec "${JAVA_HOME:-/usr}/bin/java" -XX:+PrintCommandLineFlags \\
| ${JVM_OPTIONS:-$DEFAULT_JVM_OPTIONS} -cp $jars -server \\
| io.buoyant.linkerd.Main "$@"
|"""
).stripMargin
val BundleSettings = Defaults.configSettings ++ appPackagingSettings ++ Seq(
mainClass := Some("io.buoyant.linkerd.Main"),
assemblyExecScript := execScript.split("\n").toSeq,
dockerEnvPrefix := "L5D_",
unmanagedBase := baseDirectory.value / "plugins",
assemblyJarName in assembly := s"${name.value}-${version.value}-exec",
dockerTag := version.value
)
val BundleProjects = Seq[ProjectReference](
admin, core, main, configCore,
Namer.consul, Namer.fs, Namer.k8s, Namer.marathon, Namer.serversets, Namer.zkLeader, Namer.curator,
Interpreter.fs, Interpreter.k8s, Interpreter.mesh, Interpreter.namerd, Interpreter.perHost, Interpreter.subnet,
Protocol.h2, Protocol.http, Protocol.mux, Protocol.thrift,
Announcer.serversets,
Telemetry.adminMetricsExport, Telemetry.core, Telemetry.prometheus, Telemetry.recentRequests, Telemetry.statsd, Telemetry.tracelog, Telemetry.zipkin,
tls,
failureAccrual
)
val LowMemSettings = BundleSettings ++ Seq(
dockerJavaImage := "buoyantio/debian-32-bit",
dockerTag := s"${version.value}-32b",
assemblyJarName in assembly := s"${name.value}-${version.value}-32b-exec"
)
val all = aggregateDir("linkerd",
admin, configCore, core, failureAccrual, main, tls,
Announcer.all, Namer.all, Protocol.all)
.configs(Bundle, LowMem)
// Bundle includes all of the supported features:
.configDependsOn(Bundle)(BundleProjects: _*)
.settings(inConfig(Bundle)(BundleSettings))
.configDependsOn(LowMem)(BundleProjects: _*)
.settings(inConfig(LowMem)(LowMemSettings))
.settings(
assembly <<= assembly in Bundle,
docker <<= docker in Bundle,
dockerBuildAndPush <<= dockerBuildAndPush in Bundle,
dockerPush <<= dockerPush in Bundle
)
// Find example configurations by searching the examples directory for config files.
val exampleConfigs = file("linkerd/examples").list().toSeq.collect {
case ConfigFileRE(name) => config(name) -> exampleConfig(name)
}
def exampleConfig(name: String): Configuration = Bundle
val examples = projectDir("linkerd/examples")
.withExamples(Linkerd.all, exampleConfigs)
.configDependsOn(Test)(BundleProjects: _*)
.settings(publishArtifact := false)
.withTests()
}
val validateAssembled = taskKey[Unit]("run validation against assembled artifacts")
val validator = projectDir("validator")
.withTwitterLibs(Deps.twitterServer, Deps.twitterUtil("events"), Deps.finagle("http"))
.settings(
mainClass := Some("io.buoyant.namerd.Validator"),
validateAssembled := (Def.taskDyn {
val linkerd = (assembly in Bundle in Linkerd.all).value
val namerd = (assembly in Bundle in Namerd.all).value
Def.task {
(run in Compile).toTask(s" -linkerd.exec=$linkerd -namerd.exec=$namerd").value
}
}).value,
coverageExcludedPackages := ".*",
publishArtifact := false
)
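// validateAssembled first assembles both bundles and only then (via the Def.taskDyn indirection above)
// runs the validator against the resulting executables.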
// Note: Finagle and Grpc modules defined in other files.
// All projects must be exposed at the root of the object in
// dependency-order:
val router = Router.all
val routerCore = Router.core
val routerH2 = Router.h2
val routerHttp = Router.http
val routerMux = Router.mux
val routerThrift = Router.thrift
val routerThriftIdl = Router.thriftIdl
val mesh = Mesh.all
val meshCore = Mesh.core
val telemetry = Telemetry.all
val telemetryAdminMetricsExport = Telemetry.adminMetricsExport
val telemetryCore = Telemetry.core
val telemetryPrometheus = Telemetry.prometheus
val telemetryRecentRequests = Telemetry.recentRequests
val telemetryStatsD = Telemetry.statsd
val telemetryTracelog = Telemetry.tracelog
val telemetryZipkin = Telemetry.zipkin
val namer = Namer.all
val namerCore = Namer.core
val namerConsul = Namer.consul
val namerCurator = Namer.curator
val namerFs = Namer.fs
val namerK8s = Namer.k8s
val namerMarathon = Namer.marathon
val namerServersets = Namer.serversets
val namerZkLeader = Namer.zkLeader
val namerd = Namerd.all
val namerdExamples = Namerd.examples
val namerdCore = Namerd.core
val namerdDcosBootstrap = Namerd.dcosBootstrap
val namerdIface = Namerd.Iface.all
val namerdIfaceControlHttp = Namerd.Iface.controlHttp
val namerdIfaceInterpreterThriftIdl = Namerd.Iface.interpreterThriftIdl
val namerdIfaceInterpreterThrift = Namerd.Iface.interpreterThrift
val namerdIfaceMesh = Namerd.Iface.mesh
val namerdStorageEtcd = Namerd.Storage.etcd
val namerdStorageInMemory = Namerd.Storage.inMemory
val namerdStorageK8s = Namerd.Storage.k8s
val namerdStorageZk = Namerd.Storage.zk
val namerdStorageConsul = Namerd.Storage.consul
val namerdStorage = Namerd.Storage.all
val namerdMain = Namerd.main
val interpreter = Interpreter.all
val interpreterFs = Interpreter.fs
val interpreterK8s = Interpreter.k8s
val interpreterMesh = Interpreter.mesh
val interpreterNamerd = Interpreter.namerd
val interpreterPerHost = Interpreter.perHost
val interpreterSubnet = Interpreter.subnet
val linkerd = Linkerd.all
val linkerdBenchmark = Linkerd.Protocol.benchmark
val linkerdExamples = Linkerd.examples
val linkerdAdmin = Linkerd.admin
val linkerdConfig = configCore
val linkerdCore = Linkerd.core
val linkerdMain = Linkerd.main
val linkerdProtocol = Linkerd.Protocol.all
val linkerdProtocolH2 = Linkerd.Protocol.h2
val linkerdProtocolHttp = Linkerd.Protocol.http
val linkerdProtocolMux = Linkerd.Protocol.mux
val linkerdProtocolThrift = Linkerd.Protocol.thrift
val linkerdAnnouncer = Linkerd.Announcer.all
val linkerdAnnouncerServersets = Linkerd.Announcer.serversets
val linkerdTls = Linkerd.tls
val linkerdFailureAccrual = Linkerd.failureAccrual
// Unified documentation via the sbt-unidoc plugin
val all = project("all", file("."))
.settings(aggregateSettings ++ unidocSettings)
.aggregate(
admin,
configCore,
consul,
etcd,
k8s,
marathon,
testUtil,
Finagle.all,
Grpc.all,
Interpreter.all,
Linkerd.all,
Linkerd.examples,
Namer.all,
Namerd.all,
Namerd.examples,
Router.all,
Telemetry.all
)
}
| hhtpcd/linkerd | project/LinkerdBuild.scala | Scala | apache-2.0 | 23,268 |
/*
* Copyright (c) <2015-2016>, see CONTRIBUTORS
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the <organization> nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package ch.usi.inf.l3.sana.brokenj.typechecker
import ch.usi.inf.l3.sana
import sana.brokenj
import sana.primj
import sana.tiny
import sana.calcj
import tiny.core.CheckerComponent
import tiny.dsl._
import tiny.ast._
import primj.ast.Implicits._
import calcj.ast._
import calcj.ast.operators.{Inc, Dec}
import tiny.errors.ErrorReporting.{error,warning}
import primj.symbols._
import primj.modifiers.Ops._
import primj.errors.ErrorCodes._
import primj.typechecker.ShapeCheckerComponent
import brokenj.ast._
import brokenj.ast.TreeUtils
@component
trait LabelShapeCheckerComponent extends ShapeCheckerComponent {
(lbl: LabelApi) => {
if(!canHaveLabel(lbl.stmt)) {
error(UNEXPECTED_TREE,
lbl.stmt.toString, "an expression", lbl.stmt.pos)
}
check(lbl.stmt)
}
/** @see [[brokenj.ast.TreeUtils.canHaveLabel]] */
protected def canHaveLabel(stmt: Expr): Boolean =
TreeUtils.canHaveLabel(stmt)
}
@component
trait SwitchShapeCheckerComponent extends ShapeCheckerComponent {
(switch: SwitchApi) => {
check(switch.expr)
switch.cases.foreach(check(_))
}
}
@component
trait CaseShapeCheckerComponent extends ShapeCheckerComponent {
(cse: CaseApi) => {
check(cse.body)
cse.guards.foreach(check(_))
}
}
| amanjpro/languages-a-la-carte | brokenj/src/main/scala/typechecker/shapecheckers.scala | Scala | bsd-3-clause | 2,834 |
package cobalt.parser.expression
import cobalt.ast.AST._
import cobalt.parser.ExpressionParser
import cobalt.utils.TestUtil
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import org.scalatest.{FunSpec, Matchers}
import scala.collection.mutable.ArrayBuffer
@RunWith(classOf[JUnitRunner])
class ExpressionParserTest extends FunSpec with Matchers
{
describe("Nested expression call parser test") {
it("Should parse nested expression calls") {
TestUtil.parse("x.toString()", ExpressionParser.expressionParser) shouldBe NestedExpr(ArrayBuffer(Identifier(Name("x")), MethodCall(Name("toString"),ArrayBuffer(BlockExpr(ArrayBuffer())))))
}
}
// TODO "methodCall1().methodCall2()"
// TODO "methodCall1(a).methodCall2(a)"
// TODO "methodCall1(a, b, c).methodCall2(a, b, c)"
// TODO "varName1.varName2"
// TODO "methodCall1().varName1"
// TODO "methodCall1(a).varName1"
// TODO "methodCall1(a, b, c).varName1"
// TODO "this.varName2"
// TODO "super.varName2"
}
| Michael2109/cobalt | src/test/scala/cobalt/parser/expression/ExpressionParserTest.scala | Scala | lgpl-3.0 | 1,022 |
package precogbuild
import sbt._, Keys._
import scala._, Predef._
object Build {
val BothScopes = "compile->compile;test->test"
val warningOpts = Seq(
//"-g:vars",
//"-deprecation",
//"-unchecked",
//"-Ywarn-value-discard",
//"-Ywarn-numeric-widen",
//"-Ywarn-unused",
"-Ywarn-unused-import")
val defaultArgSet = Seq(
"-Ypartial-unification")
/** Watch out Jonesy! It's the ol' double-cross!
* Why, you...
*
* Given a path like src/main/scala we want that to explode into something like the
* following, assuming we're currently building with java 1.7 and scala 2.10.
*
* src/main/scala
* src/main/scala_2.10
* src/main_1.7/scala
* src/main_1.7/scala_2.10
*
* Similarly for main/test, 2.10/2.11, 1.7/1.8.
*/
def doubleCross(config: Configuration) = {
unmanagedSourceDirectories in config ++= {
val jappend = Seq("", "_" + javaSpecVersion)
val sappend = Seq("", "_" + scalaBinaryVersion.value)
val basis = (sourceDirectory in config).value
val parent = basis.getParentFile
val name = basis.getName
for (j <- jappend ; s <- sappend) yield parent / s"$name$j" / s"scala$s"
}
}
def javaSpecVersion: String = sys.props("java.specification.version")
def inBoth[A](f: Configuration => Seq[A]): Seq[A] = List(Test, Compile) flatMap f
def kindProjector = "org.spire-math" % "kind-projector" % "0.9.3" cross CrossVersion.binary
implicit class ProjectOps(val p: sbt.Project) {
def noArtifacts: Project = also(
publish := (()),
publishLocal := (()),
Keys.`package` := file(""),
packageBin := file(""),
packagedArtifacts := Map()
)
def root: Project = p in file(".")
def also(ss: Seq[Setting[_]]): Project = p settings (ss: _*)
def also(s: Setting[_], ss: Setting[_]*): Project = also(s :: ss.toList)
def deps(ms: ModuleID*): Project = also(libraryDependencies ++= ms.toList)
def scalacArgs(args: String*): Project = also(scalacOptions ++= args.toList)
def strictVersions: Project = also(conflictManager := ConflictManager.strict)
def serialTests: Project = also(parallelExecution in Test := false)
def withWarnings: Project = scalacArgs(warningOpts: _*)
def logImplicits: Project = scalacArgs("-Xlog-implicits")
def crossJavaTargets: Project = also(inBoth(doubleCross))
def scalacPlugins(ms: ModuleID*): Project = also(ms.toList map (m => addCompilerPlugin(m)))
def setup: Project = (
serialTests scalacPlugins (kindProjector) scalacArgs (defaultArgSet: _*) also(
organization := "org.quasar-analytics",
scalaVersion := "2.11.8",
scalaOrganization := "org.typelevel",
logBuffered in Test := false,
// fork in Test := true,
unmanagedJars in Compile += (baseDirectory in ThisBuild).value / "lib" / "jdbm-3.0-SNAPSHOT.jar"))
}
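// Hypothetical usage of the ProjectOps syntax above, e.g.:
//   lazy val common = sbt.Project("common", file("common")).setup.withWarnings.crossJavaTargets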
}
| drostron/quasar | project/Precog.scala | Scala | apache-2.0 | 3,191 |
package com.mizhi.nlp.stemmers.huskpaice
import org.scalatest.mockito.MockitoSugar
import org.scalatest.{FunSpec, Matchers}
abstract class UnitSpec extends FunSpec with Matchers with MockitoSugar | mizhi/scala-stemmer | src/test/scala/com/mizhi/nlp/stemmers/huskpaice/UnitSpec.scala | Scala | mit | 197 |
package org.jetbrains.plugins.scala
package debugger
import java.io._
import java.security.MessageDigest
import java.util
import com.intellij.execution.application.{ApplicationConfiguration, ApplicationConfigurationType}
import com.intellij.ide.highlighter.{ModuleFileType, ProjectFileType}
import com.intellij.openapi.module.Module
import com.intellij.openapi.util.io.FileUtil
import com.intellij.openapi.vfs.newvfs.impl.VfsRootAccess
import com.intellij.openapi.vfs.{LocalFileSystem, VfsUtil}
import com.intellij.testFramework.{PlatformTestCase, PsiTestUtil, UsefulTestCase}
import org.jetbrains.plugins.scala.extensions.inWriteAction
import org.jetbrains.plugins.scala.util.TestUtils
import org.junit.Assert
import scala.collection.mutable
import scala.util.Try
/**
* @author Roman.Shein
* Date: 03.03.14
*/
abstract class ScalaDebuggerTestBase extends ScalaCompilerTestBase {
protected def checksumsFileName = "checksums.dat"
protected val testDataBasePrefix = "debugger"
private var checksums: mutable.HashMap[String, Array[Byte]] = null
protected var needMake = false
private val sourceFiles = mutable.HashMap[String, String]()
override def setUp() {
val testDataValid = testDataProjectIsValid()
if (!testDataValid) {
needMake = true
val testDataProjectPath = testDataBasePath
if (testDataProjectPath.exists()) FileUtil.delete(testDataProjectPath)
}
UsefulTestCase.edt(new Runnable {
def run() {
ScalaDebuggerTestBase.super.setUp()
checkOrAddAllSourceFiles()
addScalaSdk()
addOtherLibraries()
}
})
}
/**
* Intended for loading libraries different from scala-compiler.
*/
protected def addOtherLibraries()
protected def addIvyCacheLibrary(libraryName: String, libraryPath: String, jarNames: String*) {
val libsPath = TestUtils.getIvyCachePath
val pathExtended = s"$libsPath/$libraryPath/"
VfsRootAccess.allowRootAccess(pathExtended)
PsiTestUtil.addLibrary(myModule, libraryName, pathExtended, jarNames: _*)
}
override def setUpModule(): Unit = {
if (needMake) super.setUpModule()
else myModule = loadModule(getImlFile.getAbsolutePath)
PlatformTestCase.myFilesToDelete.remove(getImlFile)
}
override def getIprFile: File = {
val file = new File(testDataBasePath, testClassName + ProjectFileType.DOT_DEFAULT_EXTENSION)
FileUtil.createIfDoesntExist(file)
file
}
protected def getImlFile: File = {
if (testDataBasePath.exists()) testDataBasePath.listFiles().find {
_.getName.endsWith(ModuleFileType.DOT_DEFAULT_EXTENSION)
}.orNull
else null
}
override def runInDispatchThread(): Boolean = false
override def invokeTestRunnable(runnable: Runnable): Unit = runnable.run()
protected def getRunProfile(module: Module, className: String) = {
val configuration: ApplicationConfiguration = new ApplicationConfiguration("app", module.getProject, ApplicationConfigurationType.getInstance)
configuration.setModule(module)
configuration.setMainClassName(className)
configuration
}
override protected def addFileToProject(fileName: String, fileText: String) {
def virtualFileExists(file: File) = {
Try(getVirtualFile(file).exists()).getOrElse(false)
}
val file = getFileInSrc(fileName)
if (needMake || !fileWithTextExists(file, fileText)) {
needMake = true
if (file.exists() || virtualFileExists(file)) {
val vFile = getVirtualFile(file)
inWriteAction(VfsUtil.saveText(vFile, fileText))
}
else super.addFileToProject(fileName, fileText)
}
}
protected def addSourceFile(relPathInSrc: String, fileText: String) = {
sourceFiles += relPathInSrc -> fileText
}
def checkOrAddAllSourceFiles() = {
if (sourceFiles.exists {
case (path, text) => !fileWithTextExists(new File(path), text)
}) {
needMake = true // presumably the intent of this (originally empty) branch: force a rebuild when any source changed
}
sourceFiles.foreach {
case (path, text) => addFileToProject(path, text)
}
}
protected def addFileToProject(fileText: String) {
Assert.assertTrue(s"File should start with `object $mainClassName`", fileText.startsWith(s"object $mainClassName"))
addFileToProject(mainFileName, fileText)
}
protected def getFileInSrc(fileName: String): File = {
if (!srcDir.exists()) srcDir.mkdir()
new File(srcDir, fileName)
}
protected def testClassName: String = this.getClass.getSimpleName.stripSuffix("Test")
protected def testDataBasePath(dataPath: String): File = {
val testDataDir = new File(TestUtils.getTestDataPath, dataPath)
val classTestsDir = new File(testDataDir, testClassName)
if (classTestsDir.exists()) classTestsDir
else {
FileUtil.createDirectory(classTestsDir)
classTestsDir
}
}
protected def mainClassName = getTestName(false)
protected def mainFileName = s"$mainClassName.scala"
private def testDataBasePath: File = testDataBasePath(testDataBasePrefix)
def getVirtualFile(file: File) = LocalFileSystem.getInstance.refreshAndFindFileByIoFile(file)
def md5(file: File): Array[Byte] = {
val md = MessageDigest.getInstance("MD5")
val isSource = file.getName.endsWith(".java") || file.getName.endsWith(".scala")
if (isSource) {
val text = scala.io.Source.fromFile(file, "UTF-8").mkString.replace("\r", "")
md.digest(text.getBytes("UTF8"))
} else {
md.digest(FileUtil.loadBytes(new FileInputStream(file)))
}
}
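// MD5 checksums of all source and output files are persisted between runs (see saveChecksums/loadChecksums)
// so the shared test project is only recompiled when its inputs actually changed.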
private def computeChecksums(): mutable.HashMap[String, Array[Byte]] = {
val result = new mutable.HashMap[String, Array[Byte]]
def computeForDir(dir: File) {
if (dir.exists) dir.listFiles().foreach { f =>
if (f.isDirectory) computeForDir(f)
else {
result += (testDataBasePath.toURI.relativize(f.toURI).toString -> md5(f))
}
}
}
computeForDir(srcDir)
computeForDir(outDir)
result
}
protected def outDir: File = new File(testDataBasePath, "out")
protected def srcDir: File = new File(testDataBasePath, "src")
protected def saveChecksums() = {
checksums = computeChecksums()
val file = new File(testDataBasePath, checksumsFileName)
FileUtil.createIfDoesntExist(file)
val oos = new ObjectOutputStream(new FileOutputStream(file))
try {
oos.writeObject(checksums)
}
finally {
oos.close()
}
}
private def loadChecksums(): Boolean = {
val file = new File(testDataBasePath, checksumsFileName)
if (!file.exists) {
return false
}
val ois = new ObjectInputStream(new FileInputStream(file))
val result = try {
val obj = ois.readObject()
obj match {
case map: mutable.HashMap[String, Array[Byte]]@unchecked => checksums = map; true
case _ => false
}
}
finally ois.close()
result
}
private def testDataProjectIsValid(): Boolean = {
sameSourceFiles() && loadChecksums() && checksums.keys.forall(checkFile) && getImlFile != null
}
private def sameSourceFiles(): Boolean = {
def numberOfFiles(dir: File): Int = dir match {
case d: File if d.isDirectory => d.listFiles().map(numberOfFiles).sum
case f => 1
}
val existingFilesNumber = numberOfFiles(srcDir)
sourceFiles.size == existingFilesNumber && sourceFiles.forall {
case (relPath, text) => fileWithTextExists(new File(srcDir, relPath), text)
}
}
private def fileWithTextExists(file: File, fileText: String): Boolean = {
if (!file.exists()) false
else {
val oldText = scala.io.Source.fromFile(file, "UTF-8").mkString
oldText.replace("\r", "") == fileText.replace("\r", "")
}
}
private def checkFile(relPath: String): Boolean = {
val file = new File(testDataBasePath, relPath)
file.exists && util.Arrays.equals(checksums(relPath), md5(file))
}
}
| double-y/translation-idea-plugin | test/org/jetbrains/plugins/scala/debugger/ScalaDebuggerTestBase.scala | Scala | apache-2.0 | 7,836 |
/*
* Copyright 2016-2018 SN127.fi
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package fi.sn127.tackler.report
import fi.sn127.tackler.model.Txns
trait ExporterLike extends OutputLike {
def writeExport(writer: Writer, txns: Txns): Unit
}
| jaa127/tackler | core/src/main/scala/fi/sn127/tackler/report/ExporterLike.scala | Scala | apache-2.0 | 764 |
package de.mineformers.visum.event
/**
* Event
*
* @author PaleoCrafter
*/
trait Event
{
private var _consumed = false
def consumed = _consumed
def consume() = _consumed = true
}
| MineFormers/Visum | src/main/scala/de.mineformers.visum/event/Event.scala | Scala | mit | 192 |
package org.http4s
package testing
import cats._
import cats.data.NonEmptyList
import cats.effect.{Effect, IO}
import cats.effect.laws.discipline.arbitrary._
import cats.effect.laws.util.TestContext
import cats.implicits.{catsSyntaxEither => _, _}
import fs2.{Pure, Stream}
import java.nio.charset.{Charset => NioCharset}
import java.time._
import java.util.Locale
import org.http4s.headers._
import org.http4s.syntax.literals._
import org.http4s.syntax.string._
import org.http4s.util.CaseInsensitiveString
import org.scalacheck._
import org.scalacheck.Arbitrary._
import org.scalacheck.Gen._
import org.scalacheck.rng.Seed
import scala.collection.JavaConverters._
import scala.concurrent.duration._
import scala.concurrent.Future
import scala.util.Try
trait ArbitraryInstances {
private implicit class ParseResultSyntax[A](self: ParseResult[A]) {
def yolo: A = self.valueOr(e => sys.error(e.toString))
}
implicit val arbitraryCaseInsensitiveString: Arbitrary[CaseInsensitiveString] =
Arbitrary(arbitrary[String].map(_.ci))
implicit val cogenCaseInsensitiveString: Cogen[CaseInsensitiveString] =
Cogen[String].contramap(_.value.toLowerCase(Locale.ROOT))
implicit def arbitraryNonEmptyList[A: Arbitrary]: Arbitrary[NonEmptyList[A]] =
Arbitrary {
for {
a <- arbitrary[A]
list <- arbitrary[List[A]]
} yield NonEmptyList(a, list)
}
val genChar: Gen[Char] = choose('\u0000', '\u007F')
val ctlChar: List[Char] = ('\u007F' +: ('\u0000' to '\u001F')).toList
val lws: List[Char] = " \t".toList
val genCrLf: Gen[String] = const("\r\n")
val genRightLws: Gen[String] = nonEmptyListOf(oneOf(lws)).map(_.mkString)
val genLws: Gen[String] =
oneOf(sequence[List[String], String](List(genCrLf, genRightLws)).map(_.mkString), genRightLws)
val octets: List[Char] = ('\u0000' to '\u00FF').toList
val genOctet: Gen[Char] = oneOf(octets)
val allowedText: List[Char] = octets.diff(ctlChar)
val genText: Gen[String] = oneOf(nonEmptyListOf(oneOf(allowedText)).map(_.mkString), genLws)
// TODO Fix Rfc2616BasicRules.QuotedString to support the backslash character
val allowedQDText: List[Char] = allowedText.filterNot(c => c == '"' || c == '\\')
val genQDText: Gen[String] = nonEmptyListOf(oneOf(allowedQDText)).map(_.mkString)
val genQuotedPair: Gen[String] =
genChar.map(c => s"\\$c")
val genQuotedString: Gen[String] = oneOf(genQDText, genQuotedPair).map(s => s"""\"$s\"""")
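// e.g. this can generate values like "abc" (qdtext) or "\x" (a quoted-pair), per the RFC 2616 grammar.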
val genTchar: Gen[Char] = oneOf {
Seq('!', '#', '$', '%', '&', '\'', '*', '+', '-', '.', '^', '_', '`', '|', '~') ++
('0' to '9') ++ ('A' to 'Z') ++ ('a' to 'z')
}
val genToken: Gen[String] =
nonEmptyListOf(genTchar).map(_.mkString)
val genVchar: Gen[Char] =
oneOf('\u0021' to '\u007e')
val genFieldVchar: Gen[Char] =
genVchar
val genFieldContent: Gen[String] =
for {
head <- genFieldVchar
tail <- containerOf[Vector, Vector[Char]](
frequency(
9 -> genFieldVchar.map(Vector(_)),
1 -> (for {
spaces <- nonEmptyContainerOf[Vector, Char](oneOf(' ', '\t'))
fieldVchar <- genFieldVchar
} yield spaces :+ fieldVchar)
)
).map(_.flatten)
} yield (head +: tail).mkString
val genFieldValue: Gen[String] =
genFieldContent
val genStandardMethod: Gen[Method] =
oneOf(Method.all)
implicit val arbitraryMethod: Arbitrary[Method] = Arbitrary(
frequency(
10 -> genStandardMethod,
1 -> genToken.map(Method.fromString(_).yolo)
))
implicit val cogenMethod: Cogen[Method] =
Cogen[Int].contramap(_.##)
val genValidStatusCode =
choose(100, 599)
val genStandardStatus =
oneOf(Status.registered.toSeq)
val genCustomStatus = for {
code <- genValidStatusCode
reason <- arbitrary[String]
} yield Status.fromIntAndReason(code, reason).yolo
implicit val arbitraryStatus: Arbitrary[Status] = Arbitrary(
frequency(
10 -> genStandardStatus,
1 -> genCustomStatus
))
implicit val cogenStatus: Cogen[Status] =
Cogen[Int].contramap(_.code)
implicit val arbitraryQueryParam: Arbitrary[(String, Option[String])] =
Arbitrary {
frequency(
5 -> {
for {
k <- arbitrary[String]
v <- arbitrary[Option[String]]
} yield (k, v)
},
2 -> const(("foo" -> Some("bar"))) // Want some repeats
)
}
implicit val arbitraryQuery: Arbitrary[Query] =
Arbitrary {
for {
n <- size
vs <- containerOfN[Vector, (String, Option[String])](n % 8, arbitraryQueryParam.arbitrary)
} yield Query(vs: _*)
}
implicit val arbitraryHttpVersion: Arbitrary[HttpVersion] =
Arbitrary {
for {
major <- choose(0, 9)
minor <- choose(0, 9)
} yield HttpVersion.fromVersion(major, minor).yolo
}
implicit val cogenHttpVersion: Cogen[HttpVersion] =
Cogen[(Int, Int)].contramap(v => (v.major, v.minor))
implicit val arbitraryNioCharset: Arbitrary[NioCharset] =
Arbitrary(oneOf(NioCharset.availableCharsets.values.asScala.toSeq))
implicit val cogenNioCharset: Cogen[NioCharset] =
Cogen[String].contramap(_.name)
implicit val arbitraryCharset: Arbitrary[Charset] =
Arbitrary { arbitrary[NioCharset].map(Charset.fromNioCharset) }
implicit val cogenCharset: Cogen[Charset] =
Cogen[NioCharset].contramap(_.nioCharset)
implicit val arbitraryQValue: Arbitrary[QValue] =
Arbitrary {
oneOf(const(0), const(1000), choose(0, 1000))
.map(QValue.fromThousandths(_).yolo)
}
implicit val cogenQValue: Cogen[QValue] =
Cogen[Int].contramap(_.thousandths)
implicit val arbitraryCharsetRange: Arbitrary[CharsetRange] =
Arbitrary {
for {
charsetRange <- genCharsetRangeNoQuality
q <- arbitrary[QValue]
} yield charsetRange.withQValue(q)
}
implicit val cogenCharsetRange: Cogen[CharsetRange] =
Cogen[Either[(Charset, QValue), QValue]].contramap {
case CharsetRange.Atom(charset, qValue) =>
Left((charset, qValue))
case CharsetRange.`*`(qValue) =>
Right(qValue)
}
implicit val arbitraryCharsetAtomRange: Arbitrary[CharsetRange.Atom] =
Arbitrary {
for {
charset <- arbitrary[Charset]
q <- arbitrary[QValue]
} yield charset.withQuality(q)
}
implicit val arbitraryCharsetSplatRange: Arbitrary[CharsetRange.`*`] =
Arbitrary { arbitrary[QValue].map(CharsetRange.`*`.withQValue(_)) }
def genCharsetRangeNoQuality: Gen[CharsetRange] =
frequency(
3 -> arbitrary[Charset].map(CharsetRange.fromCharset),
1 -> const(CharsetRange.`*`)
)
@deprecated("Use genCharsetRangeNoQuality. This one may cause deadlocks.", "0.15.7")
val charsetRangesNoQuality: Gen[CharsetRange] =
genCharsetRangeNoQuality
implicit val arbitraryAcceptCharset: Arbitrary[`Accept-Charset`] =
Arbitrary {
for {
// make a set first so we don't have contradictory q-values
charsetRanges <- nonEmptyContainerOf[Set, CharsetRange](genCharsetRangeNoQuality)
.map(_.toVector)
qValues <- containerOfN[Vector, QValue](charsetRanges.size, arbitraryQValue.arbitrary)
charsetRangesWithQ = charsetRanges.zip(qValues).map {
case (range, q) => range.withQValue(q)
}
} yield `Accept-Charset`(charsetRangesWithQ.head, charsetRangesWithQ.tail: _*)
}
def genContentCodingNoQuality: Gen[ContentCoding] =
Gen.frequency(
(10, oneOf(ContentCoding.standard.values.toSeq)),
(2, genToken.map(ContentCoding.unsafeFromString))
)
implicit val arbitraryContentCoding: Arbitrary[ContentCoding] =
Arbitrary {
for {
cc <- genContentCodingNoQuality
q <- arbitrary[QValue]
} yield cc.withQValue(q)
}
implicit val cogenContentCoding: Cogen[ContentCoding] =
Cogen[String].contramap(_.coding)
// MediaRange expects the quoted pair without quotes
val http4sGenUnquotedPair = genQuotedPair.map { c =>
c.substring(1, c.length - 1)
}
val http4sGenMediaRangeExtension: Gen[(String, String)] =
for {
token <- genToken
value <- oneOf(http4sGenUnquotedPair, genQDText)
} yield (token, value)
val http4sGenMediaRangeExtensions: Gen[Map[String, String]] =
Gen.listOf(http4sGenMediaRangeExtension).map(_.toMap)
val http4sGenMediaRange: Gen[MediaRange] =
for {
`type` <- genToken
extensions <- http4sGenMediaRangeExtensions
} yield new MediaRange(`type`, extensions)
implicit val http4sArbitraryMediaRange: Arbitrary[MediaRange] =
Arbitrary {
for {
`type` <- genToken
extensions <- http4sGenMediaRangeExtensions
} yield new MediaRange(`type`, extensions)
}
implicit val http4sCogenMediaRange: Cogen[MediaRange] =
Cogen[(String, Map[String, String])].contramap(m => (m.mainType, m.extensions))
implicit val arbitraryAcceptEncoding: Arbitrary[`Accept-Encoding`] =
Arbitrary {
for {
// make a set first so we don't have contradictory q-values
contentCodings <- nonEmptyContainerOf[Set, ContentCoding](genContentCodingNoQuality)
.map(_.toVector)
qValues <- containerOfN[Vector, QValue](contentCodings.size, arbitraryQValue.arbitrary)
contentCodingsWithQ = contentCodings.zip(qValues).map {
case (coding, q) => coding.withQValue(q)
}
} yield `Accept-Encoding`(contentCodingsWithQ.head, contentCodingsWithQ.tail: _*)
}
implicit val arbitraryContentEncoding: Arbitrary[`Content-Encoding`] =
Arbitrary {
for {
contentCoding <- genContentCodingNoQuality
} yield `Content-Encoding`(contentCoding)
}
def genLanguageTagNoQuality: Gen[LanguageTag] =
frequency(
3 -> (for {
primaryTag <- genToken
subTags <- frequency(4 -> Nil, 1 -> listOf(genToken))
} yield LanguageTag(primaryTag, subTags = subTags)),
1 -> const(LanguageTag.`*`)
)
implicit val arbitraryLanguageTag: Arbitrary[LanguageTag] =
Arbitrary {
for {
lt <- genLanguageTagNoQuality
q <- arbitrary[QValue]
} yield lt.copy(q = q)
}
implicit val arbitraryAcceptLanguage: Arbitrary[`Accept-Language`] =
Arbitrary {
for {
// make a set first so we don't have contradictory q-values
languageTags <- nonEmptyContainerOf[Set, LanguageTag](genLanguageTagNoQuality)
.map(_.toVector)
qValues <- containerOfN[Vector, QValue](languageTags.size, arbitraryQValue.arbitrary)
tagsWithQ = languageTags.zip(qValues).map { case (tag, q) => tag.copy(q = q) }
} yield `Accept-Language`(tagsWithQ.head, tagsWithQ.tail: _*)
}
implicit val arbitraryUrlForm: Arbitrary[UrlForm] = Arbitrary {
    // new String("\ufffe".getBytes("UTF-16"), "UTF-16") != "\ufffe".
// Ain't nobody got time for that.
arbitrary[Map[String, Seq[String]]]
.map(UrlForm.apply)
      .suchThat(!_.toString.contains('\ufffe'))
}
implicit val arbitraryAllow: Arbitrary[Allow] =
Arbitrary {
for {
methods <- nonEmptyContainerOf[Set, Method](arbitrary[Method]).map(_.toList)
} yield Allow(methods.head, methods.tail: _*)
}
implicit val arbitraryContentLength: Arbitrary[`Content-Length`] =
Arbitrary {
for {
long <- arbitrary[Long] if long > 0L
} yield `Content-Length`.unsafeFromLong(long)
}
implicit val arbitraryXB3TraceId: Arbitrary[`X-B3-TraceId`] =
Arbitrary {
for {
long <- arbitrary[Long]
} yield `X-B3-TraceId`(long)
}
implicit val arbitraryXB3SpanId: Arbitrary[`X-B3-SpanId`] =
Arbitrary {
for {
long <- arbitrary[Long]
} yield `X-B3-SpanId`(long)
}
implicit val arbitraryXB3ParentSpanId: Arbitrary[`X-B3-ParentSpanId`] =
Arbitrary {
for {
long <- arbitrary[Long]
} yield `X-B3-ParentSpanId`(long)
}
implicit val arbitraryXB3Flags: Arbitrary[`X-B3-Flags`] =
Arbitrary {
for {
flags <- Gen.listOfN(
3,
Gen.oneOf(
`X-B3-Flags`.Flag.Debug,
`X-B3-Flags`.Flag.Sampled,
`X-B3-Flags`.Flag.SamplingSet))
} yield `X-B3-Flags`(flags.toSet)
}
implicit val arbitraryXB3Sampled: Arbitrary[`X-B3-Sampled`] =
Arbitrary {
for {
boolean <- arbitrary[Boolean]
} yield `X-B3-Sampled`(boolean)
}
val genHttpDate: Gen[HttpDate] = {
val min = ZonedDateTime
.of(1900, 1, 1, 0, 0, 0, 0, ZoneId.of("UTC"))
.toInstant
.toEpochMilli / 1000
val max = ZonedDateTime
.of(9999, 12, 31, 23, 59, 59, 0, ZoneId.of("UTC"))
.toInstant
.toEpochMilli / 1000
choose[Long](min, max).map(HttpDate.unsafeFromEpochSecond)
}
implicit val arbitraryDateHeader: Arbitrary[headers.Date] =
Arbitrary {
for {
httpDate <- genHttpDate
} yield headers.Date(httpDate)
}
val genHttpExpireDate: Gen[HttpDate] = {
// RFC 2616 says Expires should be between now and 1 year in the future, though other values are allowed
val min = ZonedDateTime.of(LocalDateTime.now, ZoneId.of("UTC")).toInstant.toEpochMilli / 1000
val max = ZonedDateTime
.of(LocalDateTime.now.plusYears(1), ZoneId.of("UTC"))
.toInstant
.toEpochMilli / 1000
choose[Long](min, max).map(HttpDate.unsafeFromEpochSecond)
}
val genFiniteDuration: Gen[FiniteDuration] =
// Only consider positive durations
Gen.posNum[Long].map(_.seconds)
implicit val arbitraryExpiresHeader: Arbitrary[headers.Expires] =
Arbitrary {
for {
date <- genHttpExpireDate
} yield headers.Expires(date)
}
val http4sGenMediaRangeAndQValue: Gen[MediaRangeAndQValue] =
for {
mediaRange <- http4sGenMediaRange
qValue <- arbitrary[QValue]
} yield MediaRangeAndQValue(mediaRange, qValue)
implicit val http4sArbitraryAcceptHeader: Arbitrary[headers.Accept] =
Arbitrary {
for {
values <- nonEmptyListOf(http4sGenMediaRangeAndQValue)
} yield headers.Accept(NonEmptyList.of(values.head, values.tail: _*))
}
implicit val arbitraryRetryAfterHeader: Arbitrary[headers.`Retry-After`] =
Arbitrary {
for {
retry <- Gen.oneOf(genHttpExpireDate.map(Left(_)), Gen.posNum[Long].map(Right(_)))
} yield
retry.fold(
headers.`Retry-After`.apply,
headers.`Retry-After`.unsafeFromLong
)
}
implicit val arbitraryAgeHeader: Arbitrary[headers.Age] =
Arbitrary {
for {
// age is always positive
age <- genFiniteDuration
} yield headers.Age.unsafeFromDuration(age)
}
implicit val arbitrarySTS: Arbitrary[headers.`Strict-Transport-Security`] =
Arbitrary {
for {
// age is always positive
age <- genFiniteDuration
includeSubDomains <- Gen.oneOf(true, false)
preload <- Gen.oneOf(true, false)
} yield
headers.`Strict-Transport-Security`.unsafeFromDuration(age, includeSubDomains, preload)
}
implicit val arbitraryTransferEncoding: Arbitrary[`Transfer-Encoding`] =
Arbitrary {
for {
codings <- arbitrary[NonEmptyList[TransferCoding]]
} yield `Transfer-Encoding`(codings)
}
implicit val arbitraryRawHeader: Arbitrary[Header.Raw] =
Arbitrary {
for {
token <- genToken
value <- genFieldValue
} yield Header.Raw(token.ci, value)
}
implicit val arbitraryHeader: Arbitrary[Header] =
Arbitrary {
oneOf(
arbitrary[`Accept-Charset`],
arbitrary[Allow],
arbitrary[`Content-Length`],
arbitrary[Date],
arbitrary[Header.Raw]
)
}
implicit val arbitraryHeaders: Arbitrary[Headers] =
Arbitrary(listOf(arbitrary[Header]).map(Headers(_: _*)))
implicit val arbitraryServerSentEvent: Arbitrary[ServerSentEvent] = {
import ServerSentEvent._
def singleLineString: Gen[String] =
arbitrary[String].suchThat { s =>
        !s.contains("\r") && !s.contains("\n")
}
Arbitrary(for {
data <- singleLineString
event <- frequency(
4 -> None,
1 -> singleLineString.map(Some.apply)
)
id <- frequency(
8 -> None,
1 -> Some(EventId.reset),
1 -> singleLineString.suchThat(_.nonEmpty).map(id => Some(EventId(id)))
)
retry <- frequency(
4 -> None,
1 -> posNum[Long].map(Some.apply)
)
} yield ServerSentEvent(data, event, id, retry))
}
// https://tools.ietf.org/html/rfc2234#section-6
val genHexDigit: Gen[Char] = oneOf(
Seq('0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'A', 'B', 'C', 'D', 'E', 'F'))
private implicit def semigroupGen[T: Semigroup]: Semigroup[Gen[T]] = new Semigroup[Gen[T]] {
def combine(g1: Gen[T], g2: Gen[T]): Gen[T] = for { t1 <- g1; t2 <- g2 } yield t1 |+| t2
}
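  // For example (illustrative note, added): since Semigroup[String] concatenates,
  // const("1") |+| const("9") is a Gen[String] that always produces "19",
  // which is what lets the RFC grammars below be written as concatenations.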
private def timesBetween[T: Monoid](min: Int, max: Int, g: Gen[T]): Gen[T] =
for {
n <- choose(min, max)
l <- listOfN(n, g).suchThat(_.length == n)
} yield l.fold(Monoid[T].empty)(_ |+| _)
private def times[T: Monoid](n: Int, g: Gen[T]): Gen[T] =
listOfN(n, g).suchThat(_.length == n).map(_.reduce(_ |+| _))
private def atMost[T: Monoid](n: Int, g: Gen[T]): Gen[T] =
timesBetween(min = 0, max = n, g)
private def opt[T](g: Gen[T])(implicit ev: Monoid[T]): Gen[T] =
oneOf(g, const(ev.empty))
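  // Illustrative sketch (added; not part of the original file): these combinators
  // let RFC repetition rules be transcribed almost literally. The RFC 3986
  // production `h16 = 1*4HEXDIG` could be written as below; the name
  // `exampleH16` is hypothetical (arbitraryIPv6 defines its own local `h16`).
  private lazy val exampleH16: Gen[String] =
    timesBetween(min = 1, max = 4, genHexDigit.map(_.toString))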
// https://tools.ietf.org/html/rfc3986#appendix-A
implicit val arbitraryIPv4: Arbitrary[Uri.IPv4] = Arbitrary {
val num = numChar.map(_.toString)
def range(min: Int, max: Int) = choose(min.toChar, max.toChar).map(_.toString)
val genDecOctet = oneOf(
num,
range(49, 57) |+| num,
const("1") |+| num |+| num,
const("2") |+| range(48, 52) |+| num,
const("25") |+| range(48, 51)
)
listOfN(4, genDecOctet).map(_.mkString(".")).map(Uri.IPv4.apply)
}
// https://tools.ietf.org/html/rfc3986#appendix-A
implicit val arbitraryIPv6: Arbitrary[Uri.IPv6] = Arbitrary {
val h16 = timesBetween(min = 1, max = 4, genHexDigit.map(_.toString))
val ls32 = oneOf(h16 |+| const(":") |+| h16, arbitraryIPv4.arbitrary.map(_.address.value))
val h16colon = h16 |+| const(":")
val :: = const("::")
oneOf(
times(6, h16colon) |+| ls32,
:: |+| times(5, h16colon) |+| ls32,
opt(h16) |+| :: |+| times(4, h16colon) |+| ls32,
opt(atMost(1, h16colon) |+| h16) |+| :: |+| times(3, h16colon) |+| ls32,
opt(atMost(2, h16colon) |+| h16) |+| :: |+| times(2, h16colon) |+| ls32,
opt(atMost(3, h16colon) |+| h16) |+| :: |+| opt(h16colon) |+| ls32,
opt(atMost(4, h16colon) |+| h16) |+| :: |+| ls32,
opt(atMost(5, h16colon) |+| h16) |+| :: |+| h16,
opt(atMost(6, h16colon) |+| h16) |+| ::
).map(Uri.IPv6.apply)
}
implicit val arbitraryUriHost: Arbitrary[Uri.Host] = Arbitrary {
val genRegName =
listOf(oneOf(genUnreserved, genPctEncoded, genSubDelims)).map(rn => Uri.RegName(rn.mkString))
oneOf(arbitraryIPv4.arbitrary, arbitraryIPv6.arbitrary, genRegName)
}
implicit val arbitraryAuthority: Arbitrary[Uri.Authority] = Arbitrary {
for {
userInfo <- identifier
maybeUserInfo <- Gen.option(userInfo)
host <- arbitraryUriHost.arbitrary
      maybePort <- Gen.option(posNum[Int].suchThat(port => port >= 0 && port <= 65535))
} yield Uri.Authority(maybeUserInfo, host, maybePort)
}
val genPctEncoded: Gen[String] = const("%") |+| genHexDigit.map(_.toString) |+| genHexDigit.map(
_.toString)
val genUnreserved: Gen[Char] =
oneOf(alphaChar, numChar, const('-'), const('.'), const('_'), const('~'))
  val genSubDelims: Gen[Char] = oneOf(Seq('!', '$', '&', '\'', '(', ')', '*', '+', ',', ';', '='))
implicit val arbitraryScheme: Arbitrary[Uri.Scheme] = Arbitrary {
frequency(
5 -> Uri.Scheme.http,
5 -> Uri.Scheme.https,
1 -> scheme"HTTP",
1 -> scheme"HTTPS",
3 -> (for {
head <- alphaChar
tail <- listOf(
frequency(
36 -> alphaNumChar,
1 -> const('+'),
1 -> const('-'),
1 -> const('.')
)
)
} yield HttpCodec[Uri.Scheme].parseOrThrow(tail.mkString(head.toString, "", "")))
)
}
implicit val cogenScheme: Cogen[Uri.Scheme] =
Cogen[String].contramap(_.value.toLowerCase(Locale.ROOT))
implicit val arbitraryTransferCoding: Arbitrary[TransferCoding] = Arbitrary {
Gen.oneOf(
TransferCoding.chunked,
TransferCoding.compress,
TransferCoding.deflate,
TransferCoding.gzip,
TransferCoding.identity)
}
implicit val cogenTransferCoding: Cogen[TransferCoding] =
Cogen[String].contramap(_.coding.toLowerCase(Locale.ROOT))
/** https://tools.ietf.org/html/rfc3986 */
implicit val arbitraryUri: Arbitrary[Uri] = Arbitrary {
val genSegmentNzNc =
nonEmptyListOf(oneOf(genUnreserved, genPctEncoded, genSubDelims, const("@"))).map(_.mkString)
val genPChar = oneOf(genUnreserved, genPctEncoded, genSubDelims, const(":"), const("@"))
val genSegmentNz = nonEmptyListOf(genPChar).map(_.mkString)
val genSegment = listOf(genPChar).map(_.mkString)
val genPathEmpty = const("")
val genPathAbEmpty = listOf(const("/") |+| genSegment).map(_.mkString)
val genPathRootless = genSegmentNz |+| genPathAbEmpty
val genPathNoScheme = genSegmentNzNc |+| genPathAbEmpty
val genPathAbsolute = const("/") |+| opt(genPathRootless)
val genScheme = oneOf(Uri.Scheme.http, Uri.Scheme.https)
val genPath =
oneOf(genPathAbEmpty, genPathAbsolute, genPathNoScheme, genPathRootless, genPathEmpty)
val genFragment: Gen[Uri.Fragment] =
listOf(oneOf(genPChar, const("/"), const("?"))).map(_.mkString)
for {
scheme <- Gen.option(genScheme)
authority <- Gen.option(arbitraryAuthority.arbitrary)
path <- genPath
query <- arbitraryQuery.arbitrary
fragment <- Gen.option(genFragment)
} yield Uri(scheme, authority, path, query, fragment)
}
// TODO This could be a lot more interesting.
// See https://github.com/functional-streams-for-scala/fs2/blob/fd3d0428de1e71c10d1578f2893ee53336264ffe/core/shared/src/test/scala/fs2/TestUtil.scala#L42
implicit def genEntityBody[F[_]]: Gen[Stream[Pure, Byte]] = Gen.sized { size =>
Gen.listOfN(size, arbitrary[Byte]).map(Stream.emits)
}
// Borrowed from cats-effect tests for the time being
def cogenFuture[A](implicit ec: TestContext, cg: Cogen[Try[A]]): Cogen[Future[A]] =
Cogen { (seed: Seed, fa: Future[A]) =>
ec.tick()
fa.value match {
case None => seed
case Some(ta) => cg.perturb(seed, ta)
}
}
implicit def cogenEntityBody[F[_]](implicit F: Effect[F], ec: TestContext): Cogen[EntityBody[F]] =
catsEffectLawsCogenForIO(cogenFuture[Vector[Byte]]).contramap { stream =>
var bytes: Vector[Byte] = null
val readBytes = IO(bytes)
F.runAsync(stream.compile.toVector) {
case Right(bs) => IO { bytes = bs }
case Left(t) => IO.raiseError(t)
} *> readBytes
}
implicit def arbitraryEntity[F[_]]: Arbitrary[Entity[F]] =
Arbitrary(Gen.sized { size =>
for {
body <- genEntityBody
length <- Gen.oneOf(Some(size.toLong), None)
} yield Entity(body, length)
})
implicit def cogenEntity[F[_]](implicit F: Effect[F], ec: TestContext): Cogen[Entity[F]] =
Cogen[(EntityBody[F], Option[Long])].contramap(entity => (entity.body, entity.length))
implicit def arbitraryEntityEncoder[F[_], A](
implicit CA: Cogen[A],
AF: Arbitrary[F[Entity[F]]]): Arbitrary[EntityEncoder[F, A]] =
Arbitrary(for {
f <- arbitrary[A => F[Entity[F]]]
hs <- arbitrary[Headers]
} yield EntityEncoder.encodeBy(hs)(f))
}
object ArbitraryInstances extends ArbitraryInstances {
  // These were introduced after .0 and need to be kept out of the
  // trait. We can move them back into the trait in the next .0.
}
| reactormonk/http4s | testing/src/main/scala/org/http4s/testing/ArbitraryInstances.scala | Scala | apache-2.0 | 24,110 |
package test.processing
import slogger.services.processing.extraction.DataExtractorDaoMongo
import slogger.services.processing.extraction.DataExtractorImpl
import slogger.services.processing.extraction.DataExtractor
import slogger.services.processing.extraction.DataExtractorDao
import slogger.model.specification.extraction.ExtractionSpecs
import play.api.libs.json.Json
import slogger.model.common.TimePeriod
import slogger.model.specification.extraction.LastPeriod
import com.github.nscala_time.time.Imports._
import slogger.model.specification.extraction.TimeLimits
class DataExtractorTest extends BaseDaoTest {
val dao: DataExtractorDao = new DataExtractorDaoMongo(dbProvider)
val extractor: DataExtractor = new DataExtractorImpl(dao)
it should "work" in {
val specs = ExtractionSpecs(
filter = None,
projection = None,
timeLimits = TimeLimits.forLast(TimePeriod.Month),
slicing = None
)
val rez = extractor.extract(specs, DateTime.now).head
}
} | IvanMykhailov/stats-logger | core/src/test/scala/test/processing/DataExtractorTest.scala | Scala | mit | 1,009 |
package io.github.yzernik.bitcoinscodec.structures
import io.github.yzernik.bitcoinscodec.CodecSuite
import scodec.bits.{ByteVector, _}
class TxInSpec extends CodecSuite {
"TxIn codec" should {
"roundtrip" in {
roundtrip(TxIn(
OutPoint(
Hash(ByteVector.fill(32)(0x42)),
55555L),
Script(ByteVector(0x123456)),
111111L))
}
"decode" in {
val txin = TxIn(
OutPoint(
Hash(hex"6dbddb085b1d8af75184f0bc01fad58d1266e9b63b50881990e4b40d6aee3629".reverse),
0L),
Script(hex"""
483045022100f3581e1972ae8ac7c7367a7a253bc1135223
adb9a468bb3a59233f45bc578380022059af01ca17d00e4183
7a1d58e97aa31bae584edec28d35bd96923690913bae9a0141049
c02bfc97ef236ce6d8fe5d94013c721e915982acd2b12b65d9b7d59e
20a842005f8fc4e02532e873d37b96f09d6d4511ada8f14042f46614a4c
70c0f14beff5
"""),
4294967295L)
val bytes = hex"""
6D BD DB 08 5B 1D 8A F7 51 84 F0 BC 01 FA D5 8D
12 66 E9 B6 3B 50 88 19 90 E4 B4 0D 6A EE 36 29
00 00 00 00
8B
48 30 45 02 21 00 F3 58 1E 19 72 AE 8A C7 C7 36
7A 7A 25 3B C1 13 52 23 AD B9 A4 68 BB 3A 59 23
3F 45 BC 57 83 80 02 20 59 AF 01 CA 17 D0 0E 41
83 7A 1D 58 E9 7A A3 1B AE 58 4E DE C2 8D 35 BD
96 92 36 90 91 3B AE 9A 01 41 04 9C 02 BF C9 7E
F2 36 CE 6D 8F E5 D9 40 13 C7 21 E9 15 98 2A CD
2B 12 B6 5D 9B 7D 59 E2 0A 84 20 05 F8 FC 4E 02
53 2E 87 3D 37 B9 6F 09 D6 D4 51 1A DA 8F 14 04
2F 46 61 4A 4C 70 C0 F1 4B EF F5
FF FF FF FF
""".toBitVector
shouldDecodeFullyTo(TxIn.codec, bytes, txin)
}
}
}
| yzernik/bitcoin-scodec | src/test/scala/io/github/yzernik/bitcoinscodec/structures/TxInSpec.scala | Scala | mit | 1,762 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.hive.execution
import java.io._
import java.util.Properties
import scala.collection.JavaConverters._
import scala.util.control.NonFatal
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.hive.ql.exec.{RecordReader, RecordWriter}
import org.apache.hadoop.hive.serde.serdeConstants
import org.apache.hadoop.hive.serde2.AbstractSerDe
import org.apache.hadoop.hive.serde2.objectinspector._
import org.apache.hadoop.io.Writable
import org.apache.spark.TaskContext
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.execution._
import org.apache.spark.sql.hive.HiveInspectors
import org.apache.spark.sql.hive.HiveShim._
import org.apache.spark.sql.types.DataType
import org.apache.spark.util.{CircularBuffer, Utils}
/**
* Transforms the input by forking and running the specified script.
*
* @param script the command that should be executed.
* @param output the attributes that are produced by the script.
* @param child logical plan whose output is transformed.
* @param ioschema the class set that defines how to handle input/output data.
*/
private[hive] case class HiveScriptTransformationExec(
script: String,
output: Seq[Attribute],
child: SparkPlan,
ioschema: ScriptTransformationIOSchema)
extends BaseScriptTransformationExec {
import HiveScriptIOSchema._
private def createOutputIteratorWithSerde(
writerThread: BaseScriptTransformationWriterThread,
inputStream: InputStream,
proc: Process,
stderrBuffer: CircularBuffer,
outputSerde: AbstractSerDe,
outputSoi: StructObjectInspector,
hadoopConf: Configuration): Iterator[InternalRow] = {
new Iterator[InternalRow] with HiveInspectors {
private var completed = false
val scriptOutputStream = new DataInputStream(inputStream)
val scriptOutputReader =
recordReader(ioschema, scriptOutputStream, hadoopConf).orNull
var scriptOutputWritable: Writable = null
val reusedWritableObject = outputSerde.getSerializedClass.getConstructor().newInstance()
val mutableRow = new SpecificInternalRow(output.map(_.dataType))
@transient
lazy val unwrappers = outputSoi.getAllStructFieldRefs.asScala.map(unwrapperFor)
override def hasNext: Boolean = {
if (completed) {
return false
}
try {
if (scriptOutputWritable == null) {
scriptOutputWritable = reusedWritableObject
if (scriptOutputReader != null) {
if (scriptOutputReader.next(scriptOutputWritable) <= 0) {
checkFailureAndPropagate(writerThread, null, proc, stderrBuffer)
completed = true
return false
}
} else {
try {
scriptOutputWritable.readFields(scriptOutputStream)
} catch {
case _: EOFException =>
              // This means that the stdout of `proc` (i.e. the TRANSFORM process) has been exhausted.
// Ideally the proc should *not* be alive at this point but
// there can be a lag between EOF being written out and the process
// being terminated. So explicitly waiting for the process to be done.
checkFailureAndPropagate(writerThread, null, proc, stderrBuffer)
completed = true
return false
}
}
}
true
} catch {
case NonFatal(e) =>
// If this exception is due to abrupt / unclean termination of `proc`,
// then detect it and propagate a better exception message for end users
checkFailureAndPropagate(writerThread, e, proc, stderrBuffer)
throw e
}
}
override def next(): InternalRow = {
if (!hasNext) {
throw new NoSuchElementException
}
val raw = outputSerde.deserialize(scriptOutputWritable)
scriptOutputWritable = null
val dataList = outputSoi.getStructFieldsDataAsList(raw)
var i = 0
while (i < dataList.size()) {
if (dataList.get(i) == null) {
mutableRow.setNullAt(i)
} else {
unwrappers(i)(dataList.get(i), mutableRow, i)
}
i += 1
}
mutableRow
}
}
}
override def processIterator(
inputIterator: Iterator[InternalRow],
hadoopConf: Configuration): Iterator[InternalRow] = {
val (outputStream, proc, inputStream, stderrBuffer) = initProc
val (inputSerde, inputSoi) = initInputSerDe(ioschema, child.output).getOrElse((null, null))
    // For HiveScriptTransformationExec, if inputSerde is null (even when outputSerde is not),
    // rows are passed to the script as plain strings, so the input data must be cast to string too.
val finalInput = if (inputSerde == null) {
inputExpressionsWithoutSerde
} else {
child.output
}
val outputProjection = new InterpretedProjection(finalInput, child.output)
// This new thread will consume the ScriptTransformation's input rows and write them to the
// external process. That process's output will be read by this current thread.
val writerThread = HiveScriptTransformationWriterThread(
inputIterator.map(outputProjection),
finalInput.map(_.dataType),
inputSerde,
inputSoi,
ioschema,
outputStream,
proc,
stderrBuffer,
TaskContext.get(),
hadoopConf
)
val (outputSerde, outputSoi) = {
initOutputSerDe(ioschema, output).getOrElse((null, null))
}
val outputIterator = if (outputSerde == null) {
createOutputIteratorWithoutSerde(writerThread, inputStream, proc, stderrBuffer)
} else {
createOutputIteratorWithSerde(
writerThread, inputStream, proc, stderrBuffer, outputSerde, outputSoi, hadoopConf)
}
writerThread.start()
outputIterator
}
override protected def withNewChildInternal(newChild: SparkPlan): HiveScriptTransformationExec =
copy(child = newChild)
}
private[hive] case class HiveScriptTransformationWriterThread(
iter: Iterator[InternalRow],
inputSchema: Seq[DataType],
inputSerde: AbstractSerDe,
inputSoi: StructObjectInspector,
ioSchema: ScriptTransformationIOSchema,
outputStream: OutputStream,
proc: Process,
stderrBuffer: CircularBuffer,
taskContext: TaskContext,
conf: Configuration)
extends BaseScriptTransformationWriterThread with HiveInspectors {
import HiveScriptIOSchema._
override def processRows(): Unit = {
val dataOutputStream = new DataOutputStream(outputStream)
val scriptInputWriter = recordWriter(ioSchema, dataOutputStream, conf).orNull
if (inputSerde == null) {
processRowsWithoutSerde()
} else {
// Convert Spark InternalRows to hive data via `HiveInspectors.wrapperFor`.
val hiveData = new Array[Any](inputSchema.length)
val fieldOIs = inputSoi.getAllStructFieldRefs.asScala.map(_.getFieldObjectInspector).toArray
val wrappers = fieldOIs.zip(inputSchema).map { case (f, dt) => wrapperFor(f, dt) }
iter.foreach { row =>
var i = 0
while (i < fieldOIs.length) {
hiveData(i) = if (row.isNullAt(i)) null else wrappers(i)(row.get(i, inputSchema(i)))
i += 1
}
val writable = inputSerde.serialize(hiveData, inputSoi)
if (scriptInputWriter != null) {
scriptInputWriter.write(writable)
} else {
prepareWritable(writable, ioSchema.outputSerdeProps).write(dataOutputStream)
}
}
}
}
}
object HiveScriptIOSchema extends HiveInspectors {
def initInputSerDe(
ioschema: ScriptTransformationIOSchema,
input: Seq[Expression]): Option[(AbstractSerDe, StructObjectInspector)] = {
ioschema.inputSerdeClass.map { serdeClass =>
val (columns, columnTypes) = parseAttrs(input)
val serde = initSerDe(serdeClass, columns, columnTypes, ioschema.inputSerdeProps)
val fieldObjectInspectors = columnTypes.map(toInspector)
val objectInspector = ObjectInspectorFactory
.getStandardStructObjectInspector(columns.asJava, fieldObjectInspectors.asJava)
(serde, objectInspector)
}
}
def initOutputSerDe(
ioschema: ScriptTransformationIOSchema,
output: Seq[Attribute]): Option[(AbstractSerDe, StructObjectInspector)] = {
ioschema.outputSerdeClass.map { serdeClass =>
val (columns, columnTypes) = parseAttrs(output)
val serde = initSerDe(serdeClass, columns, columnTypes, ioschema.outputSerdeProps)
val structObjectInspector = serde.getObjectInspector().asInstanceOf[StructObjectInspector]
(serde, structObjectInspector)
}
}
private def parseAttrs(attrs: Seq[Expression]): (Seq[String], Seq[DataType]) = {
val columns = attrs.zipWithIndex.map(e => s"${e._1.prettyName}_${e._2}")
val columnTypes = attrs.map(_.dataType)
(columns, columnTypes)
}
def initSerDe(
serdeClassName: String,
columns: Seq[String],
columnTypes: Seq[DataType],
serdeProps: Seq[(String, String)]): AbstractSerDe = {
val serde = Utils.classForName[AbstractSerDe](serdeClassName).getConstructor().
newInstance()
val columnTypesNames = columnTypes.map(_.toTypeInfo.getTypeName()).mkString(",")
    val propsMap = serdeProps.toMap +
      (serdeConstants.LIST_COLUMNS -> columns.mkString(",")) +
      (serdeConstants.LIST_COLUMN_TYPES -> columnTypesNames)
val properties = new Properties()
// Can not use properties.putAll(propsMap.asJava) in scala-2.12
// See https://github.com/scala/bug/issues/10418
propsMap.foreach { case (k, v) => properties.put(k, v) }
serde.initialize(null, properties)
serde
}
def recordReader(
ioschema: ScriptTransformationIOSchema,
inputStream: InputStream,
conf: Configuration): Option[RecordReader] = {
ioschema.recordReaderClass.map { klass =>
val instance = Utils.classForName[RecordReader](klass).getConstructor().
newInstance()
val props = new Properties()
// Can not use props.putAll(outputSerdeProps.toMap.asJava) in scala-2.12
// See https://github.com/scala/bug/issues/10418
ioschema.outputSerdeProps.toMap.foreach { case (k, v) => props.put(k, v) }
instance.initialize(inputStream, conf, props)
instance
}
}
def recordWriter(
ioschema: ScriptTransformationIOSchema,
outputStream: OutputStream,
conf: Configuration): Option[RecordWriter] = {
ioschema.recordWriterClass.map { klass =>
val instance = Utils.classForName[RecordWriter](klass).getConstructor().
newInstance()
instance.initialize(outputStream, conf)
instance
}
}
}
| ueshin/apache-spark | sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/HiveScriptTransformationExec.scala | Scala | apache-2.0 | 11,698 |
/*
* This is free and unencumbered software released into the public domain.
*
* Anyone is free to copy, modify, publish, use, compile, sell, or
* distribute this software, either in source code form or as a compiled
* binary, for any purpose, commercial or non-commercial, and by any
* means.
*
* In jurisdictions that recognize copyright laws, the author or authors
* of this software dedicate any and all copyright interest in the
* software to the public domain. We make this dedication for the benefit
* of the public at large and to the detriment of our heirs and
* successors. We intend this dedication to be an overt act of
* relinquishment in perpetuity of all present and future rights to this
* software under copyright law.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
* IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
* OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
* ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
* OTHER DEALINGS IN THE SOFTWARE.
*
* For more information, please refer to <http://unlicense.org/>
*/
package net.adamcin.vltpack.mojo
import java.util.Collections
import net.adamcin.vltpack._
import org.apache.maven.plugin.MojoExecutionException
import org.apache.maven.plugins.annotations.{LifecyclePhase, Mojo, Parameter, ResolutionScope}
import scala.collection.JavaConversions._
import scalax.io.Resource
/**
* Upload integration test dependencies, including vault packages and OSGi bundles, to the configured integration test
* server
* @since 1.0.0
* @author Mark Adamcin
*/
@Mojo(name = "IT-upload-tests",
defaultPhase = LifecyclePhase.PRE_INTEGRATION_TEST,
requiresDependencyResolution = ResolutionScope.TEST,
threadSafe = true)
class ITUploadTestsMojo
extends BaseITMojo
with RequiresProject
with PackageDependencies
with ResolvesArtifacts
with OutputParameters
with UploadsPackages
with PutsBundles {
/**
* Set to true to skip execution of this mojo
*/
@Parameter(property = "vltpack.skip.IT-upload-tests")
val skip = false
/**
* List of artifactIds matching test package dependencies
*/
@Parameter
var testPackages = Collections.emptyList[String]
def testPackageArtifacts = resolveByArtifactIds(testPackages.toSet)
/**
* List of artifactIds matching OSGi bundle dependencies
*/
@Parameter
var testBundles = Collections.emptyList[String]
def testBundleArtifacts = resolveByArtifactIds(testBundles.toSet)
lazy val uploadTestsChecksum = {
val calc = new ChecksumCalculator
testPackages.foreach { calc.add }
testBundles.foreach { calc.add }
calc.calculate()
}
override def execute() {
super.execute()
skipWithTestsOrExecute(skip) {
if (!testPackages.isEmpty) {
getLog.info("uploading test packages...")
testPackageArtifacts.foreach(
(packageArtifact) => {
val shouldForce = force || inputFileModified(uploadTestsSha,
List(packageArtifact.getFile))
uploadPackageArtifact(packageArtifact)(shouldForce)
}
)
}
if (!testBundles.isEmpty) {
val shouldForceUploadBundles = !uploadTestsSha.exists() ||
Resource.fromFile(uploadTestsSha).string != uploadTestsChecksum
getLog.info("uploading test bundles...")
testBundleArtifacts.foreach {
(artifact) => Option(artifact.getFile) match {
case None => throw new MojoExecutionException("failed to resolve artifact: " + artifact.getId)
case Some(bundle) => {
if (shouldForceUploadBundles || inputFileModified(uploadTestsSha, List(bundle))) {
putTestBundle(bundle) match {
case Left(ex) => throw ex
case Right(messages) => messages.foreach { getLog.info(_) }
}
}
}
}
}
}
overwriteFile(uploadTestsSha, uploadTestsChecksum)
}
}
} | adamcin/vltpack-maven-plugin | src/main/scala/net/adamcin/vltpack/mojo/ITUploadTestsMojo.scala | Scala | unlicense | 4,176 |
package views.vrm_retention
import play.api.data.Forms.nonEmptyText
import play.api.data.Mapping
object KeeperConsent {
def keeperConsent: Mapping[String] = nonEmptyText
} | dvla/vrm-retention-online | app/views/vrm_retention/KeeperConsent.scala | Scala | mit | 176 |
package io.vamp.model.resolver
import io.vamp.common.notification.{ NotificationErrorException, NotificationProvider }
import io.vamp.model.artifact._
import io.vamp.model.notification.ParserError
import io.vamp.model.parser.Parser
import scala.language.postfixOps
trait ValueResolver {
this: NotificationProvider ⇒
val marker = '$'
def referenceAsPart(reference: ValueReference) = s"$marker{${reference.reference}}"
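  // e.g. (added, illustrative): a ValueReference whose `reference` string is
  // "ports.port" renders as the part "${ports.port}".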
def referencesFor(value: String): List[ValueReference] = {
nodes(value).filter(_.isInstanceOf[VariableNode]).map(_.asInstanceOf[VariableNode].reference)
}
def resolve(value: String, provider: (ValueReference ⇒ String)): String = nodes(value).map {
case StringNode(string) ⇒ string
case VariableNode(reference) ⇒ provider(reference)
} mkString
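  // e.g. (added, illustrative): resolve("http://${host}/health", provider)
  // replaces the ${host} variable node with the provider's value for that
  // reference and concatenates the surrounding literal chunks.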
def nodes(value: String): List[TraitResolverNode] = {
try {
if (value.isEmpty) Nil
else {
val parser = new Parser[Seq[TraitResolverNode]] {
override def parser(expression: String) = new TraitResolverParser(expression)
}
def compact(nodes: List[TraitResolverNode]): List[TraitResolverNode] = nodes match {
case StringNode(string1) :: StringNode(string2) :: tail ⇒ compact(StringNode(s"$string1$string2") :: tail)
case head :: tail ⇒ head :: compact(tail)
case Nil ⇒ Nil
}
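        // e.g. (added, illustrative): compact(StringNode("a") :: StringNode("b") :: VariableNode(r) :: Nil)
        // yields StringNode("ab") :: VariableNode(r) :: Nil, merging adjacent literals.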
compact(parser.parse(value).toList)
}
}
catch {
case e: NotificationErrorException ⇒ throw e
case e: Exception ⇒ throwException(ParserError(e.getMessage))
}
}
}
| dragoslav/vamp | model/src/main/scala/io/vamp/model/resolver/ValueResolver.scala | Scala | apache-2.0 | 1,596 |
package play.api.libs.ws.ahc
import javax.inject.{ Inject, Provider, Singleton }
import akka.stream.Materializer
import com.typesafe.sslconfig.ssl.SystemConfiguration
import com.typesafe.sslconfig.ssl.debug.DebugConfiguration
import play.api.inject.ApplicationLifecycle
import play.api.libs.ws.{ WSClient, WSClientConfig, WSConfigParser }
import play.api.{ Configuration, Environment }
import play.shaded.ahc.org.asynchttpclient.{ AsyncHttpClient, DefaultAsyncHttpClient }
import scala.concurrent.Future
/**
* Provides an instance of AsyncHttpClient configured from the Configuration object.
*
* @param configuration the Play configuration
* @param environment the Play environment
* @param applicationLifecycle app lifecycle, instance is closed automatically.
*/
@Singleton
class AsyncHttpClientProvider @Inject() (
configuration: Configuration,
environment: Environment,
applicationLifecycle: ApplicationLifecycle) extends Provider[AsyncHttpClient] {
private val wsClientConfig: WSClientConfig = {
new WSConfigParser(configuration.underlying, environment.classLoader).parse()
}
private val ahcWsClientConfig: AhcWSClientConfig = {
new AhcWSClientConfigParser(wsClientConfig, configuration.underlying, environment.classLoader).parse()
}
private val asyncHttpClientConfig = new AhcConfigBuilder(ahcWsClientConfig).build()
private def configure(): Unit = {
// JSSE depends on various system properties which must be set before JSSE classes
// are pulled into memory, so these must come first.
val loggerFactory = StandaloneAhcWSClient.loggerFactory
if (wsClientConfig.ssl.debug.enabled) {
new DebugConfiguration(loggerFactory).configure(wsClientConfig.ssl.debug)
}
new SystemConfiguration(loggerFactory).configure(wsClientConfig.ssl)
}
lazy val get: AsyncHttpClient = {
configure()
new DefaultAsyncHttpClient(asyncHttpClientConfig)
}
// Always close the AsyncHttpClient afterwards.
applicationLifecycle.addStopHook(() =>
Future.successful(get.close())
)
}
@Singleton
class WSClientProvider @Inject() (asyncHttpClient: AsyncHttpClient)(implicit materializer: Materializer)
extends Provider[WSClient] {
lazy val get: WSClient = {
new AhcWSClient(new StandaloneAhcWSClient(asyncHttpClient))
}
}
| aradchykov/playframework | framework/src/play-ahc-ws/src/main/scala/play/api/libs/ws/ahc/AhcWSClientProvider.scala | Scala | apache-2.0 | 2,307 |
package it.unipd.dei.diversity
import java.util.concurrent.TimeUnit
import it.unipd.dei.diversity.ExperimentUtil.{jMap, timed}
import it.unipd.dei.diversity.matroid._
import it.unipd.dei.experiment.Experiment
import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession
import org.rogach.scallop.ScallopConf
import scala.collection.mutable
import scala.reflect.ClassTag
import scala.util.Random
object MainMatroid {
// Set up Spark lazily, it will be initialized only if the algorithm needs it.
lazy val spark = {
val sparkConfig = new SparkConf(loadDefaults = true)
.setAppName("Matroid diversity")
val _s = SparkSession.builder()
.config(sparkConfig)
.getOrCreate()
_s.sparkContext.hadoopConfiguration.set("mapreduce.input.fileinputformat.input.dir.recursive", "true")
_s
}
def cliqueDiversity[T](subset: IndexedSubset[T],
distance: (T, T) => Double): Double = {
val n = subset.superSet.length
var currentDiversity: Double = 0
var i = 0
while (i<n) {
if (subset.contains(i)) {
var j = i + 1
while (j < n) {
if (subset.contains(j)) {
currentDiversity += distance(subset.get(i).get, subset.get(j).get)
}
j += 1
}
}
i += 1
}
currentDiversity
}
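  // Reference sketch (added for illustration; not part of the original file):
  // the imperative loops above compute the sum of pairwise distances over the
  // selected indices, i.e. the same value as this slower but more direct form.
  def cliqueDiversityNaive[T](subset: IndexedSubset[T],
                              distance: (T, T) => Double): Double = {
    val selected = (0 until subset.superSet.length).filter(subset.contains)
    selected.combinations(2).map { case Seq(i, j) =>
      distance(subset.get(i).get, subset.get(j).get)
    }.sum
  }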
def main(args: Array[String]) {
val opts = new Opts(args)
opts.verify()
    require(Seq(opts.categories.isDefined, opts.genres.isDefined, opts.topics.isDefined, opts.uniform.isDefined).count(identity) == 1,
      "exactly one of categories, genres, topics, and uniform (cardinality) must be defined")
val experiment = new Experiment()
.tag("input", opts.input())
.tag("k", opts.k())
.tag("algorithm", opts.algorithm())
.tag("version", BuildInfo.version)
.tag("git-revision", BuildInfo.gitRevision)
.tag("git-revcount", BuildInfo.gitRevCount)
.tag("git-branch", BuildInfo.gitBranch)
for ((k, v) <- SerializationUtils.metadata(opts.input())) {
experiment.tag("input." + k, v)
}
val setup =
if (opts.categories.isDefined) {
new WikipediaExperiment(spark, opts.input(), opts.categories.get)
} else if (opts.genres.isDefined) {
new SongExperiment(spark, opts.input(), opts.genres())
} else if (opts.topics.isDefined) {
new WikipediaLDAExperiment(spark, opts.input())
} else if (opts.uniform.isDefined) {
opts.uniform() match {
case "glove" => new GloVeExperiment(spark, opts.input(), opts.k())
}
} else {
throw new IllegalArgumentException("Must provide at least one between categories, genres, or topics")
}
run(opts, setup, experiment)
val counters = PerformanceMetrics.registry.getCounters.entrySet().iterator()
while(counters.hasNext) {
val c = counters.next()
experiment.append("counters",
jMap("name" -> c.getKey, "count" -> c.getValue.getCount))
}
println(experiment.toSimpleString)
experiment.saveAsJsonFile(true)
}
private def run[T:ClassTag](opts: Opts, setup: ExperimentalSetup[T], experiment: Experiment): Any = {
opts.algorithm() match {
case "random" =>
val dataset = setup.loadDataset().rdd.cache()
        // Force loading of the dataset, so as not to measure the loading time too.
dataset.count()
val k = opts.k()
val ((solution, numberOfSamples), time) = timed {
println("Taking first sample")
var sample = dataset.takeSample(withReplacement = false, k)
var numSamples = 1
while (!setup.matroid.isIndependent(sample)) {
println(s"Taking sample $numSamples")
sample = dataset.takeSample(withReplacement = false, k)
numSamples += 1
}
(sample, numSamples)
}
require(solution.size == opts.k(), "Solution of wrong size")
experiment.append("result",
jMap(
"diversity" -> Diversity.clique(solution, setup.distance),
"number-of-samples" -> numberOfSamples,
"total-time" -> ExperimentUtil.convertDuration(time, TimeUnit.MILLISECONDS)))
for (wp <- solution) {
experiment.append("solution",
jMap(setup.pointToMap(wp).toSeq: _*))
}
case "local-search" =>
experiment.tag("gamma", opts.gamma())
val localDataset: Array[T] = setup.loadLocally()
val shuffledDataset = Random.shuffle(localDataset.toVector).toArray
val (solution, t) = timed {
LocalSearch.remoteClique[T](
shuffledDataset, opts.k(), opts.gamma(), setup.matroid, setup.distance)
}
require(solution.size == opts.k(), "Solution of wrong size")
experiment.append("times",
jMap(
"component" -> "local-search",
"time" -> ExperimentUtil.convertDuration(t, TimeUnit.MILLISECONDS)))
experiment.append("result",
jMap(
"diversity" -> Diversity.clique(solution, setup.distance),
"total-time" -> ExperimentUtil.convertDuration(t, TimeUnit.MILLISECONDS)))
for (wp <- solution) {
experiment.append("solution",
jMap(setup.pointToMap(wp).toSeq: _*))
}
case "streaming" =>
require(opts.tau.isDefined)
val tau = opts.tau()
val parallelism = opts.parallelism.get.getOrElse(spark.sparkContext.defaultParallelism)
experiment.tag("tau", tau)
var coresetSize: Option[Long] = None
val datasetIter = setup.loadDataset().toLocalIterator()
val dataset: mutable.ArrayBuffer[T] = mutable.ArrayBuffer()
while (datasetIter.hasNext()) {
dataset.append(datasetIter.next())
}
val shuffledDataset = Random.shuffle(dataset)
val streamingCoreset = Algorithm.streaming(
shuffledDataset.iterator, opts.k(), tau, setup.matroid, setup.distance, experiment)
val sizes = streamingCoreset.delegateSizes
val originalCoresetSize: Int = sizes.sum
        println(s"Delegate set sizes:\n${sizes.mkString("\n")}")
for ((size, i) <- sizes.zipWithIndex) {
experiment.append("delegates",
jMap(
"componentId" -> i,
"size" -> size
)
)
}
val coreset =
if (opts.sparsify()) {
val (_c, _t) = timed {
MapReduceCoreset.run(
streamingCoreset.points.toArray,
tau, opts.k(), setup.matroid, setup.distance)
}
experiment.append("times",
jMap(
"component" -> "sparsification",
"time" -> ExperimentUtil.convertDuration(_t, TimeUnit.MILLISECONDS)))
println(s"Sparsified coreset has ${_c.length} points")
_c
} else {
streamingCoreset
}
val (solution, lsTime) = timed {
LocalSearch.remoteClique[T](
coreset.points,
opts.k(), opts.gamma(), setup.matroid, setup.distance)
}
require(solution.size == opts.k(), "Solution of wrong size")
require(setup.matroid.isIndependent(solution), "The solution is not an independent set!")
experiment.append("times",
jMap(
"component" -> "local-search",
"time" -> ExperimentUtil.convertDuration(lsTime, TimeUnit.MILLISECONDS)))
experiment.append("result",
jMap(
"coreset-size" -> originalCoresetSize,
"diversity" -> Diversity.clique(solution, setup.distance)))
for (wp <- solution) {
experiment.append("solution",
jMap(setup.pointToMap(wp).toSeq: _*))
}
case "mapreduce" =>
require(opts.tau.isDefined)
val tau = opts.tau()
val tauParallel = opts.tauP.get.getOrElse(tau)
val parallelism = opts.parallelism.get.getOrElse(spark.sparkContext.defaultParallelism)
experiment.tag("tau", tau)
experiment.tag("tau-parallel", tauParallel)
experiment.tag("num-partitions", parallelism)
experiment.tag("sparsify", opts.sparsify())
var coresetSize: Option[Long] = None
val dataset = setup.loadDataset().rdd.repartition(parallelism).glom().cache()
dataset.count()
val mrCoreset = Algorithm.mapReduce(
dataset, tauParallel, opts.k(), setup.matroid, setup.distance, experiment)
println(s"Computed coreset with ${mrCoreset.length} points and radius ${mrCoreset.radius}")
val coreset =
if (opts.sparsify()) {
val (_c, _t) = timed {
MapReduceCoreset.run(mrCoreset.points.toArray, tau, opts.k(), setup.matroid, setup.distance)
}
experiment.append("times",
jMap(
"component" -> "sparsification",
"time" -> ExperimentUtil.convertDuration(_t, TimeUnit.MILLISECONDS)))
println(s"Sparsified coreset has ${_c.length} points")
_c
} else {
mrCoreset
}
val (solution, lsTime) = timed {
LocalSearch.remoteClique[T](
coreset.points, opts.k(), 0.0, setup.matroid, setup.distance)
}
require(solution.size == opts.k(), "Solution of wrong size")
require(setup.matroid.isIndependent(solution), "The solution is not an independent set!")
experiment.append("times",
jMap(
"component" -> "local-search",
"time" -> ExperimentUtil.convertDuration(lsTime, TimeUnit.MILLISECONDS)))
experiment.append("result",
jMap(
"diversity" -> Diversity.clique(solution, setup.distance),
"large-coreset-size" -> mrCoreset.length,
"small-coreset-size" -> coreset.length))
for (wp <- solution) {
experiment.append("solution",
jMap(setup.pointToMap(wp).toSeq: _*))
}
case "sequential-coreset" =>
// To build the sequential coreset we are running the algorithm on the
// dataset coalesced to a single partition, on a worker node. This is
// in contrast to the natural approach of loading the entire dataset
// locally and then running the algorithm directly on it. In our
// experimental setup, doing the awkward thing is some 20% faster for
// some reason I still don't understand.
val gamma = opts.gamma.get.getOrElse(0.0)
experiment.tag("k'", opts.tau())
experiment.tag("tau", opts.tau())
experiment.tag("ls-subroutine-gamma", gamma)
val tau = opts.tau()
val k = opts.k()
val distance = setup.distance
var coresetSize: Option[Long] = None
val dataset = setup.loadDataset().rdd.repartition(1).glom().cache()
dataset.count()
val bMatroid = dataset.sparkContext.broadcast(setup.matroid)
val localDataset: Array[T] = setup.loadLocally()
//val shuffledDataset = Random.shuffle(localDataset.toVector).toArray
val ((solution, coresetTime, localSearchTime), totalTime) =
timed {
val (coreset, _coresetTime) = timed {
dataset.map { pointsArr =>
require(pointsArr.length > 0, "Cannot work on empty partitions!")
MapReduceCoreset.run(
pointsArr,
tau,
k,
bMatroid.value,
distance)
}.collect().apply(0)
// MapReduceCoreset.run(
// localDataset, opts.tau(), opts.k(), setup.matroid, setup.distance)
}
coresetSize = Some(coreset.length)
        println(s"Built coreset of ${coreset.length} points from ${localDataset.length} input points")
val (sol, _lsTime) = timed {
LocalSearch.remoteClique[T](
coreset.points, opts.k(), gamma, setup.matroid, setup.distance)
}
(sol, _coresetTime, _lsTime)
}
require(solution.size == opts.k(), "Solution of wrong size")
experiment.append("times",
jMap(
"component" -> "local-search",
"time" -> ExperimentUtil.convertDuration(localSearchTime, TimeUnit.MILLISECONDS)))
experiment.append("times",
jMap(
"component" -> "coreset",
"time" -> ExperimentUtil.convertDuration(coresetTime, TimeUnit.MILLISECONDS)))
experiment.append("result",
jMap(
"diversity" -> Diversity.clique(solution, setup.distance),
"coreset-size" -> coresetSize.get,
"total-time" -> ExperimentUtil.convertDuration(totalTime, TimeUnit.MILLISECONDS),
"coreset-time" -> ExperimentUtil.convertDuration(coresetTime, TimeUnit.MILLISECONDS),
"local-search-time" -> ExperimentUtil.convertDuration(localSearchTime, TimeUnit.MILLISECONDS)))
for (wp <- solution) {
experiment.append("solution",
jMap(setup.pointToMap(wp).toSeq: _*))
}
case "clustering-radius" =>
require(opts.epsilon.isDefined)
experiment.tag("epsilon", opts.epsilon())
val localDataset: Array[T] = setup.loadLocally()
val coreset = withRadiusExp[T](
localDataset, opts.epsilon(), Random.nextInt(localDataset.length), setup.distance, experiment)
}
}
class Opts(args: Array[String]) extends ScallopConf(args) {
lazy val algorithm = opt[String](default = Some("local-search"))
lazy val k = opt[Int](name="target", short='k', required = true)
lazy val gamma = opt[Double](default = Some(0.0))
lazy val tau = opt[Int]()
lazy val tauP = opt[Int]()
lazy val parallelism = opt[Int]()
lazy val epsilon = opt[Double]()
lazy val sparsify = toggle(
default=Some(false),
descrYes = "whether to sparsify the coreset resulting from the MapReduce algorithm")
lazy val input = opt[String](required = true)
lazy val categories = opt[String](required = false, argName = "FILE")
lazy val genres = opt[String](required = false, argName = "FILE")
lazy val topics = toggle()
lazy val uniform = opt[String](argName = "DATA TYPE", validate = Set("glove").contains,
descr = "Use a cardinality matroid, and specify the data type")
lazy val diameter = opt[Double](required = false, argName = "DELTA")
}
// Quick and dirty experiment to check how the radius decreases when doing a clustering
def withRadiusExp[T: ClassTag](points: IndexedSeq[T],
epsilon: Double,
startIdx: Int,
distance: (T, T) => Double,
experiment: Experiment): IndexedSeq[T] = {
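    // Farthest-first ("Gonzalez") traversal: repeatedly promote the point
    // farthest from the current centers until the covering radius drops to
    // epsilon, logging the radius after each iteration.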
val n = points.size
val minDist = Array.fill(n)(Double.PositiveInfinity)
val centers = IndexedSubset.apply(points)
// Init the result with an arbitrary point
centers.add(startIdx)
var i = 0
var radius: Double = 0d
var nextCenter = 0
while (i < n) {
val d = distance(points(startIdx), points(i))
minDist(i) = d
if (d > radius) {
radius = d
nextCenter = i
}
i += 1
}
experiment.append("clustering-radius",
jMap(
"iteration" -> 0,
"radius" -> radius))
var iteration = 1
while (radius > epsilon && centers.size != n) {
val center = nextCenter
centers.add(center)
radius = 0.0
i = 0
// Re-compute the radius and find the farthest node
while (i < n) {
val d = distance(points(center), points(i))
if (d < minDist(i)) {
minDist(i) = d
}
if (minDist(i) > radius) {
radius = minDist(i)
nextCenter = i
}
i += 1
}
println(s"[$iteration] r=$radius")
experiment.append("clustering-radius",
jMap(
"iteration" -> iteration,
"radius" -> radius))
iteration += 1
}
centers.toVector
}
}
| Cecca/diversity-maximization | experiments/src/main/scala/it/unipd/dei/diversity/MainMatroid.scala | Scala | gpl-3.0 | 16,208 |
/*
* Copyright 2022 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package config
import java.time.Instant
import mocks.Mock
import org.mockito.{ArgumentCaptor, ArgumentMatchers}
import org.scalatestplus.play.guice.GuiceOneAppPerSuite
import play.api.Configuration
import play.api.http.Status
import play.api.libs.json.{Json, OFormat}
import play.api.mvc.RequestHeader
import play.api.test.FakeRequest
import router.errors.ErrorCode
import support.UnitSpec
import uk.gov.hmrc.auth.core.InsufficientEnrolments
import uk.gov.hmrc.http.{HeaderCarrier, JsValidationException, NotFoundException}
import uk.gov.hmrc.play.audit.http.connector.AuditConnector
import uk.gov.hmrc.play.audit.http.connector.AuditResult.Success
import uk.gov.hmrc.play.audit.model.DataEvent
import uk.gov.hmrc.play.bootstrap.backend.http.ErrorResponse
import uk.gov.hmrc.play.bootstrap.config.HttpAuditEvent
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.{ExecutionContext, Future}
import scala.util.control.NoStackTrace
/**
* Unit tests for [[ErrorHandler]]
*/
class ErrorHandlerSpec extends UnitSpec with Mock with GuiceOneAppPerSuite {
implicit val erFormats: OFormat[ErrorResponse] = Json.format[ErrorResponse]
def versionHeader(version: String): (String, String) = ACCEPT -> s"application/vnd.hmrc.$version+json"
class Test(versionInHeader: Option[String]) {
val method = "some-method"
val requestHeader = FakeRequest().withHeaders(versionInHeader.map(versionHeader).toSeq: _*)
val auditConnector = mock[AuditConnector]
val httpAuditEvent = mock[HttpAuditEvent]
val eventTags: Map[String, String] = Map("transactionName" -> "event.transactionName")
def dataEvent(auditType: String) = DataEvent(
auditSource = "auditSource",
auditType = auditType,
eventId = "",
tags = eventTags,
detail = Map("test" -> "test"),
generatedAt = Instant.now()
)
def mockAuditCall(expectedAuditType: String) = {
when(httpAuditEvent.dataEvent(ArgumentMatchers.eq(expectedAuditType), any[String](), any[RequestHeader](), any[Map[String, String]]())(any[HeaderCarrier]()))
.thenReturn(dataEvent(expectedAuditType))
}
mockAuditCall("ResourceNotFound")
mockAuditCall("ClientError")
mockAuditCall("ServerValidationError")
mockAuditCall("ServerInternalError")
when(auditConnector.sendEvent(any[DataEvent]())(any[HeaderCarrier](), any[ExecutionContext]()))
.thenReturn(Future.successful(Success))
val configuration = Configuration(
"appName" -> "myApp",
"bootstrap.errorHandler.warnOnly.statusCodes" -> List.empty
)
val handler = new ErrorHandler(configuration, auditConnector, httpAuditEvent)
}
"onClientError" should {
Seq(Some("1.0"),
Some("8.0"),
Some("XXX"),
None)
.foreach(behaveAsVersion1)
def behaveAsVersion1(versionInHeader: Option[String]): Unit =
"return 404 with version 1 error body" when {
s"version header is $versionInHeader" in new Test(versionInHeader) {
val result = handler.onClientError(requestHeader, Status.NOT_FOUND, "test")
status(result) shouldBe Status.NOT_FOUND
contentAsJson(result) shouldBe Json.parse(s"""{"statusCode":404,"message":"URI not found", "requested": "${requestHeader.path}"}""")
val captor: ArgumentCaptor[DataEvent] = ArgumentCaptor.forClass(classOf[DataEvent])
verify(auditConnector, times(1)).sendEvent(captor.capture)(any[HeaderCarrier](), any[ExecutionContext]())
captor.getValue.auditType shouldBe "ResourceNotFound"
}
}
"return 404 with version 2 error body" when {
"resource not found and version 2 header is supplied" in new Test(Some("2.0")) {
val result = handler.onClientError(requestHeader, NOT_FOUND, "test")
status(result) shouldBe NOT_FOUND
contentAsJson(result) shouldBe Json.toJson(ErrorCode.matchingResourceNotFound)
val captor: ArgumentCaptor[DataEvent] = ArgumentCaptor.forClass(classOf[DataEvent])
verify(auditConnector, times(1)).sendEvent(captor.capture)(any[HeaderCarrier](), any[ExecutionContext]())
captor.getValue.auditType shouldBe "ResourceNotFound"
}
}
"return 401 with version 2 error body" when {
"unauthorised and version 2 header is supplied" in new Test(Some("2.0")) {
val result = handler.onClientError(requestHeader, UNAUTHORIZED, "test")
status(result) shouldBe UNAUTHORIZED
contentAsJson(result) shouldBe Json.toJson(ErrorCode.unauthorisedError)
val captor: ArgumentCaptor[DataEvent] = ArgumentCaptor.forClass(classOf[DataEvent])
verify(auditConnector, times(1)).sendEvent(captor.capture)(any[HeaderCarrier](), any[ExecutionContext]())
captor.getValue.auditType shouldBe "ClientError"
}
}
}
"onServerError" should {
Seq(Some("1.0"),
Some("8.0"),
Some("XXX"),
None)
.foreach(behaveAsVersion1)
def behaveAsVersion1(versionInHeader: Option[String]): Unit =
"return 404 with version 1 error body" when {
s"version header is $versionInHeader" in new Test(versionInHeader) {
val resultF = handler.onServerError(requestHeader, new NotFoundException("test") with NoStackTrace)
status(resultF) shouldEqual NOT_FOUND
contentAsJson(resultF) shouldEqual Json.parse("""{"statusCode":404,"message":"test"}""")
val captor: ArgumentCaptor[DataEvent] = ArgumentCaptor.forClass(classOf[DataEvent])
verify(auditConnector, times(1)).sendEvent(captor.capture)(any[HeaderCarrier](), any[ExecutionContext]())
captor.getValue.auditType shouldBe "ResourceNotFound"
}
}
"return 404 with version 2 error body" when {
"NotFoundException thrown and version 2 header is supplied" in new Test(Some("2.0")) {
val result = handler.onServerError(requestHeader, new NotFoundException("test") with NoStackTrace)
status(result) shouldBe NOT_FOUND
contentAsJson(result) shouldBe Json.toJson(ErrorCode.matchingResourceNotFound)
val captor: ArgumentCaptor[DataEvent] = ArgumentCaptor.forClass(classOf[DataEvent])
verify(auditConnector, times(1)).sendEvent(captor.capture)(any[HeaderCarrier](), any[ExecutionContext]())
captor.getValue.auditType shouldBe "ResourceNotFound"
}
}
"return 401 with version 2 error body" when {
"AuthorisationException thrown and version 2 header is supplied" in new Test(Some("2.0")) {
val result = handler.onServerError(requestHeader, new InsufficientEnrolments("test") with NoStackTrace)
status(result) shouldBe UNAUTHORIZED
contentAsJson(result) shouldBe Json.toJson(ErrorCode.unauthorisedError)
val captor: ArgumentCaptor[DataEvent] = ArgumentCaptor.forClass(classOf[DataEvent])
verify(auditConnector, times(1)).sendEvent(captor.capture)(any[HeaderCarrier](), any[ExecutionContext]())
captor.getValue.auditType shouldBe "ClientError"
      }
    }
    "return 400 with version 2 error body" when {
"JsValidationException thrown and version 2 header is supplied" in new Test(Some("2.0")) {
val result = handler.onServerError(requestHeader, new JsValidationException("test", "test", classOf[String], "errs") with NoStackTrace)
status(result) shouldBe BAD_REQUEST
contentAsJson(result) shouldBe Json.toJson(ErrorCode.invalidRequest)
val captor: ArgumentCaptor[DataEvent] = ArgumentCaptor.forClass(classOf[DataEvent])
verify(auditConnector, times(1)).sendEvent(captor.capture)(any[HeaderCarrier](), any[ExecutionContext]())
captor.getValue.auditType shouldBe "ServerValidationError"
}
    }
    "return 500 with version 2 error body" when {
      "any other exception is thrown and version 2 header is supplied" in new Test(Some("2.0")) {
val result = handler.onServerError(requestHeader, new Exception with NoStackTrace)
status(result) shouldBe INTERNAL_SERVER_ERROR
contentAsJson(result) shouldBe Json.toJson(ErrorCode.internalServerError)
val captor: ArgumentCaptor[DataEvent] = ArgumentCaptor.forClass(classOf[DataEvent])
verify(auditConnector, times(1)).sendEvent(captor.capture)(any[HeaderCarrier](), any[ExecutionContext]())
captor.getValue.auditType shouldBe "ServerInternalError"
}
}
}
}
| hmrc/self-assessment-api | test/config/ErrorHandlerSpec.scala | Scala | apache-2.0 | 9,039 |
package controllers
import
play.api.{ libs, mvc },
libs.json.Json.toJson,
mvc.{ Controller, RequestHeader }
import
controllers.action.APIAction
import
models.util.{ NetUtil, ModelMapper }
/**
* Created by IntelliJ IDEA.
* User: Jason
* Date: 6/13/12
* Time: 3:43 PM
*/
object Models extends Controller {
private val PlainType = "plain"
private val JsonType = "json"
def modelNames(responseType: String) = APIAction {
val names = ModelMapper.modelNames
responseType match {
      case PlainType => Ok(names.mkString("", "\n", "\n"))
      case JsonType => Ok(toJson(names map (toJson(_))) + "\n")
case x => BadRequest("Unrecognized response type requested: " + x + "\\n")
}
}
protected[controllers] def getHubNetModelURL(modelName: String)(implicit request: RequestHeader): String = {
val name = urlify(ModelMapper.unalias(modelName))
val ModelAssetURLFormat = "models/hubnet/%s.nlogo".format(_: String)
routes.LameAssets.at(ModelAssetURLFormat(name)).absoluteURL(false)
}
// Takes model names and converts them to something that can be referenced as a URL
private def urlify(name: String): String = NetUtil.encodeForURL(name)
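  // Illustrative note (added): a model name such as "Wolf Sheep Predation"
  // would be encoded into a URL-safe form here; the exact encoding scheme is
  // defined by NetUtil.encodeForURL, which is not shown in this file.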
}
| NetLogo/SimServer | app/controllers/Models.scala | Scala | gpl-2.0 | 1,212 |
package jp.co.cyberagent.aeromock.cli
import java.nio.file.Path
import jp.co.cyberagent.aeromock.AeromockTestModule
import jp.co.cyberagent.aeromock.cli.job.ValidationJob
import jp.co.cyberagent.aeromock.cli.option.CliCommand
import jp.co.cyberagent.aeromock.config.definition.ProjectDef
import jp.co.cyberagent.aeromock.test.SpecSupport
import org.specs2.mutable.{Tables, Specification}
/**
*
* @author stormcat24
*/
class CliJobSelectorSpec extends Specification with Tables with SpecSupport {
"select" in {
implicit val module = new AeromockTestModule {
override val projectConfigPath: Path = getResourcePath(".").resolve("test/project.yaml").toRealPath()
override val projectDefArround = (projectDef: ProjectDef) => {}
} ++ new AeromockCliModule
val command = CliCommand(Array("test"))
val selector = inject[CliJobSelector]
selector.select(command).getClass must_== classOf[ValidationJob]
}
}
| CyberAgent/aeromock | aeromock-cli/src/test/scala/jp/co/cyberagent/aeromock/cli/CliJobSelectorSpec.scala | Scala | mit | 942 |
/*
* Copyright 2014 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.config.scala
import com.netflix.config.DynamicPropertyFactory
import java.lang.{Integer => jInt}
/**
* User: gorzell
* Date: 8/6/12
*/
object DynamicIntProperty {
def apply(propertyName: String, defaultValue: Int) =
new DynamicIntProperty(propertyName, defaultValue)
def apply(propertyName: String, defaultValue: Int, callback: () => Unit) = {
val p = new DynamicIntProperty(propertyName, defaultValue)
p.addCallback(callback)
p
}
}
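// Illustrative usage sketch (the property name and default below are hypothetical):
//   val poolSize = DynamicIntProperty("server.poolSize", 8, () => println("poolSize changed"))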
class DynamicIntProperty(
override val propertyName: String,
override val defaultValue: Int)
extends DynamicProperty[Int]
{
override protected val box = new PropertyBox[Int, jInt] {
override val prop = DynamicPropertyFactory.getInstance().getIntProperty(propertyName, defaultValue)
def convert(jt: jInt): Int = jt
}
}
| gorzell/archaius | archaius-scala/src/main/scala/com/netflix/config/scala/DynamicIntProperty.scala | Scala | apache-2.0 | 1,413 |
package me.ukl
object DCT {
  //Table of cosine values (c0/c1) used in sumDCT and sumInverseDCT.
private val cTable = (for (x <- 0 to 7; u <- 0 to 7) yield {
Math.cos(((2 * x + 1) * u * Math.PI) / 16.0)
}).toArray
def applyFullDCT(xs: Array[Int], qMat: Array[Int]) = {
val shifted = for (x <- xs) yield x - 128
val dct = applyDCT(shifted)
quantize(dct, qMat, xs)
}
/**
* Computes the discrete cosine transform of the input data
* See formula here http://en.wikipedia.org/wiki/JPEG#Discrete_cosine_transform
*/
private def applyDCT(xs: Array[Int]) = {
for (v <- 0 to 7; u <- 0 to 7) yield {
val aU = alpha(u)
val aV = alpha(v)
0.25 * aU * aV * sumDCT(xs, u, v)
}
}
//Multiplied by the value at (u,v) for summing
//Converted to array because that makes it much faster to access
private val sumTable = (for (v <- 0 to 7; u <- 0 to 7; y <- 0 to 7; x <- 0 to 7) yield {
cTable(x * 8 + u) * cTable(y * 8 + v)
}).toArray
private def sumDCT(xs: Array[Int], u: Int, v: Int) = {
    var sum = 0.0
val sumTableOffs = v * (8 * 8 * 8) + u * (8 * 8)
for (y <- 0 to 7; x <- 0 to 7) {
val i = x + y * 8
sum += xs(i) * sumTable(sumTableOffs + i)
}
sum
}
private def alpha(u: Int) = {
if (u == 0)
0.7071067811865475 // 1 / sqrt(2)
else
1.0
}
private def quantize(dct: Seq[Double], qMat: Array[Int], out: Array[Int]) = {
var i = 0
for (in <- dct) {
out(i) = Math.round(in / qMat(i).toDouble).toInt
i += 1
}
}
def applyFullInverseDCT(xs: Array[Int], qMat: Array[Int]) = {
val in = (for (i <- 0 until xs.length) yield xs(i) * qMat(i)).toArray
val invDCT = applyInverseDCT(in)
var i = 0
for (in <- invDCT) {
xs(i) = Math.max(Math.min(127, in), -128) + 128
i += 1
}
}
private def applyInverseDCT(in: Array[Int]) = {
for (y <- 0 to 7; x <- 0 to 7) yield sumInverseDCT(in, x, y).toInt
}
//Multiplied by the value at (x,y) for summing
//Converted to array because that makes it much faster to access
private val inverseSumTable = (for (y <- 0 to 7; x <- 0 to 7; v <- 0 to 7; u <- 0 to 7) yield {
val aU = alpha(u)
val aV = alpha(v)
val c0 = cTable(x * 8 + u)
val c1 = cTable(y * 8 + v)
aU * aV * c0 * c1
}).toArray
private def sumInverseDCT(in: Array[Int], x: Int, y: Int) = {
val sumTableOffs = y * (8 * 8 * 8) + x * (8 * 8)
var sum = 0.0
for (v <- 0 to 7; u <- 0 to 7) {
val i = u + v * 8
sum += in(i) * inverseSumTable(sumTableOffs + i)
}
sum * 0.25
}
}
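// Illustrative round-trip sketch (the flat quantization table below is made up):
//   val block = Array.fill(64)(200)       // one 8x8 block of pixel values in 0..255
//   val qMat = Array.fill(64)(16)         // hypothetical quantization matrix
//   DCT.applyFullDCT(block, qMat)         // forward DCT + quantization, in place
//   DCT.applyFullInverseDCT(block, qMat)  // dequantization + inverse DCT, in place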
| unknownloner/ImageCompress | src/me/ukl/DCT.scala | Scala | mit | 2,647 |
package edu.berkeley.cs.succinct.sql.impl
import edu.berkeley.cs.succinct.SuccinctTable
import edu.berkeley.cs.succinct.SuccinctTable.QueryType
import edu.berkeley.cs.succinct.sql._
import edu.berkeley.cs.succinct.util.SuccinctConstants
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, Path}
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.Row
import org.apache.spark.sql.sources._
import org.apache.spark.sql.types.{Decimal, StructType}
import org.apache.spark.storage.StorageLevel
import org.apache.spark.succinct.sql.SuccinctTablePartition
import org.apache.spark.{OneToOneDependency, Partition, TaskContext}
/**
* Implements [[SuccinctTableRDD]]; provides implementations for the count and search methods.
*
* @constructor Creates a [[SuccinctTableRDD]] from an RDD of [[SuccinctTable]] partitions,
* the list of separators and the target storage level.
* @param partitionsRDD The RDD of partitions (SuccinctTablePartition).
* @param separators The list of separators for distinguishing between attributes.
* @param schema The schema for [[SuccinctTableRDD]]
* @param targetStorageLevel The target storage level for the RDD.
*/
class SuccinctTableRDDImpl private[succinct](
val partitionsRDD: RDD[SuccinctTablePartition],
val separators: Array[Byte],
val schema: StructType,
val minimums: Row,
val maximums: Row,
val succinctSerializer: SuccinctSerDe,
val targetStorageLevel: StorageLevel = StorageLevel.MEMORY_ONLY)
extends SuccinctTableRDD(partitionsRDD.context, List(new OneToOneDependency(partitionsRDD))) {
val recordCount = partitionsRDD.map(_.count).aggregate(0L)(_ + _, _ + _)
/** Overrides compute to return an iterator over Succinct's representation of rows. */
override def compute(split: Partition, context: TaskContext): Iterator[Row] = {
val succinctIterator = firstParent[SuccinctTablePartition].iterator(split, context)
if (succinctIterator.hasNext) {
succinctIterator.next().iterator
} else {
Iterator[Row]()
}
}
/** Set the name for the RDD; By default set to "SuccinctTableRDD". */
override def setName(_name: String): this.type = {
if (partitionsRDD.name != null) {
partitionsRDD.setName(partitionsRDD.name + ", " + _name)
} else {
partitionsRDD.setName(_name)
}
this
}
setName("SuccinctTableRDD")
/**
* Persists the Succinct partitions at the specified storage level, ignoring any existing target
* storage level.
*/
override def persist(newLevel: StorageLevel): this.type = {
partitionsRDD.persist(newLevel)
this
}
/** Un-persists the Succinct partitions using the specified blocking mode. */
override def unpersist(blocking: Boolean = true): this.type = {
partitionsRDD.unpersist(blocking)
this
}
/** Persists the Succinct partitions at `targetStorageLevel`, which defaults to MEMORY_ONLY. */
override def cache(): this.type = {
partitionsRDD.persist(targetStorageLevel)
this
}
/** Implements pruneAndFilter for [[SuccinctTableRDD]]. */
override def pruneAndFilter(requiredColumns: Array[String], filters: Array[Filter]): RDD[Row] = {
val reqColsCheck = schema.map(f => f.name -> requiredColumns.contains(f.name)).toMap
val queryList = filtersToQueries(filters)
val queryTypes = queryList.map(_._1)
val queries = queryList.map(_._2)
if (queries.length == 0)
if (requiredColumns.length == schema.length)
this
else
partitionsRDD.flatMap(_.prune(reqColsCheck))
else
partitionsRDD.flatMap(_.pruneAndFilter(reqColsCheck, queryTypes, queries))
}
/** Implements save for [[SuccinctTableRDD]] */
override def save(path: String): Unit = {
val dataPath = path.stripSuffix("/") + "/data"
val schemaPath = path.stripSuffix("/") + "/schema"
val separatorsPath = path.stripSuffix("/") + "/separators"
val minPath = path.stripSuffix("/") + "/min"
val maxPath = path.stripSuffix("/") + "/max"
val conf = new Configuration()
val fs = FileSystem.get(new Path(path.stripSuffix("/")).toUri, conf)
fs.mkdirs(new Path(dataPath))
SuccinctUtils.writeObjectToFS(conf, schemaPath, schema)
SuccinctUtils.writeObjectToFS(conf, separatorsPath, separators)
SuccinctUtils.writeObjectToFS(conf, minPath, minimums)
SuccinctUtils.writeObjectToFS(conf, maxPath, maximums)
partitionsRDD.zipWithIndex().foreach(entry => {
val i = entry._2
val partition = entry._1
val partitionLocation = dataPath + "/part-" + "%05d".format(i)
val path = new Path(partitionLocation)
val fs = FileSystem.get(path.toUri, new Configuration())
val os = fs.create(path)
partition.writeToStream(os)
os.close()
})
fs.create(new Path(s"${path.stripSuffix("/")}/_SUCCESS")).close()
}
/**
* Get the count of the number of records in the RDD.
*
* @return The count of the number of records in the RDD.
*/
override def count(): Long = {
recordCount
}
/** Implements createQuery for [[SuccinctTableRDD]] */
private[sql] def createQuery(attrIdx: Int, query: Array[Byte]): Array[Byte] = {
getSeparator(attrIdx) +: query :+ getSeparator(attrIdx + 1)
}
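  // Illustrative: for attrIdx = i the result is <sep_i> ++ query ++ <sep_{i+1}>, anchoring the
  // search to a complete attribute value rather than an arbitrary substring.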
/** Implements createQuery for [[SuccinctTableRDD]] */
private[sql] def createQuery(attribute: String, query: Array[Byte]): Array[Byte] = {
val attrIdx = getAttrIdx(attribute)
createQuery(attrIdx, query)
}
/** Implements getAttrIdx for [[SuccinctTableRDD]] */
private[sql] def getAttrIdx(attribute: String): Int = schema.lastIndexOf(schema(attribute))
/** Implements getSeparator for [[SuccinctTableRDD]] */
private[sql] def getSeparator(attrIdx: Int): Byte = {
if (attrIdx == separators.length) SuccinctConstants.EOL.toByte
else separators(attrIdx)
}
/**
* Converts filters to queries for SuccinctIndexedBuffer's recordMultiSearchIds.
*
* @param filters Array of filters to be applied.
* @return Array of queries.
*/
private[sql] def filtersToQueries(filters: Array[Filter]): Array[(QueryType, Array[Array[Byte]])] = {
filters.filter(isFilterSupported).map {
case StringStartsWith(attribute, value) =>
(QueryType.Search, Array[Array[Byte]](createPrefixQuery(attribute, value.getBytes)))
case StringEndsWith(attribute, value) =>
(QueryType.Search, Array[Array[Byte]](createSuffixQuery(attribute, value.getBytes)))
case StringContains(attribute, value) =>
(QueryType.Search, Array[Array[Byte]](value.getBytes))
case EqualTo(attribute, value) =>
val attrIdx = getAttrIdx(attribute)
val query = succinctSerializer.typeToString(attrIdx, value).getBytes
(QueryType.Search, Array[Array[Byte]](createQuery(attribute, query)))
case LessThanOrEqual(attribute, value) =>
val attrIdx = getAttrIdx(attribute)
val mValue = SuccinctTableRDD.minValue(value, maximums.get(attrIdx))
val minValue = succinctSerializer.typeToString(attrIdx, minimums.get(attrIdx)).getBytes
val maxValue = succinctSerializer.typeToString(attrIdx, mValue).getBytes
val queryBegin = createQuery(attrIdx, minValue)
val queryEnd = createQuery(attrIdx, maxValue)
(QueryType.RangeSearch, Array[Array[Byte]](queryBegin, queryEnd))
case GreaterThanOrEqual(attribute, value) =>
val attrIdx = getAttrIdx(attribute)
val mValue = SuccinctTableRDD.maxValue(value, minimums.get(attrIdx))
val minValue = succinctSerializer.typeToString(attrIdx, mValue).getBytes
val maxValue = succinctSerializer.typeToString(attrIdx, maximums.get(attrIdx)).getBytes
val queryBegin = createQuery(attrIdx, minValue)
val queryEnd = createQuery(attrIdx, maxValue)
(QueryType.RangeSearch, Array[Array[Byte]](queryBegin, queryEnd))
case LessThan(attribute, value) =>
val attrIdx = getAttrIdx(attribute)
val mValue = SuccinctTableRDD.minValue(value, maximums.get(attrIdx))
val minValue = succinctSerializer.typeToString(attrIdx, minimums.get(attrIdx)).getBytes
val maxValue = succinctSerializer.typeToString(attrIdx, prevValue(mValue)).getBytes
val queryBegin = createQuery(attrIdx, minValue)
val queryEnd = createQuery(attrIdx, maxValue)
(QueryType.RangeSearch, Array[Array[Byte]](queryBegin, queryEnd))
case GreaterThan(attribute, value) =>
val attrIdx = getAttrIdx(attribute)
val mValue = SuccinctTableRDD.maxValue(value, minimums.get(attrIdx))
val minValue = succinctSerializer.typeToString(attrIdx, nextValue(mValue)).getBytes
val maxValue = succinctSerializer.typeToString(attrIdx, maximums.get(attrIdx)).getBytes
val queryBegin = createQuery(attrIdx, minValue)
val queryEnd = createQuery(attrIdx, maxValue)
(QueryType.RangeSearch, Array[Array[Byte]](queryBegin, queryEnd))
}
}
/** Implements createPrefixQuery for [[SuccinctTableRDD]] */
private[sql] def createPrefixQuery(attribute: String, query: Array[Byte]): Array[Byte] = {
val attrIdx = schema.lastIndexOf(schema(attribute))
getSeparator(attrIdx) +: query
}
/** Implements createSuffixQuery for [[SuccinctTableRDD]] */
private[sql] def createSuffixQuery(attribute: String, query: Array[Byte]): Array[Byte] = {
val attrIdx = schema.lastIndexOf(schema(attribute))
query :+ getSeparator(attrIdx + 1)
}
/**
* Check if a filter is supported directly by Succinct data structures.
*
* @param f Filter to check.
* @return Returns true if the filter is supported;
* false otherwise.
*/
private[sql] def isFilterSupported(f: Filter): Boolean = f match {
case StringStartsWith(attribute, value) => true
case StringEndsWith(attribute, value) => true
case StringContains(attribute, value) => true
case EqualTo(attribute, value) => true
case LessThan(attribute, value) => true
case LessThanOrEqual(attribute, value) => true
case GreaterThan(attribute, value) => true
case GreaterThanOrEqual(attribute, value) => true
/** Not supported: In, IsNull, IsNotNull, And, Or, Not */
case _ => false
}
/**
* Gives the previous value for an input value.
*
* @param data The input value.
* @return The previous value.
*/
private[sql] def prevValue(data: Any): Any = {
data match {
case _: Boolean => !data.asInstanceOf[Boolean]
case _: Byte => data.asInstanceOf[Byte] - 1
case _: Short => data.asInstanceOf[Short] - 1
case _: Int => data.asInstanceOf[Int] - 1
case _: Long => data.asInstanceOf[Long] - 1
case _: Float => data.asInstanceOf[Float] - Float.MinPositiveValue
case _: Double => data.asInstanceOf[Double] - Double.MinPositiveValue
case _: java.math.BigDecimal => data.asInstanceOf[java.math.BigDecimal]
case _: BigDecimal => data.asInstanceOf[BigDecimal]
case _: Decimal => data.asInstanceOf[Decimal]
case _: String => data.asInstanceOf[String]
      case other => throw new IllegalArgumentException(s"Unexpected type: ${other.getClass.getName}")
}
}
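  // Illustrative: prevValue(5) == 4 and prevValue(true) == false, while decimal and string
  // values are returned unchanged because they have no meaningful predecessor here.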
/**
* Gives the next value for an input value.
*
* @param data The input value.
* @return The next value.
*/
private[sql] def nextValue(data: Any): Any = {
data match {
case _: Boolean => !data.asInstanceOf[Boolean]
case _: Byte => data.asInstanceOf[Byte] + 1
case _: Short => data.asInstanceOf[Short] + 1
case _: Int => data.asInstanceOf[Int] + 1
case _: Long => data.asInstanceOf[Long] + 1
case _: Float => data.asInstanceOf[Float] + Float.MinPositiveValue
case _: Double => data.asInstanceOf[Double] + Double.MinPositiveValue
      case _: java.math.BigDecimal => data.asInstanceOf[java.math.BigDecimal]
      case _: BigDecimal => data.asInstanceOf[BigDecimal]
case _: Decimal => data.asInstanceOf[Decimal]
case _: String => data.asInstanceOf[String]
      case other => throw new IllegalArgumentException(s"Unexpected type: ${other.getClass.getName}")
}
}
}
| amplab/succinct | spark/src/main/scala/edu/berkeley/cs/succinct/sql/impl/SuccinctTableRDDImpl.scala | Scala | apache-2.0 | 12,364 |
package org.scalameter
import org.scalameter.Measurer._
import org.scalameter.execution.invocation.InvocationCountMatcher
import org.scalameter.execution.invocation.InvocationCountMatcher.{MethodMatcher, ClassMatcher}
import org.scalameter.execution.invocation.instrumentation.MethodSignature
import scala.util.Try
class InvocationCountTest extends MeasurerTest[Map[String, Long], InvocationCount] {
test("BoxingCount with single type should count only autoboxing of this type") {
measureWith(BoxingCount(classOf[Double])) {
List(1.0, 2f, true, 6.0, "aaaa", Nil)
} (_.valuesIterator.sum should === (2))
measureWith(BoxingCount(classOf[Double])) {
List(1.0, 2f, true, 6.0, "aaaa", Nil, 56.7d)
} (_.valuesIterator.sum should === (3))
}
test("BoxingCount.all() should count autoboxing of all primitive types") {
measureWith(BoxingCount.all()) {
      List(1.0d, 2f, true, 5: Byte, -125: Short, 5754, 432523L, 'a', "aaaa", Nil)
} (_.valuesIterator.sum should === (8))
measureWith(BoxingCount.all()) {
      List(1.0d, 2f, true, 5: Byte, -125: Short, 5754, 432523L, 'a', "aaaa", Nil, 'b')
} (_.valuesIterator.sum should === (9))
}
test("MethodInvocationCount with matcher without any method pattern should count specific class allocations") {
measureWith(MethodInvocationCount(InvocationCountMatcher.allocations(classOf[Range]))) {
val r = 0 until 10
r.map(_ + 1)
1 to 10
new Range(0, 9, 1)
new Range(-1, 1, 1) ++ List(1, 2, 3)
List(5, 6, 7, 8, 9)
} (_.valuesIterator.sum should === (4))
measureWith(MethodInvocationCount(InvocationCountMatcher.allocations(classOf[Range]))) {
val r = 0 until 10
r.map(_ + 1)
1 to 10
new Range(0, 9, 1)
new Range(-1, 1, 1) ++ List(1, 2, 3)
List(5, 6, 7, 8, 9)
11 to 20
} (_.valuesIterator.sum should === (5))
}
test("MethodInvocationCount with ClassMatcher.Descendants should match all descendants") {
measureWith(new MethodInvocationCount(InvocationCountMatcher(
classMatcher = ClassMatcher.Descendants(classOf[List[_]], direct = false, withSelf = false),
methodMatcher = MethodMatcher.MethodName("head")
))) {
List(1, 2, 3).head
Vector(5, 6).head
Set(1, 4).head
Try(Nil.head)
} (_.valuesIterator.sum should === (2))
measureWith(new MethodInvocationCount(InvocationCountMatcher(
classMatcher = ClassMatcher.Descendants(classOf[collection.Seq[_]], direct = false, withSelf = false),
methodMatcher = MethodMatcher.MethodName("head")
))) {
List(1, 2, 3).head
Vector(5, 6).head
Set(1, 4).head
Try(Nil.head)
} (_.valuesIterator.sum should === (3))
measureWith(new MethodInvocationCount(InvocationCountMatcher(
classMatcher = ClassMatcher.Descendants(classOf[collection.Seq[_]], direct = false, withSelf = false),
methodMatcher = MethodMatcher.MethodName("head")
))) {
List(1, 2, 3).head
Vector(5, 6).head
Set(1, 4).head
Try(Nil.head)
Stream(7, 8, 9).head
} (_.valuesIterator.sum should === (4))
}
}
| kjanosz/scalameter | src/test/scala/org/scalameter/InvocationCountTest.scala | Scala | bsd-3-clause | 3,139 |
/*
*************************************************************************************
* Copyright 2011 Normation SAS
*************************************************************************************
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* In accordance with the terms of section 7 (7. Additional Terms.) of
* the GNU Affero GPL v3, the copyright holders add the following
* Additional permissions:
* Notwithstanding to the terms of section 5 (5. Conveying Modified Source
* Versions) and 6 (6. Conveying Non-Source Forms.) of the GNU Affero GPL v3
* licence, when you create a Related Module, this Related Module is
* not considered as a part of the work and may be distributed under the
* license agreement of your choice.
* A "Related Module" means a set of sources files including their
* documentation that, without modification of the Source Code, enables
* supplementary functions or services in addition to those offered by
* the Software.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/agpl.html>.
*
*************************************************************************************
*/
package com.normation.rudder.web.snippet.node
import com.normation.inventory.ldap.core.InventoryHistoryLogRepository
import com.normation.rudder.web.services.DisplayNode
import com.normation.rudder.web.model.JsNodeId
import com.normation.inventory.domain.NodeId
import com.normation.inventory.ldap.core.LDAPConstants._
import com.normation.ldap.sdk._
import BuildFilter._
import bootstrap.liftweb.LiftSpringApplicationContext.inject
import org.joda.time.DateTime
import org.slf4j.LoggerFactory
import org.joda.time.format.ISODateTimeFormat
import scala.collection.mutable.{Map => MutMap}
import scala.xml._
import net.liftweb.common._
import net.liftweb.http._
import net.liftweb.util._
import Helpers._
import net.liftweb.http.js._
import JsCmds._
import JE._
import net.liftweb.http.SHtml._
import bootstrap.liftweb.RudderConfig
import com.normation.inventory.domain.AcceptedInventory
import com.normation.inventory.domain.FullInventory
import com.normation.rudder.domain.nodes.{Node => RudderNode}
import com.normation.rudder.reports.ReportingConfiguration
/**
* A simple service that displays a NodeDetail widget from
* a list of LDIF entries
*/
class NodeHistoryViewer extends StatefulSnippet {
lazy val diffRepos = RudderConfig.inventoryHistoryLogRepository
var uuid : NodeId = null
var selectedDate : DateTime = null
var dates : Seq[(DateTime,String)] = Seq()
//id of html element to update
var hid = ""
var dispatch : DispatchIt = {
case "render" => render _
}
def render(xml:NodeSeq) : NodeSeq = {
S.attr("uuid") match {
case Full(s) => //new id of id change, init for that id
initState(s)
<div>
<p>{SHtml.ajaxSelectObj[DateTime](dates, Full(selectedDate), onSelect _)}</p>
{ diffRepos.get(uuid, selectedDate) match {
case Failure(m,_,_) => <div class="error">Error while trying to display node history. Error message: {m}</div>
case Empty => <div class="error">No history was retrieved for the chosen date</div>
case Full(sm) =>
<div id={hid}>{DisplayNode.showPannedContent(
None
, sm.data
, AcceptedInventory
, "hist"
) ++ Script(DisplayNode.jsInit(sm.data.node.main.id,sm.data.node.softwareIds,"hist", Some("node_tabs")))}</div>
} }
</div>
case _ => <div class="error">Missing node ID information: can not display history information</div>
}
}
private def initState(suuid:String) : Unit = {
val newUuid = NodeId(suuid)
if(newUuid != this.uuid) {
this.uuid = newUuid
this.hid = JsNodeId(uuid,"hist_").toString
this.dates = diffRepos.versions(uuid).get. //TODO : manage errors here
map(d => (d, d.toString()))
if(dates.nonEmpty) { selectedDate = dates.head._1 }
}
//if a version is available, try to use it
for {
version <- S.attr("version")
date <- tryo(ISODateTimeFormat.dateTimeParser.parseDateTime(version))
} {
selectedDate = date
}
}
private def onSelect(date:DateTime) : JsCmd = {
diffRepos.get(uuid, date) match {
      case Failure(m,_,_) => Alert("Error while trying to display node history. Error message: " + m)
case Empty => Alert("No history was retrieved for the chosen date")
case Full(sm) =>
SetHtml(hid,
DisplayNode.showPannedContent(None, sm.data, AcceptedInventory, "hist")) &
DisplayNode.jsInit(sm.data.node.main.id, sm.data.node.softwareIds,"hist", Some("node_tabs")
)
}
}
}
| Kegeruneku/rudder | rudder-web/src/main/scala/com/normation/rudder/web/snippet/node/NodeHistoryViewer.scala | Scala | agpl-3.0 | 5,284 |
// See LICENSE.txt for license details.
package examples
import chisel3.iotesters.{ChiselFlatSpec, Driver, PeekPokeTester}
class GCDTests(c: GCD) extends PeekPokeTester(c) {
val inputs = List( (48, 32), (7, 3), (100, 10) )
val outputs = List( 16, 1, 10)
var i = 0
do {
poke(c.io.a, inputs(i)._1)
poke(c.io.b, inputs(i)._2)
poke(c.io.load, 1)
step(1)
poke(c.io.load, 0)
var ready = false
do {
ready = peek(c.io.valid) == 1
step(1)
} while (t < 100 && ! ready)
expect(c.io.out, outputs(i))
i += 1
} while (t < 100 && i < 3)
if (t >= 100) fail
}
class GCDTester extends ChiselFlatSpec {
behavior of "GCD"
backends foreach {backend =>
it should s"test the basic gcd circuit" in {
Driver(() => new GCD, backend)((c) => new GCDTests(c)) should be (true)
}
}
}
| timtian090/Playground | chiselTutorial/src/test/scala/examples/GCDTests.scala | Scala | mit | 850 |
object EmptyObject
| som-snytt/dotty | tests/link/custom-lib/EmptyObject.scala | Scala | apache-2.0 | 19 |
package shred.man.cuda
import breeze.linalg.{DenseVector, DenseMatrix}
package object matchers
{
def approximately[A](expected: Array[A], delta: A)(implicit num: Numeric[A]) =
new ApproximateNumericArrayMatcher[A](expected, delta, num)
def approximately[A](expected: DenseMatrix[A], delta: A)(implicit num: Numeric[A]) =
new ApproximateDenseMatrixMatcher[A](expected, delta, num)
def approximately[A](expected: DenseVector[A], delta: A)(implicit num: Numeric[A]) =
new ApproximateDenseVectorMatcher[A](expected, delta, num)
}
| shredzzz/shredman | cuda/src/test/scala/shred/man/cuda/matchers/package.scala | Scala | apache-2.0 | 549 |
package dispatch.as.oauth
import dispatch.oauth._
import com.ning.http.client.Response
import com.ning.http.client.oauth._
object Token extends (Response => Either[String, RequestToken]) {
def apply(res: Response) = tokenDecode(dispatch.as.String(res))
private def decode(str: String) = java.net.URLDecoder.decode(str, "utf-8")
private def formDecode(str: String) =
(for (pair <- str.trim.split('&'))
yield pair.split('=')
).collect {
case Array(key, value) => decode(key) -> decode(value)
}
private def tokenDecode(str: String) = {
val params = formDecode(str)
(for {
("oauth_token", tok) <- params
("oauth_token_secret", secret) <- params
} yield new RequestToken(tok, secret)).headOption.toRight {
"No token found in response: \\n\\n" + str
}
}
}
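// Illustrative: a response body such as "oauth_token=abc&oauth_token_secret=xyz" (fabricated)
// decodes to Right(new RequestToken("abc", "xyz")); any other body yields a Left with the raw text.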
| kkirsche/reboot | core/src/main/scala/as/oauth/token.scala | Scala | lgpl-3.0 | 819 |
package com.hyenawarrior.oldnorsedictionary.new_word.new_pos_helpers
import android.app.Activity
import android.content.Context
import android.view.{LayoutInflater, View}
import android.widget._
import com.hyenawarrior.OldNorseGrammar.grammar.enums.Case._
import com.hyenawarrior.OldNorseGrammar.grammar.enums.GNumber.{PLURAL, SINGULAR}
import com.hyenawarrior.OldNorseGrammar.grammar.nouns.stemclasses.enum.NounStemClassEnum._
import com.hyenawarrior.OldNorseGrammar.grammar.nouns.stemclasses.NounStemClass
import com.hyenawarrior.OldNorseGrammar.grammar.nouns.{Noun, NounFormType, NounType}
import com.hyenawarrior.OldNorseGrammar.grammar.enums.{Case, GNumber}
import com.hyenawarrior.OldNorseGrammar.grammar.nouns.stemclasses.enum.NounStemClassEnum
import com.hyenawarrior.oldnorsedictionary.R
import com.hyenawarrior.oldnorsedictionary.modelview.add_new_word_panel.NounDeclensionAdapter
import com.hyenawarrior.oldnorsedictionary.modelview.{EditTextTypeListener, ItemListener}
import com.hyenawarrior.oldnorsedictionary.new_word.pages.AddNewWordActivity._
import com.hyenawarrior.oldnorsedictionary.new_word.pages.WordData
/**
* Created by HyenaWarrior on 2017.04.17..
*/
object AddNewNounHelper
{
val NOUN_DECLENSIONS = Vector(
(SINGULAR, NOMINATIVE),
(SINGULAR, ACCUSATIVE),
(SINGULAR, DATIVE),
(SINGULAR, GENITIVE),
(PLURAL, NOMINATIVE),
(PLURAL, ACCUSATIVE),
(PLURAL, DATIVE),
(PLURAL, GENITIVE)
)
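  // Note: spinner indices coming from the UI are mapped straight into this vector (see
  // createOverrideFormSetter), so the order here must match the declension spinner's entries.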
type Declension = (GNumber, Case)
}
class AddNewNounHelper(rootView: View, activity: Activity, stemClassSpinner: Spinner)
extends AbstractAddNewPosHelper(activity, stemClassSpinner, R.array.noun_types) {
type Override = (Option[NounType], Option[String])
type Parameters = (List[NounStemClassEnum], Override, Map[View, Override])
var selectedNounParameters: Parameters = (List(), (None, None), Map())
var latestNounData: Map[NounStemClassEnum, Noun] = Map()
val LL_NOUN_DECLS = rootView.findViewById[LinearLayout](R.id.llNounDeclensions)
val LL_DECL_LIST = LL_NOUN_DECLS.findViewById[LinearLayout](R.id.llNounDeclensionList)
val NounDeclensionAdapter = new NounDeclensionAdapter(activity, LL_DECL_LIST)
LL_NOUN_DECLS findViewById[View] R.id.rbIndef setOnClickListener DefinitenessListener
LL_NOUN_DECLS findViewById[View] R.id.rbDef setOnClickListener DefinitenessListener
//
override def activate(): Unit = {
super.activate()
LL_NOUN_DECLS.setVisibility(View.VISIBLE)
}
override def deactivate(): Unit = {
super.activate()
LL_NOUN_DECLS.setVisibility(View.GONE)
}
object DefinitenessListener extends View.OnClickListener {
override def onClick(v: View): Unit = NounDeclensionAdapter setDefinitness (v.getId match {
case R.id.rbIndef => false
case R.id.rbDef => true
})
}
def onRemoveOverride(tableRow: View) =
{
selectedNounParameters = selectedNounParameters match
{
case (nc, baseDef, map) => (nc, baseDef, map - tableRow)
}
fillNounForms()
}
//
def onPrimaryTextChange(str: String): Unit =
{
val (stemClass, (givenCaseNum, _), map) = selectedNounParameters
val strFixed = Option(str).filter(s => s.trim.nonEmpty)
selectedNounParameters = (stemClass, (givenCaseNum, strFixed), map)
fillNounForms()
}
override def createOverrideFormSetter(isPrimary: Boolean): View =
{
val inflater = getActivity.getSystemService(Context.LAYOUT_INFLATER_SERVICE).asInstanceOf[LayoutInflater]
val rowView = inflater.inflate(R.layout.new_word_overriding_def_row, null)
    // tag the remove button with its row view so the corresponding override can be removed later
val btnRemoveView = rowView.findViewById[View](R.id.ibRemove)
btnRemoveView.setTag(rowView)
btnRemoveView.setVisibility(if(isPrimary) View.GONE else View.VISIBLE)
// add text listeners
val etView = rowView.findViewById[EditText](R.id.etNewWord_Text)
val etListener = new EditTextTypeListener(
if(isPrimary) onPrimaryTextChange
else onTextFormOverride(rowView))
etView.addTextChangedListener(etListener)
//
val spNounDecl = rowView.findViewById[Spinner](R.id.spNounDecl)
val spListener = new ItemListener(
if(isPrimary) i => onNounDeclensionSelected(AddNewNounHelper.NOUN_DECLENSIONS(i))
else i => onNounDeclensionSelected2(rowView)(AddNewNounHelper.NOUN_DECLENSIONS(i)))
spNounDecl.setOnItemSelectedListener(spListener)
rowView
}
private def onNounDeclensionSelected(item: NounType): Unit =
{
val (stemClass, (_, givenBaseForm), map) = selectedNounParameters
selectedNounParameters = (stemClass, (Some(item), givenBaseForm), map)
fillNounForms()
}
override def onStemClassSelected(index: Int): Unit =
{
val newStemClassList = LOAD_STEM_CLASS_ENUMS(index)
val (_, givenBaseForm, map) = selectedNounParameters
selectedNounParameters = (newStemClassList, givenBaseForm, map)
fillNounForms()
}
//
override def onTextFormOverride(overridingView: View)(str: String): Unit =
{
val (stemClass, givenForm, map) = selectedNounParameters
val strFixed = Option(str).filter(s => s.trim.nonEmpty)
val overrideData = map.get(overridingView)
val newData = overrideData match
{
case Some((optNumCase, _)) => (optNumCase, strFixed)
case None => (None, strFixed)
}
selectedNounParameters = (stemClass, givenForm, map + (overridingView -> newData))
fillNounForms()
}
def onNounDeclensionSelected2(overridingView: View)(item: NounType): Unit =
{
val (stemClass, givenBaseForm, map) = selectedNounParameters
val overrideData = map.get(overridingView)
val newData = overrideData match
{
case Some((_, optStr)) => (Some(item), optStr)
case None => (Some(item), None)
}
selectedNounParameters = (stemClass, givenBaseForm, map + (overridingView -> newData))
fillNounForms()
}
val LOAD_STEM_CLASS_ENUMS: Vector[List[NounStemClassEnum]] = Vector(
List(),
List(STRONG_FEMININE_A1, STRONG_FEMININE_A2, STRONG_FEMININE_I, STRONG_FEMININE_R, WEAK_FEMININE_I, WEAK_FEMININE_U),
List(STRONG_MASCULINE_A, STRONG_MASCULINE_I, STRONG_MASCULINE_R, STRONG_MASCULINE_U, WEAK_MASCULINE_A, WEAK_MASCULINE_R),
List(STRONG_NEUTER, WEAK_NEUTER_U),
List(STRONG_MASCULINE_A),
List(STRONG_MASCULINE_I),
List(STRONG_MASCULINE_U),
List(STRONG_MASCULINE_R),
List(STRONG_FEMININE_A1),
List(STRONG_FEMININE_A2),
List(STRONG_FEMININE_I),
List(STRONG_FEMININE_R),
List(STRONG_NEUTER),
List(WEAK_MASCULINE_A),
List(WEAK_MASCULINE_R),
List(WEAK_FEMININE_I),
List(WEAK_FEMININE_U),
List(WEAK_NEUTER_U)
)
private def generateFormsFrom(stemClass: NounStemClass, baseDef: (NounFormType, String), map: Map[View, Override])
: Option[Noun] = try {
val mapForms: Map[NounFormType, String] = map.values.map {
case (Some(nf), Some(str)) => (nf, false) -> str
}.toMap
val baseForm: Map[NounFormType, String] = Map(baseDef)
Some(Noun(stemClass, baseForm ++ mapForms))
} catch {
case e: RuntimeException =>
val msg = e.getMessage
android.util.Log.w(AddNewVerbHelper.getClass.getSimpleName, msg)
None
}
private def fillNounForms(): Unit = selectedNounParameters match
{
case (maybeEmptyList, (Some(numCase), Some(str)), map) =>
val listOfNSCE = if(maybeEmptyList.isEmpty) NounStemClassEnum.values.toList else maybeEmptyList
val wordMaps: List[(NounStemClassEnum, Noun)] = listOfNSCE
.map(nsce => nsce -> generateFormsFrom(nsce, ((numCase, false), str), map))
.collect{ case (k, Some(noun)) => k -> noun }
NounDeclensionAdapter resetItems wordMaps
latestNounData = wordMaps.toMap
case _ => ()
}
override def getWordFormsBy(view: View): WordData =
{
val optNounStemClassE = NounDeclensionAdapter.getSelectorTagOf(view)
optNounStemClassE match
{
case Some(nounStemClassE) =>
val selectedNoun = latestNounData(nounStemClassE)
WordData(selectedNoun, List())
case _ => throw new IllegalStateException("Unknown control")
}
}
}
| HyenaSoftware/IG-Dictionary | app/src/main/scala/com/hyenawarrior/oldnorsedictionary/new_word/new_pos_helpers/AddNewNounHelper.scala | Scala | lgpl-3.0 | 7,905 |
package mesosphere.elasticsearch
import org.yaml.snakeyaml.Yaml
import java.io.FileReader
import java.util
import scala.collection.JavaConverters._
import org.apache.commons.cli.MissingArgumentException
import java.net.InetAddress
import org.apache.log4j.{Level, BasicConfigurator}
/**
* ElasticSearch on Mesos
* Takes care of most of the "annoying things" like distributing binaries and configuration out to the nodes.
*
* @author erich<IDonLikeSpam>nachbar.biz
* @author Connor Doyle ([email protected])
* @author Florian Leibert ([email protected])
*/
object Main extends App with Logger {
val yaml = new Yaml()
val mesosConf = yaml.load(new FileReader("config/mesos.yml"))
.asInstanceOf[util.LinkedHashMap[String, Any]].asScala
// Get configs out of the mesos.yaml file
val execUri = mesosConf.getOrElse("mesos.executor.uri",
throw new MissingArgumentException("Please specify the mesos.executor.uri")).toString
val masterUrl = mesosConf.getOrElse("mesos.master.url",
throw new MissingArgumentException("Please specify the mesos.master.url")).toString
val numberOfHwNodes = mesosConf.getOrElse("elasticsearch.noOfHwNodes", 1).toString.toInt
val confServerPort = mesosConf.getOrElse("elasticsearch.confServer.port", 8282).toString.toInt
val confServerHostName = mesosConf.getOrElse("elasticsearch.confServer.hostname",
InetAddress.getLocalHost().getHostName()).toString
// Find all resource.* settings in mesos.yaml and prep them for submission to Mesos
val resources = mesosConf.filter {
_._1.startsWith("resource.")
}.map {
case (k, v) => k.replaceAllLiterally("resource.", "") -> v.toString.toFloat
}
//TODO load the ElasticSearch log-properties file
BasicConfigurator.configure()
getRootLogger.setLevel(Level.INFO)
info("Starting ElasticSearch on Mesos.")
  // Instantiate the framework and scheduler
val scheduler = new ElasticSearchScheduler(masterUrl,
execUri,
confServerHostName,
confServerPort,
resources,
numberOfHwNodes)
  val schedThread = new Thread(scheduler)
  schedThread.start()
scheduler.waitUnitInit
// Start serving the ElasticMesos config
val configServer = new ConfigServer(confServerPort, "config", scheduler)
info("ElasticSearch nodes starting on: " + scheduler.taskSet.mkString(","))
}
| mesosphere/elasticsearch-mesos | src/main/scala/mesosphere/elasticsearch/Main.scala | Scala | apache-2.0 | 2,320 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ml.dmlc.mxnetexamples.rnn
import ml.dmlc.mxnet.Callback.Speedometer
import ml.dmlc.mxnet._
import BucketIo.BucketSentenceIter
import ml.dmlc.mxnet.optimizer.SGD
import org.kohsuke.args4j.{CmdLineParser, Option}
import org.slf4j.{Logger, LoggerFactory}
import scala.collection.JavaConverters._
/**
* Bucketing LSTM examples
* @author Yizhi Liu
*/
class LstmBucketing {
@Option(name = "--data-train", usage = "training set")
private val dataTrain: String = "example/rnn/ptb.train.txt"
@Option(name = "--data-val", usage = "validation set")
private val dataVal: String = "example/rnn/ptb.valid.txt"
@Option(name = "--num-epoch", usage = "the number of training epoch")
private val numEpoch: Int = 5
@Option(name = "--gpus", usage = "the gpus will be used, e.g. '0,1,2,3'")
private val gpus: String = null
@Option(name = "--cpus", usage = "the cpus will be used, e.g. '0,1,2,3'")
private val cpus: String = null
@Option(name = "--save-model-path", usage = "the model saving path")
private val saveModelPath: String = "model/lstm"
}
object LstmBucketing {
private val logger: Logger = LoggerFactory.getLogger(classOf[LstmBucketing])
def perplexity(label: NDArray, pred: NDArray): Float = {
val labelArr = label.T.toArray.map(_.toInt)
var loss = .0
(0 until pred.shape(0)).foreach(i =>
loss -= Math.log(Math.max(1e-10f, pred.slice(i).toArray(labelArr(i))))
)
Math.exp(loss / labelArr.length).toFloat
}
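  // i.e. exp(-(1/N) * sum_i log p_i(label_i)) over the flattened batch, with each probability
  // clamped at 1e-10 to avoid log(0).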
def main(args: Array[String]): Unit = {
val inst = new LstmBucketing
val parser: CmdLineParser = new CmdLineParser(inst)
try {
parser.parseArgument(args.toList.asJava)
val contexts =
if (inst.gpus != null) inst.gpus.split(',').map(id => Context.gpu(id.trim.toInt))
else if (inst.cpus != null) inst.cpus.split(',').map(id => Context.cpu(id.trim.toInt))
else Array(Context.cpu(0))
val batchSize = 32
val buckets = Array(10, 20, 30, 40, 50, 60)
val numHidden = 200
val numEmbed = 200
val numLstmLayer = 2
val learningRate = 0.01f
val momentum = 0.0f
logger.info("Building vocab ...")
val vocab = BucketIo.defaultBuildVocab(inst.dataTrain)
class BucketSymGen extends SymbolGenerator {
override def generate(key: AnyRef): Symbol = {
val seqLen = key.asInstanceOf[Int]
Lstm.lstmUnroll(numLstmLayer, seqLen, vocab.size,
numHidden = numHidden, numEmbed = numEmbed, numLabel = vocab.size)
}
}
val initC = (0 until numLstmLayer).map(l =>
(s"l${l}_init_c", (batchSize, numHidden))
)
val initH = (0 until numLstmLayer).map(l =>
(s"l${l}_init_h", (batchSize, numHidden))
)
val initStates = initC ++ initH
val dataTrain = new BucketSentenceIter(inst.dataTrain, vocab,
buckets, batchSize, initStates)
val dataVal = new BucketSentenceIter(inst.dataVal, vocab,
buckets, batchSize, initStates)
logger.info("Start training ...")
val model = FeedForward.newBuilder(new BucketSymGen())
.setContext(contexts)
.setNumEpoch(inst.numEpoch)
.setOptimizer(new SGD(learningRate = learningRate, momentum = momentum, wd = 0.00001f))
.setInitializer(new Xavier(factorType = "in", magnitude = 2.34f))
.setTrainData(dataTrain)
.setEvalData(dataVal)
.setEvalMetric(new CustomMetric(perplexity, name = "perplexity"))
.setBatchEndCallback(new Speedometer(batchSize, 50))
.build()
model.save(inst.saveModelPath)
} catch {
case ex: Exception =>
logger.error(ex.getMessage, ex)
parser.printUsage(System.err)
sys.exit(1)
}
}
}
| likelyzhao/mxnet | scala-package/examples/src/main/scala/ml/dmlc/mxnetexamples/rnn/LstmBucketing.scala | Scala | apache-2.0 | 4,534 |
package scala.tools.nsc.backend.jvm
import org.junit.Assert._
import org.junit.Test
import scala.collection.JavaConverters._
import scala.tools.asm.Handle
import scala.tools.asm.tree.InvokeDynamicInsnNode
import scala.tools.testing.BytecodeTesting
class IndyLambdaTest extends BytecodeTesting {
import compiler._
@Test def boxingBridgeMethodUsedSelectively(): Unit = {
def implMethodDescriptorFor(code: String): String = {
val method = compileAsmMethods(s"""def f = $code """).find(_.name == "f").get
val x = method.instructions.iterator.asScala.toList
x.flatMap {
case insn : InvokeDynamicInsnNode => insn.bsmArgs.collect { case h : Handle => h.getDesc }
case _ => Nil
}.head
}
val obj = "Ljava/lang/Object;"
val str = "Ljava/lang/String;"
// unspecialized functions that have a primitive in parameter or return position
// give rise to a "boxing bridge" method (which has the suffix `$adapted`).
    // This is because Scala's unboxing of null values gives zero, whereas Java's throws an NPE.
    // 1. Here we show that we are calling the boxing bridge (the lambda bodies here are compiled into
    // methods of `(I)Ljava/lang/Object;` / `(Ljava/lang/Object;)I` respectively.)
assertEquals(s"($obj)$obj", implMethodDescriptorFor("(x: Int) => new Object"))
assertEquals(s"($obj)$obj", implMethodDescriptorFor("(x: Object) => 0"))
// 2a. We don't need such adaptations for parameters or return values with types that differ
// from Object due to other generic substitution, LambdaMetafactory will downcast the arguments.
assertEquals(s"($str)$str", implMethodDescriptorFor("(x: String) => x"))
// 2b. Testing 2a. in combination with 1.
assertEquals(s"($obj)$str", implMethodDescriptorFor("(x: Int) => \\"\\""))
assertEquals(s"($str)$obj", implMethodDescriptorFor("(x: String) => 0"))
// 3. Specialized functions, don't need any of this as they implement a method like `apply$mcII$sp`,
// and the (un)boxing is handled in the base class in code emitted by scalac.
assertEquals("(I)I", implMethodDescriptorFor("(x: Int) => x"))
// non-builtin sams are like specialized functions
compileToBytes("class VC(private val i: Int) extends AnyVal; trait FunVC { def apply(a: VC): VC }")
assertEquals("(I)I", implMethodDescriptorFor("((x: VC) => x): FunVC"))
compileToBytes("trait Fun1[T, U] { def apply(a: T): U }")
assertEquals(s"($obj)$str", implMethodDescriptorFor("(x => x.toString): Fun1[Int, String]"))
assertEquals(s"($obj)$obj", implMethodDescriptorFor("(x => println(x)): Fun1[Int, Unit]"))
assertEquals(s"($obj)$str", implMethodDescriptorFor("((x: VC) => \\"\\") : Fun1[VC, String]"))
assertEquals(s"($str)$obj", implMethodDescriptorFor("((x: String) => new VC(0)) : Fun1[String, VC]"))
compileToBytes("trait Coll[A, Repr] extends Any")
compileToBytes("final class ofInt(val repr: Array[Int]) extends AnyVal with Coll[Int, Array[Int]]")
assertEquals(s"([I)$obj", implMethodDescriptorFor("((xs: Array[Int]) => new ofInt(xs)): Array[Int] => Coll[Int, Array[Int]]"))
}
}
| felixmulder/scala | test/junit/scala/tools/nsc/backend/jvm/IndyLambdaTest.scala | Scala | bsd-3-clause | 3,142 |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest.matchers
// T is the type of the object that has a Boolean property to verify with an instance of this trait
// This is not a subtype of BeMatcher, because BeMatcher only works after "be", but
// BePropertyMatcher will work after "be", "be a", or "be an"
/**
* Trait extended by matcher objects, which may appear after the word <code>be</code>, that can match against a <code>Boolean</code>
* property. The match will succeed if and only if the <code>Boolean</code> property equals <code>true</code>.
* The object containing the property, which must be of the type specified by the <code>BePropertyMatcher</code>'s type
* parameter <code>T</code>, is passed to the <code>BePropertyMatcher</code>'s
* <code>apply</code> method. The result is a <code>BePropertyMatchResult</code>.
* A <code>BePropertyMatcher</code> is, therefore, a function from the specified type, <code>T</code>, to
* a <code>BePropertyMatchResult</code>.
*
* <p>
* Although <code>BePropertyMatcher</code>
* and <code>Matcher</code> represent similar concepts, they have no inheritance relationship
* because <code>Matcher</code> is intended for use right after <code>should</code> or <code>must</code>
* whereas <code>BePropertyMatcher</code> is intended for use right after <code>be</code>.
* </p>
*
* <p>
* A <code>BePropertyMatcher</code> essentially allows you to write statically typed <code>Boolean</code>
* property assertions similar to the dynamic ones that use symbols:
* </p>
*
* <pre class="stHighlight">
* tempFile should be a ('file) // dynamic: uses reflection
* tempFile should be a (file) // type safe: only works on Files; no reflection used
* </pre>
*
* <p>
* One good way to organize custom matchers is to place them inside one or more traits that
* you can then mix into the suites or specs that need them. Here's an example that
* includes two <code>BePropertyMatcher</code>s:
* </p>
*
* <pre class="stHighlight">
* trait CustomMatchers {
*
* class FileBePropertyMatcher extends BePropertyMatcher[java.io.File] {
* def apply(left: java.io.File) = BePropertyMatchResult(left.isFile, "file")
* }
*
* class DirectoryBePropertyMatcher extends BePropertyMatcher[java.io.File] {
* def apply(left: java.io.File) = BePropertyMatchResult(left.isDirectory, "directory")
* }
*
* val file = new FileBePropertyMatcher
* val directory = new DirectoryBePropertyMatcher
* }
* </pre>
*
* <p>
* Because the type parameter of these two <code>BePropertyMatcher</code>s is <code>java.io.File</code>, they
* can only be used with instances of that type. (The compiler will enforce this.) All they do is create a
* <code>BePropertyMatchResult</code> whose <code>matches</code> field is <code>true</code> if and only if the <code>Boolean</code> property
* is <code>true</code>. The second field, <code>propertyName</code>, is simply the string name of the property.
* The <code>file</code> and <code>directory</code> <code>val</code>s create variables that can be used in
* matcher expressions that test whether a <code>java.io.File</code> is a file or a directory. Here's an example:
* </p>
*
* <pre class="stHighlight">
* class ExampleSpec extends Spec with ShouldMatchers with CustomMatchers {
*
* describe("A temp file") {
*
* it("should be a file, not a directory") {
*
* val tempFile = java.io.File.createTempFile("delete", "me")
*
* try {
* tempFile should be a (file)
* tempFile should not be a (directory)
* }
* finally {
* tempFile.delete()
* }
* }
* }
* }
* </pre>
*
* <p>
* These matches should succeed, but if for example the first match, <code>tempFile should be a (file)</code>, were to fail, you would get an error message like:
* </p>
*
* <pre class="stExamples">
* /tmp/delme1234me was not a file
* </pre>
*
* <p>
* For more information on <code>BePropertyMatchResult</code> and the meaning of its fields, please
* see the documentation for <a href="BePropertyMatchResult.html"><code>BePropertyMatchResult</code></a>. To understand why <code>BePropertyMatcher</code>
* is contravariant in its type parameter, see the section entitled "Matcher's variance" in the
* documentation for <a href="../Matcher.html"><code>Matcher</code></a>.
* </p>
*
* @author Bill Venners
*/
trait BePropertyMatcher[-T] extends Function1[T, BePropertyMatchResult] {
thisBePropertyMatcher =>
/**
* Check to see if a <code>Boolean</code> property on the specified object, <code>objectWithProperty</code>, matches its
* expected value, and report the result in
* the returned <code>BePropertyMatchResult</code>. The <code>objectWithProperty</code> is
* usually the value to the left of a <code>should</code> or <code>must</code> invocation. For example, <code>tempFile</code>
* would be passed as the <code>objectWithProperty</code> in:
*
* <pre class="stHighlight">
* tempFile should be a (file)
* </pre>
*
* @param objectWithProperty the object with the <code>Boolean</code> property against which to match
* @return the <code>BePropertyMatchResult</code> that represents the result of the match
*/
def apply(objectWithProperty: T): BePropertyMatchResult
/**
* Compose this <code>BePropertyMatcher</code> with the passed function, returning a new <code>BePropertyMatcher</code>.
*
* <p>
* This method overrides <code>compose</code> on <code>Function1</code> to
* return a more specific function type of <code>BePropertyMatcher</code>.
* </p>
*/
override def compose[U](g: U => T): BePropertyMatcher[U] =
new BePropertyMatcher[U] {
def apply(u: U) = thisBePropertyMatcher.apply(g(u))
}
}
/**
* Companion object for trait <code>BePropertyMatcher</code> that provides a
* factory method that creates a <code>BePropertyMatcher[T]</code> from a
* passed function of type <code>(T => BePropertyMatchResult)</code>.
*
* @author Bill Venners
*/
object BePropertyMatcher {
/**
* Factory method that creates a <code>BePropertyMatcher[T]</code> from a
* passed function of type <code>(T => BePropertyMatchResult)</code>.
*
* @author Bill Venners
*/
def apply[T](fun: T => BePropertyMatchResult)(implicit ev: Manifest[T]): BePropertyMatcher[T] =
new BePropertyMatcher[T] {
def apply(left: T) = fun(left)
override def toString: String = "BePropertyMatcher[" + ev.erasure.getName + "](" + ev.erasure.getName + " => BePropertyMatchResult)"
}
}
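// Illustrative factory use, mirroring the `file` matcher from the trait's documentation:
//   val file = BePropertyMatcher[java.io.File] { f => BePropertyMatchResult(f.isFile, "file") }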
| travisbrown/scalatest | src/main/scala/org/scalatest/matchers/BePropertyMatcher.scala | Scala | apache-2.0 | 7,139 |
/*
* Copyright (c) 2016 eBay Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.ebay.rtran.maven
import java.io.File
import org.apache.commons.io.FileUtils
import org.scalatest.{BeforeAndAfterEach, FlatSpecLike, Matchers}
import scala.collection.JavaConversions._
class MavenRemovePluginsRuleTest extends FlatSpecLike with Matchers with BeforeAndAfterEach {
val projectRoot = new File(getClass.getClassLoader.getResource("mvnproject").getFile)
val destProjectRoot = new File(projectRoot.getParentFile, projectRoot.getName + "-bak")
override def beforeEach = {
FileUtils.deleteQuietly(destProjectRoot)
FileUtils.copyDirectory(projectRoot, destProjectRoot)
}
"MavenRemovePluginsRule" should "be able to remove both plugins and managed plugins" in {
val ruleConfig = MavenRemoveManagedPluginsRuleConfig(
Set(SimplePlugin(artifactId = "maven-source-plugin"))
)
val projectCtx = new MavenProjectCtx(destProjectRoot)
val provider = new MultiModuleMavenModelProvider
val model = provider create projectCtx
val rule = new MavenRemovePluginsRule(ruleConfig)
provider save rule.transform(model)
val transformed = provider create projectCtx
transformed.parents.head
.pomModel.getBuild.getPluginManagement.getPlugins
.exists(_.getArtifactId == "maven-source-plugin") should be (false)
transformed.parents.head
.pomModel.getBuild.getPlugins
.exists(_.getArtifactId == "maven-source-plugin") should be (false)
}
"MavenRemovePluginsRule" should "not remove plugins or managed plugins that don't exist" in {
val ruleConfig = MavenRemoveManagedPluginsRuleConfig(
Set(SimplePlugin(artifactId = "maven-surefire-plugin"))
)
val projectCtx = new MavenProjectCtx(destProjectRoot)
val provider = new MultiModuleMavenModelProvider
val model = provider create projectCtx
val rule = new MavenRemovePluginsRule(ruleConfig)
val mpSize = model.parents.head.pomModel.getBuild.getPluginManagement.getPlugins.size
val pluginSize = model.parents.head.pomModel.getBuild.getPlugins.size
provider save rule.transform(model)
val transformed = provider create projectCtx
transformed.parents.head
.pomModel.getBuild.getPluginManagement.getPlugins.size should be (mpSize)
transformed.parents.head
.pomModel.getBuild.getPlugins.size should be (pluginSize)
}
"MavenRemovePluginsRule" should "remove both plugins and managed plugins matches that match other condition" in {
val ruleConfig = MavenRemoveManagedPluginsRuleConfig(
Set(SimplePlugin(artifactId = "maven-source-plugin", version = Some("2.2.1")))
)
val projectCtx = new MavenProjectCtx(destProjectRoot)
val provider = new MultiModuleMavenModelProvider
val model = provider create projectCtx
val rule = new MavenRemovePluginsRule(ruleConfig)
provider save rule.transform(model)
val transformed = provider create projectCtx
transformed.parents.head
.pomModel.getBuild.getPluginManagement.getPlugins
.exists(_.getArtifactId == "maven-source-plugin") should be (false)
transformed.parents.head
.pomModel.getBuild.getPlugins
.exists(_.getArtifactId == "maven-source-plugin") should be (false)
}
"MavenRemoveManagedPluginsRule" should "not remove plugins or managed plugins if other condition doesn't match" in {
val ruleConfig = MavenRemoveManagedPluginsRuleConfig(
Set(SimplePlugin(artifactId = "maven-source-plugin", version = Some("2.2.0")))
)
val projectCtx = new MavenProjectCtx(destProjectRoot)
val provider = new MultiModuleMavenModelProvider
val model = provider create projectCtx
val rule = new MavenRemovePluginsRule(ruleConfig)
provider save rule.transform(model)
val transformed = provider create projectCtx
transformed.parents.head
.pomModel.getBuild.getPluginManagement.getPlugins
.exists(_.getArtifactId == "maven-source-plugin") should be (true)
transformed.parents.head
.pomModel.getBuild.getPlugins
.exists(_.getArtifactId == "maven-source-plugin") should be (true)
}
}
| keshin/RTran | rtran-maven/src/test/scala/com/ebay/rtran/maven/MavenRemovePluginsRuleTest.scala | Scala | apache-2.0 | 4,672 |
/*
* Copyright (c) 2013-16 Miles Sabin
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import scala.language.experimental.macros
import scala.reflect.macros.whitebox
package object shapeless {
def unexpected : Nothing = sys.error("Unexpected invocation")
// Basic definitions
type Id[+T] = T
type Const[C] = {
type λ[T] = C
}
type ¬[T] = T => Nothing
type ¬¬[T] = ¬[¬[T]]
type ∧[T, U] = T with U
type ∨[T, U] = ¬[¬[T] ∧ ¬[U]]
// Type-lambda for context bound
type |∨|[T, U] = {
type λ[X] = ¬¬[X] <:< (T ∨ U)
}
// Type inequalities
trait =:!=[A, B] extends Serializable
implicit def neq[A, B] : A =:!= B = new =:!=[A, B] {}
implicit def neqAmbig1[A] : A =:!= A = unexpected
implicit def neqAmbig2[A] : A =:!= A = unexpected
trait <:!<[A, B] extends Serializable
implicit def nsub[A, B] : A <:!< B = new <:!<[A, B] {}
implicit def nsubAmbig1[A, B >: A] : A <:!< B = unexpected
implicit def nsubAmbig2[A, B >: A] : A <:!< B = unexpected
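  // Illustrative: implicitly[Int =:!= String] resolves, but implicitly[Int =:!= Int] fails to
  // compile because the two neqAmbig instances make the search ambiguous; <:!< uses the same trick.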
// Type-lambda for context bound
type |¬|[T] = {
type λ[U] = U <:!< T
}
// Quantifiers
type ∃[P[_]] = P[T] forSome { type T }
type ∀[P[_]] = ¬[∃[({ type λ[X] = ¬[P[X]]})#λ]]
/** `Optic` definitions */
val optic = OpticDefns
val lens = OpticDefns
val prism = OpticDefns
val ^ = Path
/** `Nat` literals */
val nat = Nat
/** 'Fin' */
val fin = Fin
/** `Poly` definitions */
val poly = PolyDefns
import poly._
/** Dependent nullary function type. */
trait DepFn0 {
type Out
def apply(): Out
}
/** Dependent unary function type. */
trait DepFn1[T] {
type Out
def apply(t: T): Out
}
/** Dependent binary function type. */
trait DepFn2[T, U] {
type Out
def apply(t: T, u: U): Out
}
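  // Sketch (added for illustration; strLen is hypothetical): each instance of
  // a dependent function fixes its own result type, e.g.
  //   object strLen extends DepFn1[String] { type Out = Int; def apply(t: String) = t.length }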
/** The SYB everything combinator */
type Everything[F <: Poly, K <: Poly, T] = Case1[EverythingAux[F, K], T]
class ApplyEverything[F <: Poly] {
def apply(k : Poly): EverythingAux[F, k.type] {} = new EverythingAux[F, k.type]
}
def everything(f: Poly): ApplyEverything[f.type] {} = new ApplyEverything[f.type]
/** The SYB everywhere combinator */
type Everywhere[F <: Poly, T] = Case1[EverywhereAux[F], T]
def everywhere(f: Poly): EverywhereAux[f.type] {} = new EverywhereAux[f.type]
def cachedImplicit[T]: T = macro CachedImplicitMacros.cachedImplicitImpl[T]
}
package shapeless {
@macrocompat.bundle
class CachedImplicitMacros(val c: whitebox.Context) {
import c.universe._
def cachedImplicitImpl[T](implicit tTag: WeakTypeTag[T]): Tree = {
val casted = c.asInstanceOf[reflect.macros.runtime.Context]
val typer = casted.callsiteTyper
val global: casted.universe.type = casted.universe
val analyzer: global.analyzer.type = global.analyzer
val tCtx = typer.context
val owner = tCtx.owner
if(!owner.isVal && !owner.isLazy)
c.abort(c.enclosingPosition, "cachedImplicit should only be used to initialize vals and lazy vals")
val tTpe = weakTypeOf[T]
val application = casted.macroApplication
val tpe = {
val tpe0 =
if (tTpe.typeSymbol.isParameter) owner.tpe.asInstanceOf[Type]
else tTpe
tpe0.finalResultType
}.asInstanceOf[global.Type]
// Run our own custom implicit search that isn't allowed to find
// the thing we are enclosed in
val sCtx = tCtx.makeImplicit(false)
val is = new analyzer.ImplicitSearch(
tree = application,
pt = tpe,
isView = false,
context0 = sCtx,
pos0 = c.enclosingPosition.asInstanceOf[global.Position]
) {
override def searchImplicit(
implicitInfoss: List[List[analyzer.ImplicitInfo]],
isLocalToCallsite: Boolean
): analyzer.SearchResult = {
val filteredInput = implicitInfoss.map { infos =>
infos.filter { info =>
val sym = if(info.sym.isLazy) info.sym else info.sym.accessedOrSelf
sym.owner != owner.owner || (!sym.isVal && !sym.isLazy)
}
}
super.searchImplicit(filteredInput, isLocalToCallsite)
}
}
val best = is.bestImplicit
if (best.isFailure) {
val errorMsg = tpe.typeSymbolDirect match {
case analyzer.ImplicitNotFoundMsg(msg) =>
msg.format(TermName("evidence").asInstanceOf[global.TermName], tpe)
case _ =>
s"Could not find an implicit value of type $tpe to cache"
}
c.abort(c.enclosingPosition, errorMsg)
} else {
best.tree.asInstanceOf[Tree]
}
}
}
}
| lukasz-golebiewski/shapeless | core/src/main/scala/shapeless/package.scala | Scala | apache-2.0 | 5,151 |
package com.stephentu.sql
import scala.collection.mutable.{ArrayBuffer, HashMap}
abstract trait Symbol {
val ctx: Context
}
// a symbol corresponding to a reference to a column from a relation in this context.
// note that this column is not necessarily projected in this context
case class ColumnSymbol(relation: String, column: String, ctx: Context) extends Symbol
// a symbol corresponding to a reference to a projection in this context.
// note: because of SQL scoping rules, this symbol can only appear in the keys of a
// {group,order} by clause (but not the having clause of a group by)
case class ProjectionSymbol(name: String, ctx: Context) extends Symbol
abstract trait ProjectionType
case class NamedProjection(name: String, expr: SqlExpr, pos: Int) extends ProjectionType
case object WildcardProjection extends ProjectionType
class Context(val parent: Either[Definitions, Context]) {
val relations = new HashMap[String, Relation]
val projections = new ArrayBuffer[ProjectionType]
// is this context a parent of that context?
def isParentOf(that: Context): Boolean = {
var cur = that.parent.right.toOption.orNull
while (cur ne null) {
if (cur eq this) return true
cur = cur.parent.right.toOption.orNull
}
false
}
// lookup a projection with the given name in this context.
// if there are multiple projections with
// the given name, then it is undefined which projection is returned.
// this also returns projections which exist due to wildcard projections.
def lookupProjection(name: String): Option[SqlExpr] =
lookupProjection0(name, true)
  // ignores wildcards, and treats preceding wildcards as a single projection
// in terms of index calculation
def lookupNamedProjectionIndex(name: String): Option[Int] = {
projections.zipWithIndex.flatMap {
case (NamedProjection(n0, _, _), idx) if n0 == name => Some(idx)
case _ => None
}.headOption
}
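  // Worked example (illustrative): given projections [WildcardProjection,
  // NamedProjection("a", ...), NamedProjection("b", ...)],
  // lookupNamedProjectionIndex("b") == Some(2) - the preceding wildcard
  // counts as exactly one position.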
private def lookupProjection0(name: String, allowWildcard: Boolean): Option[SqlExpr] = {
projections.flatMap {
case NamedProjection(n0, expr, _) if n0 == name => Some(expr)
case WildcardProjection if allowWildcard =>
def lookupRelation(r: Relation): Option[SqlExpr] = r match {
case TableRelation(t) =>
defns.lookup(t, name).map(tc => {
FieldIdent(Some(t), name, ColumnSymbol(t, name, this), this)
})
case SubqueryRelation(s) =>
s.ctx.projections.flatMap {
case NamedProjection(n0, expr, _) if n0 == name => Some(expr)
case WildcardProjection =>
assert(allowWildcard)
s.ctx.lookupProjection(name)
case _ => None
}.headOption
}
relations.flatMap {
case (_, r) => lookupRelation(r)
}
case _ => None
}.headOption
}
val defns = lookupDefns()
private def lookupDefns(): Definitions = parent match {
case Left(d) => d
case Right(p) => p.lookupDefns()
}
  // finds a column by name
def lookupColumn(qual: Option[String], name: String, inProjScope: Boolean): Seq[Symbol] =
lookupColumn0(qual, name, inProjScope, None)
private def lookupColumn0(qual: Option[String],
name: String,
inProjScope: Boolean,
topLevel: Option[String]): Seq[Symbol] = {
def lookupRelation(topLevel: String, r: Relation): Seq[Symbol] = r match {
case TableRelation(t) =>
defns.lookup(t, name).map(tc => ColumnSymbol(topLevel, name, this)).toSeq
case SubqueryRelation(s) =>
s.ctx.projections.flatMap {
case NamedProjection(n0, expr, _) if n0 == name =>
Seq(ColumnSymbol(topLevel, name, this))
case WildcardProjection =>
s.ctx.lookupColumn0(None, name, inProjScope, Some(topLevel))
case _ => Seq.empty
}
}
val res = qual match {
case Some(q) =>
relations.get(q).map(x => lookupRelation(topLevel.getOrElse(q), x)).getOrElse(Seq.empty)
case None =>
        // projection lookups can only happen when there is no qualifier, and we currently
        // give preference to column lookups first - TODO: verify that this follows correct
        // SQL scoping rules.
val r = relations.flatMap { case (r, x) => lookupRelation(topLevel.getOrElse(r), x) }.toSeq
if (r.isEmpty) {
lookupProjection0(name, false).map(_ => Seq(ProjectionSymbol(name, this))).getOrElse(Seq.empty)
} else r
}
if (res.isEmpty) {
// TODO:
// lookup in parent- assume lookups in parent scope never reads projections-
// ie we currently assume no sub-selects in {group,order} by clauses
parent.right.toOption.map(_.lookupColumn(qual, name, false)).getOrElse(Seq.empty)
} else res
}
// HACK: don't use default toString(), so that built-in ast node's
// toString can be used for structural equality
override def toString = "Context"
}
| debugger87/scala-sql-parser | src/main/scala/context.scala | Scala | mit | 5,018 |
package collins.solr
import java.util.Date
import collins.models.Asset
import collins.models.AssetMeta.ValueType.Boolean
import collins.models.AssetMeta.ValueType.Double
import collins.models.AssetMeta.ValueType.Integer
import collins.models.AssetMeta.ValueType.String
import collins.models.AssetMetaValue
import collins.models.IpAddresses
import collins.models.IpmiInfo
import collins.models.MetaWrapper
import collins.models.Truthy
import collins.solr.UpperCaseString.UppercaseString2String
import collins.solr.UpperCaseString.string2UpperCaseString
import collins.util.views.Formatter
import collins.solr.Solr.AssetSolrDocument
import collins.solr.SolrKeyFlag._
/**
* asset meta values are all converted into strings with the meta name as the
 * solr key, using group_id to group values into multi-valued keys
*/
object AssetSerializer extends SolrSerializer[Asset](AssetDocType) {
val generatedFields = SolrKey("NUM_DISKS", Integer, Dynamic, SingleValued, Sortable) :: SolrKey("KEYS", String, Dynamic, MultiValued, NotSortable) :: Nil
val res = AssetDocType.keyResolver
def getFields(asset: Asset, indexTime: Date) = postProcess {
val opt = Map[SolrKey, Option[SolrValue]](
res("UPDATED").get -> asset.updated.map{t => SolrStringValue(Formatter.solrDateFormat(t), StrictUnquoted)},
res("DELETED").get -> asset.deleted.map{t => SolrStringValue(Formatter.solrDateFormat(t), StrictUnquoted)},
res("STATE").get -> asset.state.map{s => SolrStringValue(s.name, StrictUnquoted)},
res("IP_ADDRESS").get -> {
val a = IpAddresses.findAllByAsset(asset, false)
if (a.size > 0) {
val addresses = SolrMultiValue(MultiSet.fromSeq(a.map{a => SolrStringValue(a.dottedAddress, StrictUnquoted)}))
Some(addresses)
} else {
None
}
}
).collect{case(k, Some(v)) => (k,v)}
val ipmi: AssetSolrDocument = IpmiInfo.findByAsset(asset).map{ipmi => Map(
res(IpmiInfo.Enum.IpmiAddress.toString).get -> SolrStringValue(ipmi.dottedAddress, StrictUnquoted)
)}.getOrElse(Map())
opt ++ ipmi ++ Map[SolrKey, SolrValue](
res("ID").get -> SolrIntValue(asset.id.toInt),
res("TAG").get -> SolrStringValue(asset.tag, StrictUnquoted),
res("STATUS").get -> SolrStringValue(asset.getStatusName, StrictUnquoted),
res("TYPE").get -> SolrStringValue(asset.getTypeName, StrictUnquoted),
res("CREATED").get -> SolrStringValue(Formatter.solrDateFormat(asset.created), StrictUnquoted)
) ++ serializeMetaValues(AssetMetaValue.findByAsset(asset, false))
}
def getUUID(asset: Asset) = asset.id
//FIXME: The parsing logic here is duplicated in AssetMeta.validateValue
def serializeMetaValues(values: Seq[MetaWrapper]): AssetSolrDocument = {
def process(build: AssetSolrDocument, remain: Seq[MetaWrapper]): AssetSolrDocument = remain match {
case head :: tail => {
val newval = head.getValueType() match {
case Boolean => SolrBooleanValue((new Truthy(head.getValue())).isTruthy)
case Integer => SolrIntValue(java.lang.Integer.parseInt(head.getValue()))
case Double => SolrDoubleValue(java.lang.Double.parseDouble(head.getValue()))
case _ => SolrStringValue(head.getValue(), StrictUnquoted)
}
val solrKey = res(head.getName()).get
val mergedval = build.get(solrKey) match {
case Some(exist) => exist match {
case s: SolrSingleValue => SolrMultiValue(MultiSet(s, newval), newval.valueType)
case m: SolrMultiValue => m + newval
}
case None => newval
}
process(build + (solrKey -> mergedval), tail)
}
case _ => build
}
process(Map(), values)
}
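  // Illustrative merge (the key name is hypothetical): two MetaWrappers for
  // "NIC_SPEED" with integer values 1000 and 10000 collapse into a single
  // SolrMultiValue(MultiSet(SolrIntValue(1000), SolrIntValue(10000))) entry.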
def postProcess(doc: AssetSolrDocument): AssetSolrDocument = {
val disks:Option[Tuple2[SolrKey, SolrValue]] = doc.find{case (k,v) => k.name == "DISK_SIZE_BYTES"}.map{case (k,v) => (res("NUM_DISKS").get -> SolrIntValue(v match {
case s:SolrSingleValue => 1
case SolrMultiValue(vals, _) => vals.size.toInt
}))}
val newFields = List(disks).flatten.toMap
val almostDone = doc ++ newFields
val keyList = SolrMultiValue(MultiSet.fromSeq(almostDone.map{case (k,v) => SolrStringValue(k.name, StrictUnquoted)}.toSeq), String)
//val sortKeys = almostDone.map{case(k,v) => k.sortify(v)}.flatten
almostDone + (res("KEYS").get -> keyList)
}
}
| box/collins | app/collins/solr/asset/AssetSerializer.scala | Scala | apache-2.0 | 4,419 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.expressions.utils
import org.apache.flink.api.common.typeinfo.{BasicArrayTypeInfo, PrimitiveArrayTypeInfo, Types}
import org.apache.flink.api.java.typeutils.{ObjectArrayTypeInfo, RowTypeInfo}
import org.apache.flink.table.util.DateTimeTestUtil._
import org.apache.flink.types.Row
abstract class ArrayTypeTestBase extends ExpressionTestBase {
case class MyCaseClass(string: String, int: Int)
override def testData: Row = {
val testData = new Row(12)
testData.setField(0, null)
testData.setField(1, 42)
testData.setField(2, Array(1, 2, 3))
testData.setField(3, Array(UTCDate("1984-03-12"), UTCDate("1984-02-10")))
testData.setField(4, null)
testData.setField(5, Array(Array(1, 2, 3), null))
testData.setField(6, Array[Integer](1, null, null, 4))
testData.setField(7, Array(1, 2, 3, 4))
testData.setField(8, Array(4.0))
testData.setField(9, Array[Integer](1))
testData.setField(10, Array[Integer]())
testData.setField(11, Array[Integer](1))
testData
}
override def typeInfo: RowTypeInfo = {
new RowTypeInfo(
/* 0 */ Types.INT,
/* 1 */ Types.INT,
/* 2 */ PrimitiveArrayTypeInfo.INT_PRIMITIVE_ARRAY_TYPE_INFO,
/* 3 */ ObjectArrayTypeInfo.getInfoFor(Types.SQL_DATE),
/* 4 */ ObjectArrayTypeInfo.getInfoFor(ObjectArrayTypeInfo.getInfoFor(Types.INT)),
/* 5 */ ObjectArrayTypeInfo.getInfoFor(PrimitiveArrayTypeInfo.INT_PRIMITIVE_ARRAY_TYPE_INFO),
/* 6 */ ObjectArrayTypeInfo.getInfoFor(Types.INT),
/* 7 */ PrimitiveArrayTypeInfo.INT_PRIMITIVE_ARRAY_TYPE_INFO,
/* 8 */ PrimitiveArrayTypeInfo.DOUBLE_PRIMITIVE_ARRAY_TYPE_INFO,
/* 9 */ ObjectArrayTypeInfo.getInfoFor(Types.INT),
/* 10 */ ObjectArrayTypeInfo.getInfoFor(Types.INT),
/* 11 */ BasicArrayTypeInfo.INT_ARRAY_TYPE_INFO
)
}
}
| shaoxuan-wang/flink | flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/expressions/utils/ArrayTypeTestBase.scala | Scala | apache-2.0 | 2,672 |
import FullBorderExtras._
import scala.util.Random
import org.scalatest._
class FullBorderSpec extends FlatSpec with Matchers {
"split171" should "split (1, ..., 9) correctly" in {
val (l, mid, r) = (1 to 9).toArray.split171
assert(l==1 && mid.toList == List(2, 3, 4, 5, 6, 7, 8) && r == 9)
}
}
| matiasdahl/Boundary-Sudoku | src/test/scala/FullBorderSpec.scala | Scala | mit | 314 |
package models.join
import models.db._
/**
*
* @author ponkotuy
* Date: 14/10/22.
*/
case class SlotItemWithMaster(item: SlotItem, master: MasterSlotItem) {
def slotitemId = master.id
def name = master.name
def category = master.category
def iconType = master.iconType
def alv = item.alv
def colorClass: String = {
import tool.EquipIconType._
iconType.map {
case MainGunS | MainGunM | MainGunL | AntiShipBullet | Bomber => "danger"
case CaliberGun | AntiAirBullet | AntiAirGun | Fighter | ScoutSeaplane | Autogiro => "success"
case SubGun | Scout | Radar | EngineChamber | Searchlight | Flare => "warning"
case Torpedo | TorpedoBomber | Sonar | DepthBomb => "info"
case LandingCraft => "success"
case MaritimePartrolAircraft => "info"
case Armor => "purple"
case DamageControl | SimplifiedPackage | RepairFacility | Pilot | Command => ""
}
}.getOrElse("")
  /** Despite the name withLevel, this also appends the plane proficiency (alv) as a bonus. */
def nameWithLevel = master.name + item.withLevel + alvStr.getOrElse("")
def alvStr: Option[String] = alv.map { alv => s"(${alv})" }
def airSuperiority: Int = alv.map { alv => (alv - 1) * 25 / 6 }.getOrElse(0)
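  // Worked example: alv = 7 (max proficiency) gives (7 - 1) * 25 / 6 = 25,
  // alv = 1 gives 0, and equipment without a proficiency contributes 0.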
}
| nekoworkshop/MyFleetGirls | server/app/models/join/SlotItemWithMaster.scala | Scala | mit | 1,245 |
package scan
import java.nio.file._
import scala.compat.java8.StreamConverters._
import scala.collection.SortedSet
import cats._
import cats.data._
import cats.implicits._
import mouse.all._
import org.atnos.eff._
import org.atnos.eff.all._
import org.atnos.eff.syntax.all._
import org.atnos.eff.addon.monix._
import org.atnos.eff.addon.monix.task._
import org.atnos.eff.syntax.addon.monix.task._
import monix.eval._
import monix.execution._
import EffTypes._
import scala.concurrent.duration._
object Scanner {
val Usage = "Scanner <path> [number of largest files to track]"
type R = Fx.fx3[Task, Reader[Filesystem, ?], Either[String, ?]]
implicit val s = Scheduler(ExecutionModel.BatchedExecution(32))
def main(args: Array[String]): Unit = {
val program = scanReport[R](args)
program.runReader(DefaultFilesystem: Filesystem).runEither.runAsync.runSyncUnsafe(1.minute) match {
case Right(report) => println(report)
case Left(msg) => println(s"Scan failed: $msg")
}
}
def scanReport[R: _task: _filesystem](args: Array[String]): Eff[R, String] = for {
    base <- optionEither(args.lift(0), s"Path to scan must be specified.\n$Usage")
topN <- {
val n = args.lift(1).getOrElse("10")
fromEither(n.parseInt.leftMap(_ => s"Number of files must be numeric: $n"))
}
    topNValid <- {
      // Minimal validation sketch - the exercise leaves this hole as ??? and
      // this is one plausible fill: reject negative counts, pass others through.
      fromEither(
        if (topN >= 0) Right(topN)
        else Left(s"Invalid number of files: $topN. Must be non-negative.")
      )
    }
fs <- ask[R, Filesystem]
scan <- pathScan[Fx.prepend[Reader[ScanConfig, ?], R]](fs.filePath(base)).
runReader[ScanConfig](ScanConfig(topNValid))
} yield ReportFormat.largeFilesReport(scan, base.toString)
def pathScan[R: _task: _filesystem: _config](path: FilePath): Eff[R, PathScan] = path match {
case f: File =>
for {
fs <- FileSize.ofFile(f)
} yield PathScan(SortedSet(fs), fs.size, 1)
case dir: Directory =>
for {
filesystem <- ask[R, Filesystem]
topN <- takeTopN
fileList <- taskDelay(filesystem.listFiles(dir))
childScans <- fileList.traverse(pathScan[R](_))
} yield childScans.combineAll(topN)
case Other(_) =>
PathScan.empty.pureEff[R]
}
def takeTopN[R: _config]: Eff[R, Monoid[PathScan]] = for {
scanConfig <- ask
} yield new Monoid[PathScan] {
def empty: PathScan = PathScan.empty
def combine(p1: PathScan, p2: PathScan): PathScan = PathScan(
p1.largestFiles.union(p2.largestFiles).take(scanConfig.topN),
p1.totalSize + p2.totalSize,
p1.totalCount + p2.totalCount
)
}
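  // Illustrative combine (sizes are made up): with topN = 2, merging a scan of
  // files sized {5, 3} with one of {4} keeps largestFiles {5, 4}, and yields
  // totalSize 12 and totalCount 3.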
}
trait Filesystem {
def filePath(path: String): FilePath
def length(file: File): Long
def listFiles(directory: Directory): List[FilePath]
}
case object DefaultFilesystem extends Filesystem {
def filePath(path: String): FilePath =
if (Files.isRegularFile(Paths.get(path)))
File(path.toString)
else if (Files.isDirectory(Paths.get(path)))
Directory(path)
else
Other(path)
def length(file: File) = Files.size(Paths.get(file.path))
def listFiles(directory: Directory) = {
val files = Files.list(Paths.get(directory.path))
try files.toScala[List].flatMap(path => filePath(path.toString) match {
case Directory(path) => List(Directory(path))
case File(path) => List(File(path))
case Other(path) => List.empty
})
finally files.close()
}
}
case class ScanConfig(topN: Int)
case class PathScan(largestFiles: SortedSet[FileSize], totalSize: Long, totalCount: Long)
object PathScan {
def empty = PathScan(SortedSet.empty, 0, 0)
}
case class FileSize(file: File, size: Long)
object FileSize {
def ofFile[R: _filesystem](file: File): Eff[R, FileSize] = for {
fs <- ask
} yield FileSize(file, fs.length(file))
implicit val ordering: Ordering[FileSize] = Ordering.by[FileSize, Long](_.size).reverse
}
object EffTypes {
type _filesystem[R] = Reader[Filesystem, ?] <= R
type _config[R] = Reader[ScanConfig, ?] <= R
type _err[R] = Either[String, ?] <= R
}
//I prefer a closed set of disjoint cases over a series of isX(): Boolean tests, as provided by the Java API
//The problem with boolean test methods is they make it unclear what the complete set of possible states is, and which tests
//can overlap
sealed trait FilePath {
def path: String
}
case class File(path: String) extends FilePath
case class Directory(path: String) extends FilePath
case class Other(path: String) extends FilePath
//Common pure code that is unaffected by the migration to Eff
object ReportFormat {
def largeFilesReport(scan: PathScan, rootDir: String): String = {
if (scan.largestFiles.nonEmpty) {
      s"Largest ${scan.largestFiles.size} file(s) found under path: $rootDir\n" +
        scan.largestFiles.map(fs => s"${(fs.size * 100)/scan.totalSize}% ${formatByteString(fs.size)} ${fs.file}").mkString("", "\n", "\n") +
        s"${scan.totalCount} total files found, having total size ${formatByteString(scan.totalSize)} bytes.\n"
}
else
s"No files found under path: $rootDir"
}
def formatByteString(bytes: Long): String = {
if (bytes < 1000)
s"${bytes} B"
else {
val exp = (Math.log(bytes) / Math.log(1000)).toInt
val pre = "KMGTPE".charAt(exp - 1)
s"%.1f ${pre}B".format(bytes / Math.pow(1000, exp))
}
}
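  // Worked examples: formatByteString(999) == "999 B" and
  // formatByteString(1234567) == "1.2 MB" (exp = 2, prefix 'M').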
}
| benhutchison/GettingWorkDoneWithExtensibleEffects | exerciseError/src/main/scala/scan/Scanner.scala | Scala | apache-2.0 | 5,250 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.sinks
import org.apache.flink.api.java.tuple.{Tuple2 => JTuple2}
import org.apache.flink.api.java.typeutils.{GenericTypeInfo, PojoTypeInfo, TupleTypeInfo}
import org.apache.flink.api.scala.typeutils.CaseClassTypeInfo
import org.apache.flink.table.api._
import org.apache.flink.table.catalog.{CatalogTable, ObjectIdentifier}
import org.apache.flink.table.connector.sink.DynamicTableSink
import org.apache.flink.table.connector.sink.abilities.{SupportsOverwrite, SupportsPartitioning}
import org.apache.flink.table.data.RowData
import org.apache.flink.table.operations.CatalogSinkModifyOperation
import org.apache.flink.table.planner.calcite.FlinkTypeFactory
import org.apache.flink.table.runtime.types.TypeInfoDataTypeConverter.fromDataTypeToTypeInfo
import org.apache.flink.table.runtime.typeutils.InternalTypeInfo
import org.apache.flink.table.sinks._
import org.apache.flink.table.types.DataType
import org.apache.flink.table.types.inference.TypeTransformations.{legacyDecimalToDefaultDecimal, legacyRawToTypeInfoRaw, toNullable}
import org.apache.flink.table.types.logical.utils.LogicalTypeCasts.{supportsAvoidingCast, supportsImplicitCast}
import org.apache.flink.table.types.logical.utils.LogicalTypeChecks
import org.apache.flink.table.types.logical.{LegacyTypeInformationType, RowType}
import org.apache.flink.table.types.utils.DataTypeUtils
import org.apache.flink.table.types.utils.TypeConversions.{fromLegacyInfoToDataType, fromLogicalToDataType}
import org.apache.flink.table.utils.{TableSchemaUtils, TypeMappingUtils}
import org.apache.flink.types.Row
import org.apache.calcite.plan.RelOptUtil
import org.apache.calcite.rel.RelNode
import _root_.scala.collection.JavaConversions._
/**
* Note: We aim to gradually port the logic in this class to [[DynamicSinkUtils]].
*/
object TableSinkUtils {
/**
   * It checks whether the [[TableSink]] is compatible with the INSERT INTO clause, e.g.
* whether the sink is a [[PartitionableTableSink]] and the partitions are valid.
*
* @param sinkOperation The sink operation with the query that is supposed to be written.
   * @param sinkIdentifier The path of the sink. It is needed just for logging. It does not
* participate in the validation.
* @param sink The sink that we want to write to.
* @param partitionKeys The partition keys of this table.
*/
def validateTableSink(
sinkOperation: CatalogSinkModifyOperation,
sinkIdentifier: ObjectIdentifier,
sink: TableSink[_],
partitionKeys: Seq[String]): Unit = {
// check partitions are valid
if (partitionKeys.nonEmpty) {
sink match {
case _: PartitionableTableSink =>
case _ => throw new ValidationException("We need PartitionableTableSink to write data to" +
s" partitioned table: $sinkIdentifier")
}
}
val staticPartitions = sinkOperation.getStaticPartitions
if (staticPartitions != null && !staticPartitions.isEmpty) {
staticPartitions.map(_._1) foreach { p =>
if (!partitionKeys.contains(p)) {
throw new ValidationException(s"Static partition column $p should be in the partition" +
s" fields list $partitionKeys for Table($sinkIdentifier).")
}
}
}
sink match {
case overwritableTableSink: OverwritableTableSink =>
overwritableTableSink.setOverwrite(sinkOperation.isOverwrite)
case _ =>
assert(!sinkOperation.isOverwrite, "INSERT OVERWRITE requires " +
s"${classOf[OverwritableTableSink].getSimpleName} but actually got " +
sink.getClass.getName)
}
}
/**
   * Infers the physical schema of a [[TableSink]]; the physical schema ignores the change flag
   * field and normalizes physical types (can be generic type or POJO type) into [[TableSchema]].
   * @param queryLogicalType the logical type of the query, used to fill in the sink physical
   * schema if the sink physical type is not specified.
* @param sink the instance of [[TableSink]]
*/
def inferSinkPhysicalSchema(
queryLogicalType: RowType,
sink: TableSink[_]): TableSchema = {
val withChangeFlag = sink match {
case _: RetractStreamTableSink[_] | _: UpsertStreamTableSink[_] => true
case _: StreamTableSink[_] => false
case dsts: DataStreamTableSink[_] => dsts.withChangeFlag
}
inferSinkPhysicalSchema(sink.getConsumedDataType, queryLogicalType, withChangeFlag)
}
/**
   * Infers the physical schema of a [[TableSink]]; the physical schema ignores the change flag
   * field and normalizes physical types (can be generic type or POJO type) into [[TableSchema]].
   *
   * @param consumedDataType the consumed data type of the sink
   * @param queryLogicalType the logical type of the query, used to fill in the sink physical
   * schema if the sink physical type is not specified.
   * @param withChangeFlag true if the emitted records contain change flags.
*/
def inferSinkPhysicalSchema(
consumedDataType: DataType,
queryLogicalType: RowType,
withChangeFlag: Boolean): TableSchema = {
// the requested output physical type which ignores the flag field
val requestedOutputType = inferSinkPhysicalDataType(
consumedDataType,
queryLogicalType,
withChangeFlag)
if (LogicalTypeChecks.isCompositeType(requestedOutputType.getLogicalType)) {
      // if the requested output type is a POJO, then we should ignore the POJO field order
      // and infer the sink schema via field names, see expandPojoTypeToSchema().
fromDataTypeToTypeInfo(requestedOutputType) match {
case pj: PojoTypeInfo[_] => expandPojoTypeToSchema(pj, queryLogicalType)
case _ => DataTypeUtils.expandCompositeTypeToSchema(requestedOutputType)
}
} else {
// atomic type
TableSchema.builder().field("f0", requestedOutputType).build()
}
}
/**
* Expands a [[PojoTypeInfo]] to a corresponding [[TableSchema]].
   * This is special handling for [[PojoTypeInfo]], because the fields of a [[PojoTypeInfo]]
   * are not in the declared order but in alphabetical order. In order to match the query
   * schema, we should reorder the POJO schema.
*/
private def expandPojoTypeToSchema(
pojo: PojoTypeInfo[_],
queryLogicalType: RowType): TableSchema = {
val fieldNames = queryLogicalType.getFieldNames
// reorder pojo fields according to the query schema
val reorderedFields = fieldNames.map(name => {
val index = pojo.getFieldIndex(name)
if (index < 0) {
throw new TableException(s"$name is not found in ${pojo.toString}")
}
val fieldTypeInfo = pojo.getPojoFieldAt(index).getTypeInformation
val fieldDataType = fieldTypeInfo match {
case nestedPojo: PojoTypeInfo[_] =>
val nestedLogicalType = queryLogicalType.getFields()(index).getType.asInstanceOf[RowType]
expandPojoTypeToSchema(nestedPojo, nestedLogicalType).toRowDataType
case _ =>
fromLegacyInfoToDataType(fieldTypeInfo)
}
DataTypes.FIELD(name, fieldDataType)
})
DataTypeUtils.expandCompositeTypeToSchema(DataTypes.ROW(reorderedFields: _*))
}
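  // Illustrative (field names are hypothetical): a POJO whose extracted fields
  // come back alphabetically as (age, name), written from a query projecting
  // (name, age), is expanded to the schema ordered [name, age].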
/**
   * Infers the physical data type of a [[TableSink]]; the physical data type ignores
   * the change flag field.
   *
   * @param consumedDataType the consumed data type of the sink
   * @param queryLogicalType the logical type of the query, used to fill in the sink physical
   * schema if the sink physical type is not specified.
   * @param withChangeFlag true if the emitted records contain change flags.
*/
def inferSinkPhysicalDataType(
consumedDataType: DataType,
queryLogicalType: RowType,
withChangeFlag: Boolean): DataType = {
val consumedTypeInfo = consumedDataType.getLogicalType match {
case lt: LegacyTypeInformationType[_] => Some(lt.getTypeInformation)
case _ => None
}
if (consumedTypeInfo.isEmpty) {
return consumedDataType
}
val requestedTypeInfo = if (withChangeFlag) {
consumedTypeInfo.get match {
// Scala tuple
case t: CaseClassTypeInfo[_]
if t.getTypeClass == classOf[(_, _)] && t.getTypeAt(0) == Types.BOOLEAN =>
t.getTypeAt[Any](1)
// Java tuple
case t: TupleTypeInfo[_]
if t.getTypeClass == classOf[JTuple2[_, _]] && t.getTypeAt(0) == Types.BOOLEAN =>
t.getTypeAt[Any](1)
case _ => throw new TableException(
"Don't support " + consumedDataType + " conversion for the retract sink")
}
} else {
consumedTypeInfo.get
}
    // The type may have been inferred by invoking [[TypeExtractor.createTypeInfo]] based on
    // the class of the resulting type. For example, when converting the given [[Table]] into
    // an append [[DataStream]], if the class is Row then the inferred return type is only
    // [[GenericTypeInfo[Row]]], so it should be converted to [[RowTypeInfo]] for better
    // serialization performance.
requestedTypeInfo match {
case gt: GenericTypeInfo[Row] if gt.getTypeClass == classOf[Row] =>
fromLogicalToDataType(queryLogicalType).bridgedTo(classOf[Row])
case gt: GenericTypeInfo[RowData] if gt.getTypeClass == classOf[RowData] =>
fromLogicalToDataType(queryLogicalType).bridgedTo(classOf[RowData])
case bt: InternalTypeInfo[RowData] =>
val fields = bt.toRowFieldNames.zip(bt.toRowFieldTypes).map { case (n, t) =>
DataTypes.FIELD(n, fromLogicalToDataType(t))
}
DataTypes.ROW(fields: _*).bridgedTo(classOf[RowData])
case _ =>
fromLegacyInfoToDataType(requestedTypeInfo)
}
}
/**
* Checks whether the logical schema (from DDL) and physical schema
   * (from TableSink.getConsumedDataType()) of the sink are compatible.
*
* @param catalogTable the catalog table of sink
* @param sink the instance of [[TableSink]]
* @param queryLogicalType the logical type of query
*/
def validateLogicalPhysicalTypesCompatible(
catalogTable: CatalogTable,
sink: TableSink[_],
queryLogicalType: RowType): Unit = {
// there may be generated columns in DDL, only get the physical part of DDL
val logicalSchema = TableSchemaUtils.getPhysicalSchema(catalogTable.getSchema)
// infer the physical schema from TableSink#getConsumedDataType
val physicalSchema = TableSinkUtils.inferSinkPhysicalSchema(
queryLogicalType,
sink)
// check for valid type info
if (logicalSchema.getFieldCount != physicalSchema.getFieldCount) {
      throw new ValidationException("The field count of the logical schema of the table does" +
        " not match the field count of the physical schema.\n" +
        s"The logical schema: [${logicalSchema.getFieldDataTypes.mkString(",")}]\n" +
        s"The physical schema: [${physicalSchema.getFieldDataTypes.mkString(",")}].")
}
for (i <- 0 until logicalSchema.getFieldCount) {
val logicalFieldType = DataTypeUtils.transform(
logicalSchema.getFieldDataTypes()(i), toNullable) // ignore nullabilities
val logicalFieldName = logicalSchema.getFieldNames()(i)
val physicalFieldType = DataTypeUtils.transform(
physicalSchema.getFieldDataTypes()(i), toNullable) // ignore nullabilities
val physicalFieldName = physicalSchema.getFieldNames()(i)
TypeMappingUtils.checkPhysicalLogicalTypeCompatible(
physicalFieldType.getLogicalType,
logicalFieldType.getLogicalType,
physicalFieldName,
logicalFieldName,
false)
}
}
/**
* Gets the NOT NULL physical field indices on the [[CatalogTable]].
*/
def getNotNullFieldIndices(tableSchema: TableSchema): Array[Int] = {
val rowType = tableSchema.toPhysicalRowDataType.getLogicalType.asInstanceOf[RowType]
val fieldTypes = rowType.getFields.map(_.getType).toArray
fieldTypes.indices.filter { index =>
!fieldTypes(index).isNullable
}.toArray
}
}
| kl0u/flink | flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/planner/sinks/TableSinkUtils.scala | Scala | apache-2.0 | 12,909 |
/*
* Copyright 2012-2014 Comcast Cable Communications Management, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.comcast.xfinity.sirius.admin
import com.comcast.xfinity.sirius.NiceTest
import com.comcast.xfinity.sirius.admin.SiriusMonitorReaderTest.DummyMonitor
import com.comcast.xfinity.sirius.api.SiriusConfiguration
import javax.management.{MBeanException, ObjectName, MBeanServerFactory, MBeanServer}
object SiriusMonitorReaderTest {
trait DummyMonitorMBean {
def getCash: String
}
class DummyMonitor(value: Either[Throwable, String]) extends DummyMonitorMBean {
def getCash = value match {
case Right(toReturn) => toReturn
case Left(toThrow) => throw toThrow
}
}
}
class SiriusMonitorReaderTest extends NiceTest {
val underTest = new SiriusMonitorReader
var mBeanServer: MBeanServer = _
before {
mBeanServer = MBeanServerFactory.createMBeanServer
}
after {
MBeanServerFactory.releaseMBeanServer(mBeanServer)
}
it ("must do nothing if no MBeanServer is configured") {
val underTest = new SiriusMonitorReader
val config = new SiriusConfiguration
assert(None === underTest.getMonitorStats(config))
}
it ("must expose metrics that exist") {
val goodObjectName = new ObjectName("com.comcast.xfinity.sirius:type=GoodJawn")
mBeanServer.registerMBean(new DummyMonitor(Right("Money")), goodObjectName)
val config = new SiriusConfiguration
config.setProp(SiriusConfiguration.MBEAN_SERVER, mBeanServer)
val expected = Map[String, Map[String, Any]](
"com.comcast.xfinity.sirius:type=GoodJawn" -> Map("Cash" -> "Money")
)
assert(Some(expected) === underTest.getMonitorStats(config))
}
it ("must sub in the exception as a string if a query fails") {
val anException = new Exception("BLOOOD")
val badObjectName = new ObjectName("com.comcast.xfinity.sirius:type=BadJawn")
mBeanServer.registerMBean(new DummyMonitor(Left(anException)), badObjectName)
val config = new SiriusConfiguration
config.setProp(SiriusConfiguration.MBEAN_SERVER, mBeanServer)
// XXX: it appears that this is how the MBeanServer is putting together the exception,
// just go with it
val expectedException = new MBeanException(anException, anException.toString)
val expected = Map[String, Map[String, Any]](
("com.comcast.xfinity.sirius:type=BadJawn" -> Map("Cash" -> expectedException.toString))
)
assert(Some(expected) === underTest.getMonitorStats(config))
}
it ("must not return anything for other MBeans") {
val objectName = new ObjectName("com.comcast.xfinity.not.sirius:type=OtherJawn")
mBeanServer.registerMBean(new DummyMonitor(Right("Money")), objectName)
val config = new SiriusConfiguration
config.setProp(SiriusConfiguration.MBEAN_SERVER, mBeanServer)
val expected = Map[String, Map[String, Any]]()
assert(Some(expected) === underTest.getMonitorStats(config))
}
}
| weggert/sirius | src/test/scala/com/comcast/xfinity/sirius/admin/SiriusMonitorReaderTest.scala | Scala | apache-2.0 | 3,497 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.expressions
import java.io._
import scala.util.parsing.combinator.RegexParsers
import com.fasterxml.jackson.core._
import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.analysis.TypeCheckResult
import org.apache.spark.sql.catalyst.expressions.codegen.CodegenFallback
import org.apache.spark.sql.catalyst.json._
import org.apache.spark.sql.catalyst.json.JsonInferSchema.inferField
import org.apache.spark.sql.catalyst.util._
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.types._
import org.apache.spark.unsafe.types.UTF8String
import org.apache.spark.util.Utils
private[this] sealed trait PathInstruction
private[this] object PathInstruction {
private[expressions] case object Subscript extends PathInstruction
private[expressions] case object Wildcard extends PathInstruction
private[expressions] case object Key extends PathInstruction
private[expressions] case class Index(index: Long) extends PathInstruction
private[expressions] case class Named(name: String) extends PathInstruction
}
private[this] sealed trait WriteStyle
private[this] object WriteStyle {
private[expressions] case object RawStyle extends WriteStyle
private[expressions] case object QuotedStyle extends WriteStyle
private[expressions] case object FlattenStyle extends WriteStyle
}
private[this] object JsonPathParser extends RegexParsers {
import PathInstruction._
def root: Parser[Char] = '$'
  def long: Parser[Long] = "\\d+".r ^? {
case x => x.toLong
}
// parse `[*]` and `[123]` subscripts
def subscript: Parser[List[PathInstruction]] =
for {
operand <- '[' ~> ('*' ^^^ Wildcard | long ^^ Index) <~ ']'
} yield {
Subscript :: operand :: Nil
}
// parse `.name` or `['name']` child expressions
def named: Parser[List[PathInstruction]] =
for {
      name <- '.' ~> "[^\\.\\[]+".r | "['" ~> "[^\\'\\?]+".r <~ "']"
} yield {
Key :: Named(name) :: Nil
}
// child wildcards: `..`, `.*` or `['*']`
def wildcard: Parser[List[PathInstruction]] =
(".*" | "['*']") ^^^ List(Wildcard)
def node: Parser[List[PathInstruction]] =
wildcard |
named |
subscript
val expression: Parser[List[PathInstruction]] = {
phrase(root ~> rep(node) ^^ (x => x.flatten))
}
def parse(str: String): Option[List[PathInstruction]] = {
this.parseAll(expression, str) match {
case Success(result, _) =>
Some(result)
case NoSuccess(msg, next) =>
None
}
}
}
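// Illustrative parse (added; not in the original source):
//   JsonPathParser.parse("$.a[0].b") ==
//     Some(List(Key, Named("a"), Subscript, Index(0), Key, Named("b")))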
private[this] object SharedFactory {
val jsonFactory = new JsonFactory()
// Enabled for Hive compatibility
jsonFactory.enable(JsonParser.Feature.ALLOW_UNQUOTED_CONTROL_CHARS)
}
/**
 * Extracts a json object from a json string based on the specified json path, and returns the
 * json string of the extracted json object. It will return null if the input json string is invalid.
*/
@ExpressionDescription(
usage = "_FUNC_(json_txt, path) - Extracts a json object from `path`.",
examples = """
Examples:
> SELECT _FUNC_('{"a":"b"}', '$.a');
b
""")
case class GetJsonObject(json: Expression, path: Expression)
extends BinaryExpression with ExpectsInputTypes with CodegenFallback {
import com.fasterxml.jackson.core.JsonToken._
import PathInstruction._
import SharedFactory._
import WriteStyle._
override def left: Expression = json
override def right: Expression = path
override def inputTypes: Seq[DataType] = Seq(StringType, StringType)
override def dataType: DataType = StringType
override def nullable: Boolean = true
override def prettyName: String = "get_json_object"
@transient private lazy val parsedPath = parsePath(path.eval().asInstanceOf[UTF8String])
override def eval(input: InternalRow): Any = {
val jsonStr = json.eval(input).asInstanceOf[UTF8String]
if (jsonStr == null) {
return null
}
val parsed = if (path.foldable) {
parsedPath
} else {
parsePath(path.eval(input).asInstanceOf[UTF8String])
}
if (parsed.isDefined) {
try {
/* We know the bytes are UTF-8 encoded. Pass a Reader to avoid having Jackson
detect character encoding which could fail for some malformed strings */
Utils.tryWithResource(CreateJacksonParser.utf8String(jsonFactory, jsonStr)) { parser =>
val output = new ByteArrayOutputStream()
val matched = Utils.tryWithResource(
jsonFactory.createGenerator(output, JsonEncoding.UTF8)) { generator =>
parser.nextToken()
evaluatePath(parser, generator, RawStyle, parsed.get)
}
if (matched) {
UTF8String.fromBytes(output.toByteArray)
} else {
null
}
}
} catch {
case _: JsonProcessingException => null
}
} else {
null
}
}
private def parsePath(path: UTF8String): Option[List[PathInstruction]] = {
if (path != null) {
JsonPathParser.parse(path.toString)
} else {
None
}
}
  // advance to the desired array index; assumes the parser starts at the START_ARRAY token
private def arrayIndex(p: JsonParser, f: () => Boolean): Long => Boolean = {
case _ if p.getCurrentToken == END_ARRAY =>
// terminate, nothing has been written
false
case 0 =>
// we've reached the desired index
val dirty = f()
while (p.nextToken() != END_ARRAY) {
// advance the token stream to the end of the array
p.skipChildren()
}
dirty
case i if i > 0 =>
// skip this token and evaluate the next
p.skipChildren()
p.nextToken()
arrayIndex(p, f)(i - 1)
}
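  // Illustrative behavior: arrayIndex(p, f)(2) skips the first two elements,
  // invokes f on the third, then drains the remaining tokens up to END_ARRAY;
  // it yields false without invoking f if the array ends before the index.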
/**
* Evaluate a list of JsonPath instructions, returning a bool that indicates if any leaf nodes
* have been written to the generator
*/
private def evaluatePath(
p: JsonParser,
g: JsonGenerator,
style: WriteStyle,
path: List[PathInstruction]): Boolean = {
(p.getCurrentToken, path) match {
case (VALUE_STRING, Nil) if style == RawStyle =>
// there is no array wildcard or slice parent, emit this string without quotes
if (p.hasTextCharacters) {
g.writeRaw(p.getTextCharacters, p.getTextOffset, p.getTextLength)
} else {
g.writeRaw(p.getText)
}
true
case (START_ARRAY, Nil) if style == FlattenStyle =>
// flatten this array into the parent
var dirty = false
while (p.nextToken() != END_ARRAY) {
dirty |= evaluatePath(p, g, style, Nil)
}
dirty
case (_, Nil) =>
// general case: just copy the child tree verbatim
g.copyCurrentStructure(p)
true
case (START_OBJECT, Key :: xs) =>
var dirty = false
while (p.nextToken() != END_OBJECT) {
if (dirty) {
// once a match has been found we can skip other fields
p.skipChildren()
} else {
dirty = evaluatePath(p, g, style, xs)
}
}
dirty
case (START_ARRAY, Subscript :: Wildcard :: Subscript :: Wildcard :: xs) =>
// special handling for the non-structure preserving double wildcard behavior in Hive
var dirty = false
g.writeStartArray()
while (p.nextToken() != END_ARRAY) {
dirty |= evaluatePath(p, g, FlattenStyle, xs)
}
g.writeEndArray()
dirty
case (START_ARRAY, Subscript :: Wildcard :: xs) if style != QuotedStyle =>
// retain Flatten, otherwise use Quoted... cannot use Raw within an array
val nextStyle = style match {
case RawStyle => QuotedStyle
case FlattenStyle => FlattenStyle
case QuotedStyle => throw new IllegalStateException()
}
// temporarily buffer child matches, the emitted json will need to be
// modified slightly if there is only a single element written
val buffer = new StringWriter()
var dirty = 0
Utils.tryWithResource(jsonFactory.createGenerator(buffer)) { flattenGenerator =>
flattenGenerator.writeStartArray()
while (p.nextToken() != END_ARRAY) {
// track the number of array elements and only emit an outer array if
// we've written more than one element, this matches Hive's behavior
dirty += (if (evaluatePath(p, flattenGenerator, nextStyle, xs)) 1 else 0)
}
flattenGenerator.writeEndArray()
}
val buf = buffer.getBuffer
if (dirty > 1) {
g.writeRawValue(buf.toString)
} else if (dirty == 1) {
// remove outer array tokens
g.writeRawValue(buf.substring(1, buf.length()-1))
} // else do not write anything
dirty > 0
case (START_ARRAY, Subscript :: Wildcard :: xs) =>
var dirty = false
g.writeStartArray()
while (p.nextToken() != END_ARRAY) {
// wildcards can have multiple matches, continually update the dirty count
dirty |= evaluatePath(p, g, QuotedStyle, xs)
}
g.writeEndArray()
dirty
case (START_ARRAY, Subscript :: Index(idx) :: (xs@Subscript :: Wildcard :: _)) =>
p.nextToken()
// we're going to have 1 or more results, switch to QuotedStyle
arrayIndex(p, () => evaluatePath(p, g, QuotedStyle, xs))(idx)
case (START_ARRAY, Subscript :: Index(idx) :: xs) =>
p.nextToken()
arrayIndex(p, () => evaluatePath(p, g, style, xs))(idx)
case (FIELD_NAME, Named(name) :: xs) if p.getCurrentName == name =>
// exact field match
if (p.nextToken() != JsonToken.VALUE_NULL) {
evaluatePath(p, g, style, xs)
} else {
false
}
case (FIELD_NAME, Wildcard :: xs) =>
// wildcard field match
p.nextToken()
evaluatePath(p, g, style, xs)
case _ =>
p.skipChildren()
false
}
}
}
// scalastyle:off line.size.limit
@ExpressionDescription(
usage = "_FUNC_(jsonStr, p1, p2, ..., pn) - Returns a tuple like the function get_json_object, but it takes multiple names. All the input parameters and output column types are string.",
examples = """
Examples:
> SELECT _FUNC_('{"a":1, "b":2}', 'a', 'b');
1 2
""")
// scalastyle:on line.size.limit
case class JsonTuple(children: Seq[Expression])
extends Generator with CodegenFallback {
import SharedFactory._
override def nullable: Boolean = {
// a row is always returned
false
}
// if processing fails this shared value will be returned
@transient private lazy val nullRow: Seq[InternalRow] =
new GenericInternalRow(Array.ofDim[Any](fieldExpressions.length)) :: Nil
// the json body is the first child
@transient private lazy val jsonExpr: Expression = children.head
// the fields to query are the remaining children
@transient private lazy val fieldExpressions: Seq[Expression] = children.tail
  // eagerly evaluate any foldable field names
@transient private lazy val foldableFieldNames: IndexedSeq[Option[String]] = {
fieldExpressions.map {
case expr if expr.foldable => Option(expr.eval()).map(_.asInstanceOf[UTF8String].toString)
case _ => null
}.toIndexedSeq
}
// and count the number of foldable fields, we'll use this later to optimize evaluation
@transient private lazy val constantFields: Int = foldableFieldNames.count(_ != null)
override def elementSchema: StructType = StructType(fieldExpressions.zipWithIndex.map {
case (_, idx) => StructField(s"c$idx", StringType, nullable = true)
})
override def prettyName: String = "json_tuple"
override def checkInputDataTypes(): TypeCheckResult = {
if (children.length < 2) {
TypeCheckResult.TypeCheckFailure(s"$prettyName requires at least two arguments")
} else if (children.forall(child => StringType.acceptsType(child.dataType))) {
TypeCheckResult.TypeCheckSuccess
} else {
TypeCheckResult.TypeCheckFailure(s"$prettyName requires that all arguments are strings")
}
}
override def eval(input: InternalRow): TraversableOnce[InternalRow] = {
val json = jsonExpr.eval(input).asInstanceOf[UTF8String]
if (json == null) {
return nullRow
}
try {
/* We know the bytes are UTF-8 encoded. Pass a Reader to avoid having Jackson
detect character encoding which could fail for some malformed strings */
Utils.tryWithResource(CreateJacksonParser.utf8String(jsonFactory, json)) { parser =>
parseRow(parser, input)
}
} catch {
case _: JsonProcessingException =>
nullRow
}
}
private def parseRow(parser: JsonParser, input: InternalRow): Seq[InternalRow] = {
// only objects are supported
if (parser.nextToken() != JsonToken.START_OBJECT) {
return nullRow
}
// evaluate the field names as String rather than UTF8String to
// optimize lookups from the json token, which is also a String
val fieldNames = if (constantFields == fieldExpressions.length) {
// typically the user will provide the field names as foldable expressions
// so we can use the cached copy
foldableFieldNames.map(_.orNull)
} else if (constantFields == 0) {
// none are foldable so all field names need to be evaluated from the input row
fieldExpressions.map(_.eval(input).asInstanceOf[UTF8String].toString)
} else {
// if there is a mix of constant and non-constant expressions
// prefer the cached copy when available
foldableFieldNames.zip(fieldExpressions).map {
case (null, expr) => expr.eval(input).asInstanceOf[UTF8String].toString
case (fieldName, _) => fieldName.orNull
}
}
val row = Array.ofDim[Any](fieldNames.length)
// start reading through the token stream, looking for any requested field names
while (parser.nextToken() != JsonToken.END_OBJECT) {
if (parser.getCurrentToken == JsonToken.FIELD_NAME) {
// check to see if this field is desired in the output
val jsonField = parser.getCurrentName
var idx = fieldNames.indexOf(jsonField)
if (idx >= 0) {
// it is, copy the child tree to the correct location in the output row
val output = new ByteArrayOutputStream()
// write the output directly to UTF8 encoded byte array
if (parser.nextToken() != JsonToken.VALUE_NULL) {
Utils.tryWithResource(jsonFactory.createGenerator(output, JsonEncoding.UTF8)) {
generator => copyCurrentStructure(generator, parser)
}
val jsonValue = UTF8String.fromBytes(output.toByteArray)
            // SPARK-21804: json_tuple returns null values within repeated columns
            // except the first one, so we need to check the remaining fields.
do {
row(idx) = jsonValue
idx = fieldNames.indexOf(jsonField, idx + 1)
} while (idx >= 0)
}
}
}
// always skip children, it's cheap enough to do even if copyCurrentStructure was called
parser.skipChildren()
}
new GenericInternalRow(row) :: Nil
}
private def copyCurrentStructure(generator: JsonGenerator, parser: JsonParser): Unit = {
parser.getCurrentToken match {
// if the user requests a string field it needs to be returned without enclosing
// quotes which is accomplished via JsonGenerator.writeRaw instead of JsonGenerator.write
case JsonToken.VALUE_STRING if parser.hasTextCharacters =>
// slight optimization to avoid allocating a String instance, though the characters
// still have to be decoded... Jackson doesn't have a way to access the raw bytes
generator.writeRaw(parser.getTextCharacters, parser.getTextOffset, parser.getTextLength)
case JsonToken.VALUE_STRING =>
// the normal String case, pass it through to the output without enclosing quotes
generator.writeRaw(parser.getText)
case JsonToken.VALUE_NULL =>
// a special case that needs to be handled outside of this method.
// if a requested field is null, the result must be null. the easiest
// way to achieve this is just by ignoring null tokens entirely
throw new IllegalStateException("Do not attempt to copy a null field")
case _ =>
// handle other types including objects, arrays, booleans and numbers
generator.copyCurrentStructure(parser)
}
}
}
/**
 * Converts a json input string to a [[StructType]] or [[ArrayType]] of [[StructType]]s
* with the specified schema.
*/
// scalastyle:off line.size.limit
@ExpressionDescription(
usage = "_FUNC_(jsonStr, schema[, options]) - Returns a struct value with the given `jsonStr` and `schema`.",
examples = """
Examples:
> SELECT _FUNC_('{"a":1, "b":0.8}', 'a INT, b DOUBLE');
{"a":1, "b":0.8}
> SELECT _FUNC_('{"time":"26/08/2015"}', 'time Timestamp', map('timestampFormat', 'dd/MM/yyyy'));
{"time":"2015-08-26 00:00:00.0"}
""",
since = "2.2.0")
// scalastyle:on line.size.limit
case class JsonToStructs(
schema: DataType,
options: Map[String, String],
child: Expression,
timeZoneId: Option[String] = None)
extends UnaryExpression with TimeZoneAwareExpression with CodegenFallback with ExpectsInputTypes {
val forceNullableSchema = SQLConf.get.getConf(SQLConf.FROM_JSON_FORCE_NULLABLE_SCHEMA)
// The JSON input data might be missing certain fields. We force the nullability
// of the user-provided schema to avoid data corruptions. In particular, the parquet-mr encoder
// can generate incorrect files if values are missing in columns declared as non-nullable.
val nullableSchema = if (forceNullableSchema) schema.asNullable else schema
override def nullable: Boolean = true
// Used in `FunctionRegistry`
def this(child: Expression, schema: Expression, options: Map[String, String]) =
this(
schema = JsonExprUtils.evalSchemaExpr(schema),
options = options,
child = child,
timeZoneId = None)
def this(child: Expression, schema: Expression) = this(child, schema, Map.empty[String, String])
def this(child: Expression, schema: Expression, options: Expression) =
this(
schema = JsonExprUtils.evalSchemaExpr(schema),
options = JsonExprUtils.convertToMapData(options),
child = child,
timeZoneId = None)
override def checkInputDataTypes(): TypeCheckResult = nullableSchema match {
case _: StructType | ArrayType(_: StructType, _) | _: MapType =>
super.checkInputDataTypes()
case _ => TypeCheckResult.TypeCheckFailure(
s"Input schema ${nullableSchema.catalogString} must be a struct or an array of structs.")
}
@transient
lazy val rowSchema = nullableSchema match {
case st: StructType => st
case ArrayType(st: StructType, _) => st
case mt: MapType => mt
}
// This converts parsed rows to the desired output by the given schema.
@transient
lazy val converter = nullableSchema match {
case _: StructType =>
(rows: Seq[InternalRow]) => if (rows.length == 1) rows.head else null
case ArrayType(_: StructType, _) =>
(rows: Seq[InternalRow]) => new GenericArrayData(rows)
case _: MapType =>
(rows: Seq[InternalRow]) => rows.head.getMap(0)
}
@transient
lazy val parser =
new JacksonParser(
rowSchema,
new JSONOptions(options + ("mode" -> FailFastMode.name), timeZoneId.get))
override def dataType: DataType = nullableSchema
override def withTimeZone(timeZoneId: String): TimeZoneAwareExpression =
copy(timeZoneId = Option(timeZoneId))
override def nullSafeEval(json: Any): Any = {
// When input is,
// - `null`: `null`.
// - invalid json: `null`.
// - empty string: `null`.
//
// When the schema is array,
// - json array: `Array(Row(...), ...)`
// - json object: `Array(Row(...))`
// - empty json array: `Array()`.
// - empty json object: `Array(Row(null))`.
//
// When the schema is a struct,
// - json object/array with single element: `Row(...)`
// - json array with multiple elements: `null`
// - empty json array: `null`.
// - empty json object: `Row(null)`.
// We need `null` if the input string is an empty string. `JacksonParser` can
// deal with this but produces `Nil`.
if (json.toString.trim.isEmpty) return null
try {
converter(parser.parse(
json.asInstanceOf[UTF8String],
CreateJacksonParser.utf8String,
identity[UTF8String]))
} catch {
case _: BadRecordException => null
}
}
override def inputTypes: Seq[AbstractDataType] = StringType :: Nil
override def sql: String = schema match {
case _: MapType => "entries"
case _ => super.sql
}
}
/**
* Converts a [[StructType]], [[ArrayType]] of [[StructType]]s, [[MapType]]
* or [[ArrayType]] of [[MapType]]s to a json output string.
*/
// scalastyle:off line.size.limit
@ExpressionDescription(
usage = "_FUNC_(expr[, options]) - Returns a json string with a given struct value",
examples = """
Examples:
> SELECT _FUNC_(named_struct('a', 1, 'b', 2));
{"a":1,"b":2}
> SELECT _FUNC_(named_struct('time', to_timestamp('2015-08-26', 'yyyy-MM-dd')), map('timestampFormat', 'dd/MM/yyyy'));
{"time":"26/08/2015"}
      > SELECT _FUNC_(array(named_struct('a', 1, 'b', 2)));
[{"a":1,"b":2}]
> SELECT _FUNC_(map('a', named_struct('b', 1)));
{"a":{"b":1}}
> SELECT _FUNC_(map(named_struct('a', 1),named_struct('b', 2)));
{"[1]":{"b":2}}
> SELECT _FUNC_(map('a', 1));
{"a":1}
> SELECT _FUNC_(array((map('a', 1))));
[{"a":1}]
""",
since = "2.2.0")
// scalastyle:on line.size.limit
case class StructsToJson(
options: Map[String, String],
child: Expression,
timeZoneId: Option[String] = None)
extends UnaryExpression with TimeZoneAwareExpression with CodegenFallback with ExpectsInputTypes {
override def nullable: Boolean = true
def this(options: Map[String, String], child: Expression) = this(options, child, None)
// Used in `FunctionRegistry`
def this(child: Expression) = this(Map.empty, child, None)
def this(child: Expression, options: Expression) =
this(
options = JsonExprUtils.convertToMapData(options),
child = child,
timeZoneId = None)
@transient
lazy val writer = new CharArrayWriter()
@transient
lazy val gen = new JacksonGenerator(
rowSchema, writer, new JSONOptions(options, timeZoneId.get))
@transient
lazy val rowSchema = child.dataType match {
case st: StructType => st
case ArrayType(st: StructType, _) => st
case mt: MapType => mt
case ArrayType(mt: MapType, _) => mt
}
// This converts rows to the JSON output according to the given schema.
@transient
lazy val converter: Any => UTF8String = {
def getAndReset(): UTF8String = {
gen.flush()
val json = writer.toString
writer.reset()
UTF8String.fromString(json)
}
child.dataType match {
case _: StructType =>
(row: Any) =>
gen.write(row.asInstanceOf[InternalRow])
getAndReset()
case ArrayType(_: StructType, _) =>
(arr: Any) =>
gen.write(arr.asInstanceOf[ArrayData])
getAndReset()
case _: MapType =>
(map: Any) =>
gen.write(map.asInstanceOf[MapData])
getAndReset()
case ArrayType(_: MapType, _) =>
(arr: Any) =>
gen.write(arr.asInstanceOf[ArrayData])
getAndReset()
}
}
override def dataType: DataType = StringType
override def checkInputDataTypes(): TypeCheckResult = child.dataType match {
case _: StructType | ArrayType(_: StructType, _) =>
try {
JacksonUtils.verifySchema(rowSchema.asInstanceOf[StructType])
TypeCheckResult.TypeCheckSuccess
} catch {
case e: UnsupportedOperationException =>
TypeCheckResult.TypeCheckFailure(e.getMessage)
}
case _: MapType | ArrayType(_: MapType, _) =>
// TODO: let `JacksonUtils.verifySchema` verify a `MapType`
try {
val st = StructType(StructField("a", rowSchema.asInstanceOf[MapType]) :: Nil)
JacksonUtils.verifySchema(st)
TypeCheckResult.TypeCheckSuccess
} catch {
case e: UnsupportedOperationException =>
TypeCheckResult.TypeCheckFailure(e.getMessage)
}
    case _ => TypeCheckResult.TypeCheckFailure(
      s"Input type ${child.dataType.catalogString} must be a struct, an array of structs, " +
        "or a map or array of maps.")
}
override def withTimeZone(timeZoneId: String): TimeZoneAwareExpression =
copy(timeZoneId = Option(timeZoneId))
override def nullSafeEval(value: Any): Any = converter(value)
override def inputTypes: Seq[AbstractDataType] = TypeCollection(ArrayType, StructType) :: Nil
}
/**
 * A function that infers the schema of a JSON string.
*/
@ExpressionDescription(
usage = "_FUNC_(json[, options]) - Returns schema in the DDL format of JSON string.",
examples = """
Examples:
> SELECT _FUNC_('[{"col":0}]');
array<struct<col:int>>
""",
since = "2.4.0")
case class SchemaOfJson(child: Expression)
extends UnaryExpression with String2StringExpression with CodegenFallback {
private val jsonOptions = new JSONOptions(Map.empty, "UTC")
private val jsonFactory = new JsonFactory()
jsonOptions.setJacksonOptions(jsonFactory)
override def convert(v: UTF8String): UTF8String = {
val dt = Utils.tryWithResource(CreateJacksonParser.utf8String(jsonFactory, v)) { parser =>
parser.nextToken()
inferField(parser, jsonOptions)
}
UTF8String.fromString(dt.catalogString)
}
}
object JsonExprUtils {
def evalSchemaExpr(exp: Expression): DataType = exp match {
case Literal(s, StringType) => DataType.fromDDL(s.toString)
case e @ SchemaOfJson(_: Literal) =>
val ddlSchema = e.eval().asInstanceOf[UTF8String]
DataType.fromDDL(ddlSchema.toString)
case e => throw new AnalysisException(
"Schema should be specified in DDL format as a string literal" +
s" or output of the schema_of_json function instead of ${e.sql}")
}
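  // Hedged illustration (added in editing; not part of Spark itself). How the
  // two accepted schema forms resolve, with hypothetical literal inputs:
  //
  //   evalSchemaExpr(Literal("a INT"))                       // struct<a:int>
  //   evalSchemaExpr(SchemaOfJson(Literal("""[{"a":1}]"""))) // array<struct<a:int>>
  //   evalSchemaExpr(anyOtherExpression)                     // throws AnalysisException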
def convertToMapData(exp: Expression): Map[String, String] = exp match {
case m: CreateMap
if m.dataType.acceptsType(MapType(StringType, StringType, valueContainsNull = false)) =>
val arrayMap = m.eval().asInstanceOf[ArrayBasedMapData]
ArrayBasedMapData.toScalaMap(arrayMap).map { case (key, value) =>
key.toString -> value.toString
}
case m: CreateMap =>
throw new AnalysisException(
s"A type of keys and values in map() must be string, but got ${m.dataType.catalogString}")
case _ =>
throw new AnalysisException("Must use a map() function for options")
}
}
| tejasapatil/spark | sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/jsonExpressions.scala | Scala | apache-2.0 | 27,881 |
/**
* Copyright 2014 Dropbox, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package djinni
import djinni.ast._
import java.io._
import djinni.generatorTools._
import djinni.meta._
import djinni.syntax.Error
import djinni.writer.IndentWriter
import scala.language.implicitConversions
import scala.collection.mutable
package object generatorTools {
case class Spec(
javaOutFolder: Option[File],
javaPackage: Option[String],
javaIdentStyle: JavaIdentStyle,
javaCppException: Option[String],
javaAnnotation: Option[String],
javaNullableAnnotation: Option[String],
javaNonnullAnnotation: Option[String],
cppOutFolder: Option[File],
cppHeaderOutFolder: Option[File],
cppIncludePrefix: String,
cppNamespace: String,
cppIdentStyle: CppIdentStyle,
cppFileIdentStyle: IdentConverter,
cppOptionalTemplate: String,
cppOptionalHeader: String,
cppEnumHashWorkaround: Boolean,
cppNnHeader: Option[String],
cppNnType: Option[String],
cppNnCheckExpression: Option[String],
jniOutFolder: Option[File],
jniHeaderOutFolder: Option[File],
jniIncludePrefix: String,
jniIncludeCppPrefix: String,
jniNamespace: String,
jniClassIdentStyle: IdentConverter,
jniFileIdentStyle: IdentConverter,
jniBaseLibIncludePrefix: String,
cppExt: String,
cppHeaderExt: String,
objcOutFolder: Option[File],
objcppOutFolder: Option[File],
objcIdentStyle: ObjcIdentStyle,
objcFileIdentStyle: IdentConverter,
objcppExt: String,
objcHeaderExt: String,
objcIncludePrefix: String,
objcppIncludePrefix: String,
objcppIncludeCppPrefix: String,
objcppIncludeObjcPrefix: String,
objcppNamespace: String,
objcBaseLibIncludePrefix: String,
outFileListWriter: Option[Writer],
skipGeneration: Boolean,
yamlOutFolder: Option[File],
yamlOutFile: Option[String],
yamlPrefix: String)
def preComma(s: String) = {
if (s.isEmpty) s else ", " + s
}
def q(s: String) = '"' + s + '"'
def firstUpper(token: String) = token.charAt(0).toUpper + token.substring(1)
type IdentConverter = String => String
case class CppIdentStyle(ty: IdentConverter, enumType: IdentConverter, typeParam: IdentConverter,
method: IdentConverter, field: IdentConverter, local: IdentConverter,
enum: IdentConverter, const: IdentConverter)
case class JavaIdentStyle(ty: IdentConverter, typeParam: IdentConverter,
method: IdentConverter, field: IdentConverter, local: IdentConverter,
enum: IdentConverter, const: IdentConverter)
case class ObjcIdentStyle(ty: IdentConverter, typeParam: IdentConverter,
method: IdentConverter, field: IdentConverter, local: IdentConverter,
enum: IdentConverter, const: IdentConverter)
object IdentStyle {
val camelUpper = (s: String) => s.split('_').map(firstUpper).mkString
val camelLower = (s: String) => {
val parts = s.split('_')
parts.head + parts.tail.map(firstUpper).mkString
}
val underLower = (s: String) => s
val underUpper = (s: String) => s.split('_').map(firstUpper).mkString("_")
val underCaps = (s: String) => s.toUpperCase
val prefix = (prefix: String, suffix: IdentConverter) => (s: String) => prefix + suffix(s)
val javaDefault = JavaIdentStyle(camelUpper, camelUpper, camelLower, camelLower, camelLower, underCaps, underCaps)
val cppDefault = CppIdentStyle(camelUpper, camelUpper, camelUpper, underLower, underLower, underLower, underCaps, underCaps)
val objcDefault = ObjcIdentStyle(camelUpper, camelUpper, camelLower, camelLower, camelLower, camelUpper, camelUpper)
val styles = Map(
"FooBar" -> camelUpper,
"fooBar" -> camelLower,
"foo_bar" -> underLower,
"Foo_Bar" -> underUpper,
"FOO_BAR" -> underCaps)
def infer(input: String): Option[IdentConverter] = {
styles.foreach((e) => {
val (str, func) = e
if (input endsWith str) {
val diff = input.length - str.length
return Some(if (diff > 0) {
val before = input.substring(0, diff)
prefix(before, func)
} else {
func
})
}
})
None
}
}
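  // Hedged illustration (added in editing; not part of djinni). `infer`
  // recognizes a known style token as a suffix and keeps any leading
  // characters as a literal prefix around the matched converter:
  //
  //   IdentStyle.infer("FooBar").map(_("some_name"))   // Some("SomeName")
  //   IdentStyle.infer("m_fooBar").map(_("some_name")) // Some("m_someName")
  //   IdentStyle.infer("mystery")                      // None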
final case class SkipFirst() {
private var first = true
def apply(f: => Unit) {
if (first) {
first = false
}
else {
f
}
}
}
case class GenerateException(message: String) extends java.lang.Exception(message)
def createFolder(name: String, folder: File) {
folder.mkdirs()
if (folder.exists) {
if (!folder.isDirectory) {
throw new GenerateException(s"Unable to create $name folder at ${q(folder.getPath)}, there's something in the way.")
}
} else {
throw new GenerateException(s"Unable to create $name folder at ${q(folder.getPath)}.")
}
}
def generate(idl: Seq[TypeDecl], spec: Spec): Option[String] = {
try {
if (spec.cppOutFolder.isDefined) {
if (!spec.skipGeneration) {
createFolder("C++", spec.cppOutFolder.get)
createFolder("C++ header", spec.cppHeaderOutFolder.get)
}
new CppGenerator(spec).generate(idl)
}
if (spec.javaOutFolder.isDefined) {
if (!spec.skipGeneration) {
createFolder("Java", spec.javaOutFolder.get)
}
new JavaGenerator(spec).generate(idl)
}
if (spec.jniOutFolder.isDefined) {
if (!spec.skipGeneration) {
createFolder("JNI C++", spec.jniOutFolder.get)
createFolder("JNI C++ header", spec.jniHeaderOutFolder.get)
}
new JNIGenerator(spec).generate(idl)
}
if (spec.objcOutFolder.isDefined) {
if (!spec.skipGeneration) {
createFolder("Objective-C", spec.objcOutFolder.get)
}
new ObjcGenerator(spec).generate(idl)
}
if (spec.objcppOutFolder.isDefined) {
if (!spec.skipGeneration) {
createFolder("Objective-C++", spec.objcppOutFolder.get)
}
new ObjcppGenerator(spec).generate(idl)
}
    if (spec.yamlOutFolder.isDefined) {
      if (!spec.skipGeneration) {
        createFolder("YAML", spec.yamlOutFolder.get)
      }
      new YamlGenerator(spec).generate(idl)
    }
None
}
catch {
case GenerateException(message) => Some(message)
}
}
sealed abstract class SymbolReference
case class ImportRef(arg: String) extends SymbolReference // Already contains <> or "" in C contexts
case class DeclRef(decl: String, namespace: Option[String]) extends SymbolReference
}
abstract class Generator(spec: Spec)
{
protected val writtenFiles = mutable.HashMap[String,String]()
protected def createFile(folder: File, fileName: String, makeWriter: OutputStreamWriter => IndentWriter, f: IndentWriter => Unit): Unit = {
if (spec.outFileListWriter.isDefined) {
      spec.outFileListWriter.get.write(new File(folder, fileName).getPath + "\n")
}
if (spec.skipGeneration) {
return
}
val file = new File(folder, fileName)
val cp = file.getCanonicalPath
writtenFiles.put(cp.toLowerCase, cp) match {
case Some(existing) =>
if (existing == cp) {
throw GenerateException("Refusing to write \\"" + file.getPath + "\\"; we already wrote a file to that path.")
} else {
throw GenerateException("Refusing to write \\"" + file.getPath + "\\"; we already wrote a file to a path that is the same when lower-cased: \\"" + existing + "\\".")
}
case _ =>
}
val fout = new FileOutputStream(file)
try {
val out = new OutputStreamWriter(fout, "UTF-8")
f(makeWriter(out))
out.flush()
}
finally {
fout.close()
}
}
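  // Hedged illustration (added in editing): writing "Foo.hpp" and then
  // "foo.hpp" into the same folder throws a GenerateException, because
  // writtenFiles keys on the lower-cased canonical path. This guards against
  // silently clobbering files on case-insensitive filesystems.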
protected def createFile(folder: File, fileName: String, f: IndentWriter => Unit): Unit = createFile(folder, fileName, out => new IndentWriter(out), f)
implicit def identToString(ident: Ident): String = ident.name
val idCpp = spec.cppIdentStyle
val idJava = spec.javaIdentStyle
val idObjc = spec.objcIdentStyle
def wrapNamespace(w: IndentWriter, ns: String, f: IndentWriter => Unit) {
ns match {
case "" => f(w)
case s =>
val parts = s.split("::")
w.wl(parts.map("namespace "+_+" {").mkString(" ")).wl
f(w)
w.wl
w.wl(parts.map(p => "}").mkString(" ") + s" // namespace $s")
}
}
def wrapAnonymousNamespace(w: IndentWriter, f: IndentWriter => Unit) {
w.wl("namespace { // anonymous namespace")
w.wl
f(w)
w.wl
w.wl("} // end anonymous namespace")
}
def writeHppFileGeneric(folder: File, namespace: String, fileIdentStyle: IdentConverter)(name: String, origin: String, includes: Iterable[String], fwds: Iterable[String], f: IndentWriter => Unit, f2: IndentWriter => Unit) {
createFile(folder, fileIdentStyle(name) + "." + spec.cppHeaderExt, (w: IndentWriter) => {
w.wl("// AUTOGENERATED FILE - DO NOT MODIFY!")
w.wl("// This file generated by Djinni from " + origin)
w.wl
w.wl("#pragma once")
if (includes.nonEmpty) {
w.wl
includes.foreach(w.wl)
}
w.wl
wrapNamespace(w, namespace,
(w: IndentWriter) => {
if (fwds.nonEmpty) {
fwds.foreach(w.wl)
w.wl
}
f(w)
}
)
f2(w)
})
}
def writeCppFileGeneric(folder: File, namespace: String, fileIdentStyle: IdentConverter, includePrefix: String)(name: String, origin: String, includes: Iterable[String], f: IndentWriter => Unit) {
createFile(folder, fileIdentStyle(name) + "." + spec.cppExt, (w: IndentWriter) => {
w.wl("// AUTOGENERATED FILE - DO NOT MODIFY!")
w.wl("// This file generated by Djinni from " + origin)
w.wl
val myHeader = q(includePrefix + fileIdentStyle(name) + "." + spec.cppHeaderExt)
w.wl(s"#include $myHeader // my header")
includes.foreach(w.wl(_))
w.wl
wrapNamespace(w, namespace, f)
})
}
def generate(idl: Seq[TypeDecl]) {
for (td <- idl.collect { case itd: InternTypeDecl => itd }) td.body match {
case e: Enum =>
assert(td.params.isEmpty)
generateEnum(td.origin, td.ident, td.doc, e)
case r: Record => generateRecord(td.origin, td.ident, td.doc, td.params, r)
case i: Interface => generateInterface(td.origin, td.ident, td.doc, td.params, i)
}
}
def generateEnum(origin: String, ident: Ident, doc: Doc, e: Enum)
def generateRecord(origin: String, ident: Ident, doc: Doc, params: Seq[TypeParam], r: Record)
def generateInterface(origin: String, ident: Ident, doc: Doc, typeParams: Seq[TypeParam], i: Interface)
// --------------------------------------------------------------------------
// Render type expression
def withNs(namespace: Option[String], t: String) = namespace match {
case None => t
case Some("") => "::" + t
case Some(s) => "::" + s + "::" + t
}
def withCppNs(t: String) = withNs(Some(spec.cppNamespace), t)
def writeAlignedCall(w: IndentWriter, call: String, params: Seq[Field], delim: String, end: String, f: Field => String): IndentWriter = {
w.w(call)
val skipFirst = new SkipFirst
params.foreach(p => {
skipFirst { w.wl(delim); w.w(" " * call.length()) }
w.w(f(p))
})
w.w(end)
}
def writeAlignedCall(w: IndentWriter, call: String, params: Seq[Field], end: String, f: Field => String): IndentWriter =
writeAlignedCall(w, call, params, ",", end, f)
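  // Hedged illustration (added in editing): with call = "foo(" and two fields
  // rendered as "a" and "b", writeAlignedCall is expected to emit
  //
  //   foo(a,
  //       b)
  //
  // i.e. each continuation line is padded with call.length spaces so the
  // arguments line up under the opening parenthesis.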
def writeAlignedObjcCall(w: IndentWriter, call: String, params: Seq[Field], end: String, f: Field => (String, String)) = {
w.w(call)
val skipFirst = new SkipFirst
params.foreach(p => {
val (name, value) = f(p)
skipFirst { w.wl; w.w(" " * math.max(0, call.length() - name.length)); w.w(name) }
w.w(":" + value)
})
w.w(end)
}
// --------------------------------------------------------------------------
def writeDoc(w: IndentWriter, doc: Doc) {
doc.lines.length match {
case 0 =>
case 1 =>
w.wl(s"/**${doc.lines.head} */")
case _ =>
w.wl("/**")
doc.lines.foreach (l => w.wl(s" *$l"))
w.wl(" */")
}
}
}
}
| pwais/djinni | src/source/generator.scala | Scala | apache-2.0 | 13,606 |
/*
* Copyright (c) 2017-2022 Lymia Alusyia <[email protected]>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package moe.lymia.princess.views.exportcards
import com.coconut_palm_software.xscalawt.XScalaWT._
import moe.lymia.princess.core.cardmodel.FullCardData
import moe.lymia.princess.util.swt.{DialogBase, HelpButton, UIUtils}
import moe.lymia.princess.views.mainframe.MainFrameState
import org.eclipse.jface.dialogs.ProgressMonitorDialog
import org.eclipse.jface.layout.GridDataFactory
import org.eclipse.jface.viewers._
import org.eclipse.swt.SWT
import org.eclipse.swt.events.SelectionEvent
import org.eclipse.swt.layout._
import org.eclipse.swt.widgets._
import java.nio.file.Paths
import java.util.UUID
sealed abstract class ExportCardsDialogBase[Data](state: MainFrameState) extends DialogBase(state.shell, state.ctx) {
protected val cardCount: Int
override def configureShell(newShell: Shell): Unit = {
super.configureShell(newShell)
newShell.setText(state.i18n.system("_princess.export.title"))
}
protected val hasControls: Boolean
protected def makeDataSettings(parent: Composite): () => Option[Data]
protected def export[T](format: ExportFormat[T, _], options: T, data: Data)
private var viewer : ComboViewer = _
private var dataSettings: () => Option[Data] = _
private var format : ExportFormat[_, _] = _
private var options : ExportControl[_] = _
private def doExport() =
if(format == null)
UIUtils.openMessage(this, SWT.ICON_ERROR | SWT.OK, state.i18n, "_princess.export.noSelectedFormat")
else options.getResult.foreach { options =>
dataSettings().foreach { data =>
export(format.asInstanceOf[ExportFormat[Any, _]], options.asInstanceOf[Any], data)
}
}
override protected def frameContents(frame: Composite): Unit = {
getShell.contains(
_.setLayout(new FillLayout),
_.setMinimumSize(450, 0)
)
frame.contains(
gridLayout(columns = 3)(),
composite(
gridLayout(columns = 2)(_.marginWidth = 0, _.marginHeight = 0),
label(
state.i18n.system("_princess.export.exportFormat", this.cardCount),
_.layoutData = new GridData(SWT.BEGINNING, SWT.CENTER, false, false)
),
*[ComboViewer](SWT.BORDER | SWT.READ_ONLY) (
this.viewer = _,
_.setContentProvider(ArrayContentProvider.getInstance()),
_.setLabelProvider(new LabelProvider {
override def getText(element: scala.Any): String =
state.i18n.system(element.asInstanceOf[ExportFormat[_, _]].displayName)
}),
_.setInput(ExportFormat.formats.toArray[Object]),
_.getControl.layoutData = new GridData(SWT.FILL, SWT.CENTER, true, false)
),
_.layoutData =
GridDataFactory.createFrom(new GridData(SWT.FILL, SWT.CENTER, true, false)).span(3, 1).create()
),
group(
state.i18n.system("_princess.export.exportSettings"),
gridLayout(columns = 3)(),
modifiedFillGridData(_.span(3, 1)),
group => dataSettings = makeDataSettings(group),
group => {
val mark = group.getChildren.length
def regenerate(): Unit = {
for(child <- group.getChildren.drop(mark)) child.dispose()
format = viewer.getStructuredSelection.getFirstElement.asInstanceOf[ExportFormat[_, _]]
if (format != null) options = format.makeControl(this, group, SWT.NONE, state)
val exclude = !hasControls && (format == null || !format.hasControls)
group.setVisible(!exclude)
group.getLayoutData.asInstanceOf[GridData].exclude = exclude
group.layout(true)
if(getShell.isVisible) getShell.pack(true)
}
regenerate()
viewer.addSelectionChangedListener(_ => regenerate())
}
),
label(
if(cardCount > 1) state.i18n.system("_princess.export.cardCount", cardCount) else "",
_.layoutData = new GridData(SWT.BEGINNING, SWT.CENTER, false, false)
),
button(
state.i18n.system("_princess.export.cancel"),
(x : SelectionEvent) => this.close(),
_.layoutData =
GridDataFactory.createFrom(new GridData(SWT.END, SWT.CENTER, true, false)).indent(140, 0).create()
),
button(
state.i18n.system("_princess.export.export"),
(x : SelectionEvent) => doExport(),
_.layoutData = new GridData(SWT.END, SWT.CENTER, false, false)
)
)
viewer.setSelection(new StructuredSelection(ExportFormat.formats.head))
getShell.pack(true)
}
def setFormat(format: ExportFormat[_, _]): Unit =
viewer.setSelection(new StructuredSelection(format))
override def open(): Int = {
state.ctx.asyncUiExec {
viewer.setSelection(new StructuredSelection(ExportFormat.formats.head))
}
super.open()
}
}
final class ExportCardsDialogSingle(state: MainFrameState, exportTarget: (UUID, FullCardData))
extends ExportCardsDialogBase[Unit](state) {
override protected val cardCount: Int = 1
override protected val hasControls: Boolean = false
override protected def makeDataSettings(parent: Composite): () => Option[Unit] = () => Some(())
override protected def export[T](format: ExportFormat[T, _], options: T, data: Unit): Unit = {
val selector = new FileDialog(getShell, SWT.SAVE)
// TODO: Error check the LuaNameSpec
val nameSpec =
LuaNameSpec(state.game.lua.L, state.idData.export.defaultNameFormat, ExportMultiTask.NameSpecFieldNames : _*)
val defaultName =
nameSpec.left.get.makeName(exportTarget._1.toString, exportTarget._2.luaData.now)
selector.setFileName(format.addExtension(defaultName.left.get))
selector.setFilterNames(Array(state.i18n.system(format.displayName)))
selector.setFilterExtensions(Array(format.extension.map(x => s"*.$x").mkString(";")))
selector.setOverwrite(false)
selector.open() match {
case null =>
// canceled
case name =>
new ExportSingleTask(state, Paths.get(name), format, options, exportTarget).run()
this.close()
}
}
}
final class ExportCardsDialogMulti(state: MainFrameState, exportTargets: Seq[(UUID, FullCardData)])
extends ExportCardsDialogBase[LuaNameSpec](state) {
override protected val cardCount: Int = exportTargets.length
private var nameSpec: Text = _
override protected val hasControls: Boolean = true
override protected def makeDataSettings(parent: Composite): () => Option[LuaNameSpec] = {
parent.contains(
label(
state.i18n.system("_princess.export.nameFormat"),
_.layoutData = new GridData(SWT.BEGINNING, SWT.CENTER, false, false)
),
*[Text](SWT.SINGLE | SWT.BORDER) (
this.nameSpec = _,
state.idData.export.defaultNameFormat,
_.layoutData = new GridData(SWT.FILL, SWT.CENTER, true, false)
),
*[HelpButton](SWT.NONE) (
_.setBalloonMessage(
state.i18n.system("_princess.export.nameFormat.help")+
(state.idData.export.helpText match {
            case Some(x) => "\n\n"+state.i18n.user(x)
case None => ""
})
),
_.layoutData = new GridData(SWT.CENTER, SWT.CENTER, false, false)
)
)
() => {
LuaNameSpec(state.game.lua.L, nameSpec.getText, ExportMultiTask.NameSpecFieldNames : _*) match {
case Left(x) => Some(x)
case Right(x) =>
UIUtils.openMessage(state.shell, SWT.ICON_ERROR | SWT.OK, state.i18n,
"_princess.export.invalidNameFormat", x)
None
}
}
}
override protected def export[T](format: ExportFormat[T, _], options: T, nameSpec: LuaNameSpec): Unit = {
val selector = new DirectoryDialog(getShell, SWT.NONE)
selector.open() match {
case null =>
case name =>
this.close()
val runnable = new ExportMultiTask(state, Paths.get(name), nameSpec, format, options, exportTargets)
new ProgressMonitorDialog(state.shell.getShell).run(true, true, runnable)
}
}
}
object ExportCardsDialog {
def open(state: MainFrameState, exportTargets: (UUID, FullCardData)*): Int =
if(exportTargets.isEmpty) sys.error("No export targets!")
else if(exportTargets.length == 1) new ExportCardsDialogSingle(state, exportTargets.head).open()
else new ExportCardsDialogMulti(state, exportTargets).open()
}
| Lymia/PrincessEdit | modules/princess-edit/src/main/scala/moe/lymia/princess/views/exportcards/exportdialog.scala | Scala | mit | 9,489 |
package cspom.compiler
import cspom.CSPOM._
import cspom.util.IntInterval
import cspom.variable.{CSPOMSeq, IntVariable, SimpleExpression}
import cspom.{CSPOM, CSPOMConstraint}
import org.scalatest.{FlatSpec, Matchers, TryValues}
final class CompileTest extends FlatSpec with Matchers with TryValues {
// "CSPOMCompiler" should "compile zebra" in {
// compileTest("zebra.xml.xz");
// }
//
// def compileTest(fn: String) {
// CSPOM.load(classOf[CompileTest].getResource(fn)).map {
// case cspom => CSPOMCompiler.compile(cspom, StandardCompilers())
// } should be a 'success
//
// CSPOM.load(classOf[CompileTest].getResource(fn)).map {
// case cspom => CSPOMCompiler.compile(cspom, StandardCompilers() ++ StandardCompilers.improve())
// } should be a 'success
// }
"MergeEq" should "merge trues" in {
val cspom = CSPOM { implicit problem =>
val v0 = IntVariable(1, 2, 3) as "v0"
val v1 = IntVariable(2, 3, 4) as "v1"
// val v2 = IntVariable(1, 2, 3) as "v2"
// val v3 = IntVariable(1, 2, 3) as "v3"
val b = problem.defineBool(r => CSPOMConstraint(r)("not")(v0 === v1))
ctr(b)
}
CSPOMCompiler.compile(cspom, Seq(MergeEq, NegToCNF, SimplClause, UnaryClause)) //, Reversed, AllDiff))
//println(cspom)
withClue(cspom) {
cspom.constraints should have size 1
assert(cspom.constraints.exists {
c => c.function == "eq" && c.result.isFalse
})
}
}
it should "replace properly" in {
var a1, a2: SimpleExpression[Int] = null
var eq, le1, le2: CSPOMConstraint[_] = null
val problem = CSPOM { implicit problem =>
val x = IntVariable(0 to 1) as "X"
a1 = problem.defineInt(a =>
CSPOMConstraint("sum")(CSPOMSeq(a, x), CSPOMSeq(-1, 1), 0) withParam "mode" -> "eq")
a2 = problem.defineInt { a =>
eq = CSPOMConstraint("eq")(a1, a)
eq
}
le1 = CSPOMConstraint("sum")(CSPOMSeq(a1, 10), CSPOMSeq(1, -1), 0) withParam "mode" -> "le"
le2 = CSPOMConstraint("sum")(CSPOMSeq(11, a2), CSPOMSeq(1, -1), 0) withParam "mode" -> "le"
ctr(le1)
ctr(le2)
}
// println(problem)
val delta2 = ConstraintCompiler.replaceCtr(le1, CSPOMConstraint("sum")(CSPOMSeq(a1), CSPOMSeq(1), 10) withParam "mode" -> "le", problem)
// println(delta2.toString(problem))
// println(problem)
// println("==========")
val delta3 = ConstraintCompiler.replaceCtr(le2, CSPOMConstraint("sum")(CSPOMSeq(a2), CSPOMSeq(-1), -11) withParam "mode" -> "le", problem)
// println(delta3.toString(problem))
// println(problem)
// println("==========")
val delta4 = MergeEq.compile(eq, problem)
// println(delta4.toString(problem))
// println(problem)
// println("==========")
val delta5 = ConstraintCompiler.replace(a1, IntVariable(IntInterval.atMost(10)), problem)
// println(delta5.toString(problem))
// println(problem)
//withClue(delta5.toString(problem)) {
problem.referencedExpressions should not contain a1
//}
}
}
| concrete-cp/cspom | src/test/scala/cspom/compiler/CompileTest.scala | Scala | lgpl-2.1 | 3,086 |
package com.twitter.scalding.parquet.tuple.macros.impl
import com.twitter.bijection.macros.impl.IsCaseClassImpl
import com.twitter.scalding.parquet.tuple.scheme._
import scala.reflect.macros.Context
object ParquetReadSupportProvider {
private[this] sealed trait CollectionType
private[this] case object NOT_A_COLLECTION extends CollectionType
private[this] case object OPTION extends CollectionType
private[this] case object LIST extends CollectionType
private[this] case object SET extends CollectionType
private[this] case object MAP extends CollectionType
def toParquetReadSupportImpl[T](ctx: Context)(implicit T: ctx.WeakTypeTag[T]): ctx.Expr[ParquetReadSupport[T]] = {
import ctx.universe._
if (!IsCaseClassImpl.isCaseClassType(ctx)(T.tpe))
ctx.abort(ctx.enclosingPosition,
s"""We cannot enforce ${T.tpe} is a case class,
either it is not a case class or this macro call is possibly enclosed in a class.
This will mean the macro is operating on a non-resolved type.""")
def buildGroupConverter(tpe: Type, converters: List[Tree], converterGetters: List[Tree],
converterResetCalls: List[Tree], valueBuilder: Tree): Tree =
q"""new _root_.com.twitter.scalding.parquet.tuple.scheme.ParquetTupleConverter[$tpe]{
..$converters
override def currentValue: $tpe = $valueBuilder
override def getConverter(i: Int): _root_.org.apache.parquet.io.api.Converter = {
..$converterGetters
throw new RuntimeException("invalid index: " + i)
}
override def reset(): Unit = {
..$converterResetCalls
}
}"""
def matchField(idx: Int, fieldType: Type, collectionType: CollectionType): (Tree, Tree, Tree, Tree) = {
def fieldConverter(converterName: TermName, converter: Tree, isPrimitive: Boolean = false): Tree = {
def primitiveCollectionElementConverter: Tree =
q"""override val child: _root_.com.twitter.scalding.parquet.tuple.scheme.TupleFieldConverter[$fieldType] =
new _root_.com.twitter.scalding.parquet.tuple.scheme.CollectionElementPrimitiveConverter[$fieldType](this) {
override val delegate: _root_.com.twitter.scalding.parquet.tuple.scheme.PrimitiveFieldConverter[$fieldType] = $converter
}
"""
def caseClassFieldCollectionElementConverter: Tree =
q"""override val child: _root_.com.twitter.scalding.parquet.tuple.scheme.TupleFieldConverter[$fieldType] =
new _root_.com.twitter.scalding.parquet.tuple.scheme.CollectionElementGroupConverter[$fieldType](this) {
override val delegate: _root_.com.twitter.scalding.parquet.tuple.scheme.TupleFieldConverter[$fieldType] = $converter
}
"""
collectionType match {
case OPTION =>
val child = if (isPrimitive) primitiveCollectionElementConverter else caseClassFieldCollectionElementConverter
q"""
val $converterName = new _root_.com.twitter.scalding.parquet.tuple.scheme.OptionConverter[$fieldType] {
$child
}
"""
case LIST =>
val child = if (isPrimitive) primitiveCollectionElementConverter else caseClassFieldCollectionElementConverter
q"""
val $converterName = new _root_.com.twitter.scalding.parquet.tuple.scheme.ListConverter[$fieldType] {
$child
}
"""
case SET =>
val child = if (isPrimitive) primitiveCollectionElementConverter else caseClassFieldCollectionElementConverter
q"""
val $converterName = new _root_.com.twitter.scalding.parquet.tuple.scheme.SetConverter[$fieldType] {
$child
}
"""
case MAP => converter
case _ => q"val $converterName = $converter"
}
}
def createMapFieldConverter(converterName: TermName, K: Type, V: Type, keyConverter: Tree,
valueConverter: Tree): Tree =
q"""val $converterName = new _root_.com.twitter.scalding.parquet.tuple.scheme.MapConverter[$K, $V] {
override val child: _root_.com.twitter.scalding.parquet.tuple.scheme.TupleFieldConverter[($K, $V)] =
new _root_.com.twitter.scalding.parquet.tuple.scheme.MapKeyValueConverter[$K, $V](this) {
override val keyConverter: _root_.com.twitter.scalding.parquet.tuple.scheme.TupleFieldConverter[$K] = $keyConverter
override val valueConverter: _root_.com.twitter.scalding.parquet.tuple.scheme.TupleFieldConverter[$V] = $valueConverter
}
}
"""
def createFieldMatchResult(converterName: TermName, converter: Tree): (Tree, Tree, Tree, Tree) = {
val converterGetter: Tree = q"if($idx == i) return $converterName"
val converterResetCall: Tree = q"$converterName.reset()"
val converterFieldValue: Tree = q"$converterName.currentValue"
(converter, converterGetter, converterResetCall, converterFieldValue)
}
def matchPrimitiveField(converterType: Type): (Tree, Tree, Tree, Tree) = {
val converterName = newTermName(ctx.fresh(s"fieldConverter"))
val innerConverter: Tree = q"new $converterType()"
val converter: Tree = fieldConverter(converterName, innerConverter, isPrimitive = true)
createFieldMatchResult(converterName, converter)
}
def matchCaseClassField(groupConverter: Tree): (Tree, Tree, Tree, Tree) = {
val converterName = newTermName(ctx.fresh(s"fieldConverter"))
val converter: Tree = fieldConverter(converterName, groupConverter)
createFieldMatchResult(converterName, converter)
}
def matchMapField(K: Type, V: Type, keyConverter: Tree, valueConverter: Tree): (Tree, Tree, Tree, Tree) = {
val converterName = newTermName(ctx.fresh(s"fieldConverter"))
val mapConverter = createMapFieldConverter(converterName, K, V, keyConverter, valueConverter)
createFieldMatchResult(converterName, mapConverter)
}
fieldType match {
case tpe if tpe =:= typeOf[String] =>
matchPrimitiveField(typeOf[StringConverter])
case tpe if tpe =:= typeOf[Boolean] =>
matchPrimitiveField(typeOf[BooleanConverter])
case tpe if tpe =:= typeOf[Byte] =>
matchPrimitiveField(typeOf[ByteConverter])
case tpe if tpe =:= typeOf[Short] =>
matchPrimitiveField(typeOf[ShortConverter])
case tpe if tpe =:= typeOf[Int] =>
matchPrimitiveField(typeOf[IntConverter])
case tpe if tpe =:= typeOf[Long] =>
matchPrimitiveField(typeOf[LongConverter])
case tpe if tpe =:= typeOf[Float] =>
matchPrimitiveField(typeOf[FloatConverter])
case tpe if tpe =:= typeOf[Double] =>
matchPrimitiveField(typeOf[DoubleConverter])
case tpe if tpe.erasure =:= typeOf[Option[Any]] =>
val innerType = tpe.asInstanceOf[TypeRefApi].args.head
matchField(idx, innerType, OPTION)
case tpe if tpe.erasure =:= typeOf[List[Any]] =>
val innerType = tpe.asInstanceOf[TypeRefApi].args.head
matchField(idx, innerType, LIST)
case tpe if tpe.erasure =:= typeOf[Set[_]] =>
val innerType = tpe.asInstanceOf[TypeRefApi].args.head
matchField(idx, innerType, SET)
case tpe if tpe.erasure =:= typeOf[Map[_, Any]] =>
val List(keyType, valueType) = tpe.asInstanceOf[TypeRefApi].args
val (keyConverter, _, _, _) = matchField(0, keyType, MAP)
val (valueConverter, _, _, _) = matchField(0, valueType, MAP)
matchMapField(keyType, valueType, keyConverter, valueConverter)
case tpe if IsCaseClassImpl.isCaseClassType(ctx)(tpe) =>
val (innerConverters, innerConvertersGetters, innerConvertersResetCalls, innerFieldValues) = unzip(expandMethod(tpe))
val innerValueBuilderTree = buildTupleValue(tpe, innerFieldValues)
val converterTree: Tree = buildGroupConverter(tpe, innerConverters, innerConvertersGetters,
innerConvertersResetCalls, innerValueBuilderTree)
matchCaseClassField(converterTree)
case _ => ctx.abort(ctx.enclosingPosition, s"Case class $T has unsupported field type : $fieldType ")
}
}
def expandMethod(outerTpe: Type): List[(Tree, Tree, Tree, Tree)] =
outerTpe
.declarations
.collect { case m: MethodSymbol if m.isCaseAccessor => m }
.zipWithIndex
.map {
case (accessorMethod, idx) =>
val fieldType = accessorMethod.returnType
matchField(idx, fieldType, NOT_A_COLLECTION)
}.toList
def unzip(treeTuples: List[(Tree, Tree, Tree, Tree)]): (List[Tree], List[Tree], List[Tree], List[Tree]) = {
val emptyTreeList = List[Tree]()
    treeTuples.foldRight((emptyTreeList, emptyTreeList, emptyTreeList, emptyTreeList)) {
case ((t1, t2, t3, t4), (l1, l2, l3, l4)) =>
(t1 :: l1, t2 :: l2, t3 :: l3, t4 :: l4)
}
}
def buildTupleValue(tpe: Type, fieldValueBuilders: List[Tree]): Tree = {
if (fieldValueBuilders.isEmpty)
ctx.abort(ctx.enclosingPosition, s"Case class $tpe has no primitive types we were able to extract")
val companion = tpe.typeSymbol.companionSymbol
q"$companion(..$fieldValueBuilders)"
}
val (converters, converterGetters, convertersResetCalls, fieldValues) = unzip(expandMethod(T.tpe))
val groupConverter = buildGroupConverter(T.tpe, converters, converterGetters, convertersResetCalls,
buildTupleValue(T.tpe, fieldValues))
val schema = ParquetSchemaProvider.toParquetSchemaImpl[T](ctx)
val readSupport = q"""
new _root_.com.twitter.scalding.parquet.tuple.scheme.ParquetReadSupport[$T]($schema) {
override val tupleConverter: _root_.com.twitter.scalding.parquet.tuple.scheme.ParquetTupleConverter[$T] = $groupConverter
}
"""
ctx.Expr[ParquetReadSupport[T]](readSupport)
}
}
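// Hedged usage sketch (added in editing; `SampleRow` is hypothetical and not
// part of scalding). The macro walks the case accessors in declaration order,
// so for
//
//   case class SampleRow(a: Int, names: List[String], tags: Map[String, Int])
//
// it would wire an IntConverter at index 0, a ListConverter[String] at
// index 1 and a MapConverter[String, Int] at index 2 into the generated
// ParquetReadSupport[SampleRow].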
| sriramkrishnan/scalding | scalding-parquet/src/main/scala/com/twitter/scalding/parquet/tuple/macros/impl/ParquetReadSupportProvider.scala | Scala | apache-2.0 | 10,168 |
/*
* -╥⌐⌐⌐⌐ -⌐⌐⌐⌐-
* ≡╢░░░░⌐\\░░░φ ╓╝░░░░⌐░░░░╪╕
* ╣╬░░` `░░░╢┘ φ▒╣╬╝╜ ░░╢╣Q
* ║╣╬░⌐ ` ╤▒▒▒Å` ║╢╬╣
* ╚╣╬░⌐ ╔▒▒▒▒`«╕ ╢╢╣▒
* ╫╬░░╖ .░ ╙╨╨ ╣╣╬░φ ╓φ░╢╢Å
* ╙╢░░░░⌐"░░░╜ ╙Å░░░░⌐░░░░╝`
* ``˚¬ ⌐ ˚˚⌐´
*
* Copyright © 2016 Flipkart.com
*/
package com.flipkart.connekt.busybees.tests.streams.openweb
import java.util.Date
import com.flipkart.connekt.busybees.encryption.WebPushEncryptionUtils
import com.flipkart.connekt.commons.tests.ConnektUTSpec
import io.jsonwebtoken.{Jwts, SignatureAlgorithm, SignatureException}
import org.jose4j.jws.{AlgorithmIdentifiers, JsonWebSignature}
import org.jose4j.jwt.JwtClaims
/**
* Created by kinshuk.bairagi on 22/02/17.
*/
class WebPushEncryptionTest extends ConnektUTSpec {
val privateKey = "vQ7OPEz9s2KogdXyJ3Y47nLS2oE7QSjHm7NFEEoV8X0"
val publicKey = "BGFhUDXbv6bx3cZI0LintxwMroAD7VSzlRASzjLC3iU7bMIEsj0Kn1RJTbbNbGo7DzMZ8XUEKPemB5qN_6rNc_U"
"WebPushEncryption" should "do generate jwt properly" in {
val key = WebPushEncryptionUtils.loadPrivateKey(privateKey)
val claims = new JwtClaims()
claims.setAudience("https://fcm.googleapis.com")
claims.setExpirationTimeMinutesInTheFuture(12 * 60)
claims.setSubject("mailto:[email protected]")
val jws = new JsonWebSignature()
jws.setHeader("typ", "JWT")
jws.setPayload(claims.toJson)
jws.setKey(key)
jws.setAlgorithmHeaderValue(AlgorithmIdentifiers.ECDSA_USING_P256_CURVE_AND_SHA256)
val compactJws = jws.getCompactSerialization.stripSuffix("=")
println("WebPush1 " + compactJws)
assert(verifyToken(compactJws))
}
"Webpush" should "using jjwt" in {
val key = WebPushEncryptionUtils.loadPrivateKey(privateKey)
val compactJws = Jwts.builder()
.setHeaderParam("typ", "JWT")
.setSubject("mailto:[email protected]")
.setAudience("https://fcm.googleapis.com")
.setExpiration(new Date(System.currentTimeMillis() + 3600000))
.signWith(SignatureAlgorithm.ES256, key)
.compact()
println("WebPush2 " + compactJws)
assert(verifyToken(compactJws))
}
def verifyToken(token: String): Boolean = {
val pKey = WebPushEncryptionUtils.loadPublicKey(publicKey)
try {
Jwts.parser().setSigningKey(pKey).parseClaimsJws(token)
//OK, we can trust this JWT
true
} catch {
case e: SignatureException => e.printStackTrace()
false
//don't trust the JWT!
}
}
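  // Hedged illustration (added in editing; `otherKey` is a hypothetical EC
  // private key that does not match `publicKey`): such a token should take
  // the SignatureException branch above and return false.
  //
  //   val forged = Jwts.builder().setSubject("x")
  //     .signWith(SignatureAlgorithm.ES256, otherKey).compact()
  //   assert(!verifyToken(forged))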
}
| Flipkart/connekt | busybees/src/test/scala/com/flipkart/connekt/busybees/tests/streams/openweb/WebPushEncryptionTest.scala | Scala | mit | 2,796 |
/*
* Copyright (C) 2011 the original author or authors.
* See the license.txt file distributed with this work for additional
* information regarding copyright ownership.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.fhworms.ztt.rcap
import akka.actor.ActorRef
import akka.actor.Actor._
import java.net.InetAddress
import de.fhworms.ztt.rcap.messages._
import de.fhworms.ztt.rcap.util._
import de.fhworms.ztt.rcap.logger._
import de.fhworms.ztt.rcap.actor.ListenerActor
object ClipboardListener {
private var sleepTime = 1000
def main(args: Array[String]): Unit = {
val env = System.getenv
if (env.containsKey("RCAP_HOME"))
HOME = env.get("RCAP_HOME")
Logger(HOME + "/listener.log")
initializeConfiguration(args)
// val server = getServer
val listener = actorOf(new ListenerActor(sleepTime)).start
listener ! "Repeat"
addShutDownHook {
Logger.stop
listener ! Stop
}
// Logger.info("Connected to Actor: " + HOST + "-rcap")
// var prevContent = de.fhworms.ztt.rcap.clipboard.TextClipboard.content()
// while (true) {
// Thread.sleep(sleepTime)
// try {
// val content = de.fhworms.ztt.rcap.clipboard.TextClipboard.content()
// if (content != prevContent) {
// server ! ClipboardChanged(content)
// prevContent = content
//          Logger.debug("Content of the clipboard has changed to:\n\t" + content)
// }
// } catch {
// case e: Exception => Logger warning e.getStackTraceString
// }
// }
}
private def getServer = remote.actorFor(HOST + "-rcap",
HOST,
PORT)
private def initializeConfiguration(args: Array[String]) = {
HOST = InetAddress.getLocalHost.getHostName
val configFileIterator =
if (args.indexOf("--conf") >= 0) {
readConfFile(args(args.indexOf("--conf") + 1)) _
} else {
readConfFile(HOME + "/rcap.conf") _
}
configFileIterator {
case Array("localhost", host) => HOST = host; Logger.info("Host: " + host)
case Array("port", port) => PORT = port.toInt; Logger.info("Port: " + port)
case Array("loglevel", level) => level.toLowerCase match {
case "debug" => Logger.Level = Debug
case "info" => Logger.Level = Info
case "warning" => Logger.Level = Warning
case "error" => Logger.Level = Error
}
case Array("listenersleeptime", time) => sleepTime = time.toInt
case _ =>
}
if (args.indexOf("-h") >= 0) {
HOST = args(args.indexOf("-h") + 1)
}
if (args.indexOf("-p") >= 0) {
PORT = args(args.indexOf("-p") + 1).toInt
}
if (args.indexOf("-t") >= 0) {
sleepTime = args(args.indexOf("-t") + 1).toInt
}
}
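  // Hedged illustration (added in editing; the exact key/value delimiter is
  // whatever readConfFile splits on). A configuration file is expected to
  // carry entries matched above, e.g.
  //
  //   localhost <host>
  //   port <port>
  //   loglevel debug|info|warning|error
  //   listenersleeptime <millis>
  //
  // with the -h, -p and -t command-line flags overriding the file values.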
}
| Frogurth/RCAP | src/de/fhworms/ztt/rcap/ClipboardListener.scala | Scala | apache-2.0 | 3,326 |
package org.elasticsearch.spark.sql
import org.junit.Assert._
import org.junit.Test
import scala.collection.mutable
class DefaultSourceTest {
@Test
def parameters(): Unit = {
val settings = new mutable.LinkedHashMap[String, String]()
settings.put("path", "wrong")
settings.put("resource", "wrong")
settings.put("es_resource", "preferred")
settings.put("unrelated", "unrelated")
val relation = new DefaultSource().params(settings.toMap)
assertEquals(Map("es.resource" -> "preferred", "es.unrelated" -> "unrelated"), relation)
}
}
| takezoe/elasticsearch-hadoop | spark/sql-20/src/test/scala/org/elasticsearch/spark/sql/DefaultSourceTest.scala | Scala | apache-2.0 | 568 |
package app.state
import model.fighters.Fighter
import scala.collection.mutable
import scala.collection.mutable.ListBuffer
class DuelState extends ArenaState {
override def addFighter(fighter: Fighter) = {
if(fighters.size == 2)
throw new Exception("Два бойца уже выбрано.")
super.addFighter(fighter)
}
/**
   * Simulates the battle.
   * @return log entries
*/
override def fight: List[String] = {
if(fighters.size != 2)
throw new Exception("Нужно выбрать двух бойцов.")
var i = 0
var pair = (fighters.head, fighters.last)
val logBuffer = ListBuffer[String]()
prepareFighters()
do {
logBuffer += s"Ход $i. $fightersState\\n" + pair._1.attack(pair._2)
pair = pair.swap
i += 1
} while(!pair._1.isDead && !pair._2.isDead && i < 100)
logBuffer += s"\\nКонец боя. $fightersState\\nПобедитель: " +
List(pair._1, pair._2).filter(!_.isDead).map(_.name).reduceOption(_+ ", " +_).getOrElse("ничья")
logBuffer.result()
}
}
| treble-snake/gladiators | src/main/scala/app/state/DuelState.scala | Scala | apache-2.0 | 1,099 |
/*
* Copyright 2015 IBM Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.ibm.spark.communication.socket
import org.zeromq.ZMQ.{Socket, Context}
/**
* Represents the runnable component of a socket specifically targeted towards
* publish sockets. No incoming messages are processed.
*
* @param context The ZMQ context to use with this runnable to create a socket
* @param socketOptions The options to use when creating the socket
*/
class PubSocketRunnable(
private val context: Context,
private val socketOptions: SocketOption*
) extends ZeroMQSocketRunnable(
context,
PubSocket,
None,
socketOptions: _*
) {
/** Does nothing. */
override protected def processNextInboundMessage(
socket: Socket,
flags: Int
): Unit = {}
}
| yeghishe/spark-kernel | communication/src/main/scala/com/ibm/spark/communication/socket/PubSocketRunnable.scala | Scala | apache-2.0 | 1,295 |
package eve
import eveapi.utils.Decoders._
import eveapi.errors.{EveApiError, EveApiStatusFailed}
import java.time.Clock
import scalaz.stream.async.mutable.Topic
import scala.concurrent.duration.Duration
import scala.collection.concurrent.TrieMap
import scalaz.stream.{Exchange, Process, Sink, async}
import scalaz.concurrent.Task
import scalaz._
import java.util.concurrent.ScheduledExecutorService
import org.http4s.{ Response, Uri }
import org.http4s.Uri.Authority
import org.http4s.util.CaseInsensitiveString
import models._
import oauth._
import shared._
import eveapi._
import eveapi.oauth._
case class EveServer(server: Uri.RegName)
case class TopicKey(fleetId: Long, userId: Long)
case class TopicHolder(pollInterval: Duration, oauth: OAuth2, server: EveServer)(implicit s: ScheduledExecutorService) {
def fleetUri(id: Long, server: EveServer) = Uri(scheme = Some(CaseInsensitiveString("https")), authority = Some(Authority(host=server.server)), path = s"/fleets/$id/")
private val topics = TrieMap[TopicKey, Topic[EveApiError \\/ FleetState]]()
def apply(user: User, fleetId: Long): Topic[EveApiError \\/ FleetState] = {
topics
.retain({ case (id, topic) =>
! topic.subscribe.isHalt
})
.getOrElseUpdate(TopicKey(fleetId, user.id), {
async.topic(
ApiStream.fleetPollSource(fleetUri(fleetId, server), pollInterval, Execute.OAuthInterpreter)
.translate[Task](ApiStream.fromApiStream(oauth, user.token))
, true)
}
)
}
}
| reactormonk/fleet-buddy | server/src/main/scala/eve/topics.scala | Scala | agpl-3.0 | 1,522 |
/*
* This file is part of pelam-scala-csv
*
* Copyright © Peter Lamberg 2015 ([email protected])
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package fi.pelam.csv.table
import fi.pelam.csv.cell.{CellKey, IntegerCell, StringCell}
import org.junit.Assert._
import org.junit.Test
class TableReadingErrorTest {
import TableReadingErrorTest._
@Test
def testToString: Unit = {
assertEquals("Some error occured.\\n" +
" The error is related to the StringCell with value 'foocell' at Row 2, Column C (2).", errorFoo.toString)
assertEquals("Another error occured.\\n" +
" The error is related to the IntegerCell with value '123' at Row 3, Column D (3).", errorBar.toString)
}
@Test
def testAddedDetailsCellDefined: Unit = {
// This should have no effect
val alreadySpecifiedCell = errorFoo.relatedCellAdded(intCell)
assertEquals("Some error occured.\\n" +
" The error is related to the StringCell with value 'foocell' at Row 2, Column C (2).", alreadySpecifiedCell.toString)
}
@Test
def testAddedDetailsMessage: Unit = {
// This should have no effect
val alreadySpecifiedCell = errorFoo.messageAppended("Appended.")
assertEquals("Some error occured.\\n Appended.\\n" +
" The error is related to the StringCell with value 'foocell' at Row 2, Column C (2).", alreadySpecifiedCell.toString)
}
@Test
def testAddedDetailsCell: Unit = {
val modifiedError = TableReadingError("Error originally without Cell.").relatedCellAdded(intCell)
assertEquals("Error originally without Cell.\\n" +
" The error is related to the IntegerCell with value '123' at Row 3, Column D (3).", modifiedError.toString)
}
}
object TableReadingErrorTest {
val stringCell = StringCell(CellKey(1, 2), "foocell")
  val errorFoo = TableReadingError("Some error occurred.", Some(stringCell))
val intCell = IntegerCell(CellKey(2, 3), 123)
  val errorBar = TableReadingError("Another error occurred.", Some(intCell))
}
| pelamfi/pelam-scala-csv | src/test/scala/fi/pelam/csv/table/TableReadingErrorTest.scala | Scala | apache-2.0 | 2,513 |
package com.etsy.sbt
import java.util
import sbt._
import sbt.internal.util.ManagedLogger
import sjsonnew.BasicJsonProtocol
import xsbti.{AnalysisCallback, Position, Severity, UseScope}
import xsbti.api.{ClassLike, DependencyContext}
import xsbti.compile.{Compilers, DependencyChanges, Inputs}
private[sbt] object Compatibility {
def createPackage(config: Package.Configuration, streams: Keys.TaskStreams): Unit = {
Package(config, streams.cacheStoreFactory, streams.log)
}
def scalac(compilers: Compilers,
sources: Seq[File],
changes: DependencyChanges,
classpath: Seq[File],
outputDir: File,
options: Seq[String],
callback: AnalysisCallback,
maxErrors: Int,
inputs: Inputs,
log: ManagedLogger): Unit = {
compilers.scalac() match {
case c: sbt.internal.inc.AnalyzingCompiler =>
c.apply(
sources.toArray,
changes,
classpath.toArray,
outputDir,
options.toArray,
callback,
maxErrors,
inputs.setup().cache(),
log
)
case unknown_compiler =>
log.error("wrong compiler, expected 'sbt.internal.inc.AnalyzingCompiler' got: " + unknown_compiler.getClass.getName)
}
}
object Implicits extends BasicJsonProtocol
// This discards the analysis produced by compiling one file, as it
// isn't that useful
object noopCallback extends xsbti.AnalysisCallback {
override def startSource(source: File): Unit = {}
override def mainClass(sourceFile: File, className: String): Unit = {}
override def apiPhaseCompleted(): Unit = {}
override def enabled(): Boolean = false
override def binaryDependency(onBinaryEntry: File, onBinaryClassName: String, fromClassName: String, fromSourceFile: File, context: DependencyContext): Unit = {}
override def generatedNonLocalClass(source: File, classFile: File, binaryClassName: String, srcClassName: String): Unit = {}
override def problem(what: String, pos: Position, msg: String, severity: Severity, reported: Boolean): Unit = {}
override def dependencyPhaseCompleted(): Unit = {}
override def classDependency(onClassName: String, sourceClassName: String, context: DependencyContext): Unit = {}
override def generatedLocalClass(source: File, classFile: File): Unit = {}
override def api(sourceFile: File, classApi: ClassLike): Unit = {}
override def usedName(className: String, name: String, useScopes: util.EnumSet[UseScope]): Unit = {}
}
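  // Hedged usage sketch (added in editing; all value names are assumptions):
  // compile a handful of files without recording incremental-compilation
  // state.
  //
  //   Compatibility.scalac(compilers, Seq(sourceFile), emptyChanges,
  //     classpath, outputDir, scalacOptions, Compatibility.noopCallback,
  //     100, inputs, log)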
}
| etsy/sbt-compile-quick-plugin | src/main/scala-sbt-1.0/com/etsy/sbt/Compatibility.scala | Scala | mit | 2,600 |
package fr.acinq.eclair.db.migration
import fr.acinq.eclair.db.jdbc.JdbcUtils.ExtendedResultSet._
import fr.acinq.eclair.db.migration.MigrateDb.{checkVersions, migrateTable}
import java.sql.{Connection, PreparedStatement, ResultSet, Timestamp}
import java.time.Instant
object MigratePaymentsDb {
private def migrateReceivedPaymentsTable(source: Connection, destination: Connection): Int = {
val sourceTable = "received_payments"
val insertSql = "INSERT INTO payments.received (payment_hash, payment_type, payment_preimage, payment_request, received_msat, created_at, expire_at, received_at) VALUES (?, ?, ?, ?, ?, ?, ?, ?)"
def migrate(rs: ResultSet, insertStatement: PreparedStatement): Unit = {
insertStatement.setString(1, rs.getByteVector("payment_hash").toHex)
insertStatement.setString(2, rs.getString("payment_type"))
insertStatement.setString(3, rs.getByteVector("payment_preimage").toHex)
insertStatement.setString(4, rs.getString("payment_request"))
insertStatement.setObject(5, rs.getLongNullable("received_msat").orNull)
insertStatement.setTimestamp(6, Timestamp.from(Instant.ofEpochMilli(rs.getLong("created_at"))))
insertStatement.setTimestamp(7, Timestamp.from(Instant.ofEpochMilli(rs.getLong("expire_at"))))
insertStatement.setObject(8, rs.getLongNullable("received_at").map(l => Timestamp.from(Instant.ofEpochMilli(l))).orNull)
}
migrateTable(source, destination, sourceTable, insertSql, migrate)
}
private def migrateSentPaymentsTable(source: Connection, destination: Connection): Int = {
val sourceTable = "sent_payments"
val insertSql = "INSERT INTO payments.sent (id, parent_id, external_id, payment_hash, payment_preimage, payment_type, amount_msat, fees_msat, recipient_amount_msat, recipient_node_id, payment_request, payment_route, failures, created_at, completed_at) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"
def migrate(rs: ResultSet, insertStatement: PreparedStatement): Unit = {
insertStatement.setString(1, rs.getString("id"))
insertStatement.setString(2, rs.getString("parent_id"))
insertStatement.setString(3, rs.getStringNullable("external_id").orNull)
insertStatement.setString(4, rs.getByteVector("payment_hash").toHex)
insertStatement.setString(5, rs.getByteVector32Nullable("payment_preimage").map(_.toHex).orNull)
insertStatement.setString(6, rs.getString("payment_type"))
insertStatement.setLong(7, rs.getLong("amount_msat"))
insertStatement.setObject(8, rs.getLongNullable("fees_msat").orNull)
insertStatement.setLong(9, rs.getLong("recipient_amount_msat"))
insertStatement.setString(10, rs.getByteVector("recipient_node_id").toHex)
insertStatement.setString(11, rs.getStringNullable("payment_request").orNull)
insertStatement.setBytes(12, rs.getBytes("payment_route"))
insertStatement.setBytes(13, rs.getBytes("failures"))
insertStatement.setTimestamp(14, Timestamp.from(Instant.ofEpochMilli(rs.getLong("created_at"))))
insertStatement.setObject(15, rs.getLongNullable("completed_at").map(l => Timestamp.from(Instant.ofEpochMilli(l))).orNull)
}
migrateTable(source, destination, sourceTable, insertSql, migrate)
}
def migrateAllTables(source: Connection, destination: Connection): Unit = {
checkVersions(source, destination, "payments", 4, 6)
migrateReceivedPaymentsTable(source, destination)
migrateSentPaymentsTable(source, destination)
}
}
| ACINQ/eclair | eclair-core/src/main/scala/fr/acinq/eclair/db/migration/MigratePaymentsDb.scala | Scala | apache-2.0 | 3,509 |
package models.analytics.subscription
import akka.actor.ActorLogging
import akka.contrib.pattern.ReceivePipeline
import akka.contrib.pattern.ReceivePipeline.Inner
import models.market.MarketEMACollection
import models.market.MarketStructures.{Candles, ClosePrice, ExponentialMovingAverage, MarketMessage}
/**
* Created by bishop on 11/1/16.
*/
trait ExponentialMovingAverages extends ActorLogging {
  this: ReceivePipeline =>

  pipelineInner {
case msg: MarketMessage =>
averages.foreach(_.updateAverages(ClosePrice(msg.time, msg.last)))
Inner(msg)
/**
   * Assumes that the candles are ordered by latest time first!
*/
case mc: Candles =>
val closePrices = mc.candles.map( c => ClosePrice(c.time, c.close))
setAverages(closePrices)
Inner(mc)
}
// to be defined
val marketName: String
// these are the system defaults but can be overridden
val periods: List[Int] = List(7, 15)
val periodMinutes = 5
// list of ema collections
private val averages = scala.collection.mutable.ListBuffer[MarketEMACollection]()
def setAllMarketAverages(marketAverages: List[MarketEMACollection]) = averages ++= marketAverages
/**
* Sets all averages for each period as a collection of exponential
* moving averages.
*
* @param closePrices assumes the close prices are sorted with most recent
* close period first and that all close prices have the same periodMinutes
* as this object. Example, close prices for every 5 minute
* period for the last 24 hour window.
* @return
*/
def setAverages(closePrices: List[ClosePrice]) = {
// create a new collection for specific periods
val newAverages = for (period <- periods)
yield new MarketEMACollection(marketName, period, periodMinutes, closePrices)
averages ++= newAverages
}
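  // Hedged illustration (added in editing): with the default
  // periods = List(7, 15) and 5-minute close prices covering a 24 hour
  // window, setAverages registers two MarketEMACollection instances;
  // getLatestMovingAverages() then yields Map(7 -> ema7, 15 -> ema15)
  // holding the most recent value for each period.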
/**
* Returns the latest ema for each period.
*
* @return map of period to ema for that period
*/
def getLatestMovingAverages(): Map[Int, BigDecimal] = {
if (averages.nonEmpty) {
averages.map(a => a.period -> a.emas.head.ema).toMap
} else {
Map[Int, BigDecimal]()
}
}
/**
* Returns the moving averages for this market.
*
* @return a map of period to exponential moving averages for that period
* list of ExponentialMovingAverages are ordered with most recent first
*/
def getMovingAverages(): Map[Int, List[ExponentialMovingAverage]] = {
if (averages.nonEmpty) {
averages.map(a => a.period -> a.emas).toMap
} else {
Map.empty[Int, List[ExponentialMovingAverage]]
}
}
}
| asciiu/polo | app/models/analytics/subscription/ExponentialMovingAverages.scala | Scala | mit | 2,676 |
package com.shocktrade.webapp.routes
import com.shocktrade.common.forms.MaxResultsForm
import com.shocktrade.common.models.EntitySearchResult
import com.shocktrade.common.models.quote.ResearchQuote
import com.shocktrade.common.models.user.User
import com.shocktrade.server.dao.securities.SecuritiesDAO
import com.shocktrade.server.dao.securities.SecuritiesDAO._
import com.shocktrade.server.dao.users.UserDAO
import com.shocktrade.server.dao.users.UserDAO._
import io.scalajs.npm.express.{Application, Request, Response}
import io.scalajs.npm.mongodb.{Collection, Db, _}
import io.scalajs.util.JsUnderOrHelper._
import io.scalajs.util.PromiseHelper.Implicits._
import scala.concurrent.{ExecutionContext, Future}
import scala.scalajs.js
import scala.util.{Failure, Success}
/**
* Search Routes
* @author [email protected]
*/
object SearchRoutes {
def init(app: Application, dbFuture: Future[Db])(implicit ec: ExecutionContext): Unit = {
implicit val securities = dbFuture.map(_.getSecuritiesDAO).map(SecuritiesSearchAgent)
implicit val users = dbFuture.map(_.getUserDAO).map(UserSearchAgent)
app.get("/api/search", (request: Request, response: Response, next: NextFunction) => getSearchResults(request, response, next))
/**
     * Searches for users and securities
* @example GET /api/search?searchTerm=mic&maxResults=10
*/
def getSearchResults(request: Request, response: Response, next: NextFunction) = {
val searchAgents = Seq(users, securities)
val form = request.queryAs[SearchForm]
def search(searchTerm: String, maxResults: Int) = Future.sequence(searchAgents map (_.flatMap(_.search(searchTerm, maxResults)))) map (_.flatten)
form.searchTerm.toOption map ((_, form.getMaxResults())) match {
case Some((searchTerm, maxResults)) =>
search(searchTerm, maxResults) onComplete {
case Success(searchResults) => response.send(js.Array(searchResults: _*)); next()
case Failure(e) => response.internalServerError(e); next()
}
case None =>
response.badRequest("Bad Request: searchTerm is required"); next()
}
}
}
/**
* String Extensions
* @param text the given string
*/
implicit class StringExtensions(val text: String) extends AnyVal {
def limit(maxLength: Int) = if (text.length > maxLength) text.take(maxLength) + "..." else text
}
/**
* Search Form
* @author [email protected]
*/
@js.native
trait SearchForm extends MaxResultsForm {
var searchTerm: js.UndefOr[String] = js.native
}
/**
* Abstract Search Agent
* @author [email protected]
*/
trait SearchAgent[T <: js.Any] {
def coll: Collection
def fields: js.Array[String]
def search(searchTerm: String, maxResults: Int)(implicit ec: ExecutionContext) = {
coll.find[T](selector = getSelection(searchTerm)).limit(maxResults).toArray() map (_ map toSearchResult)
}
def toSearchResult(entity: T): EntitySearchResult
private def getSelection(searchTerm: String) = {
fields.foldLeft[List[(String, js.Any)]](Nil) { (list, field) =>
(field $regex(s"^$searchTerm", ignoreCase = true)) :: list
} match {
case Nil => doc()
case one :: Nil => doc(one)
case many => doc($or(many: _*))
}
}
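    // For example (illustrative, not from the original source), with
    // fields = ["symbol", "name"] and searchTerm = "mic", the selector is
    // roughly:
    //   { $or: [ { name:   { $regex: "^mic", $options: "i" } },
    //            { symbol: { $regex: "^mic", $options: "i" } } ] }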
}
/**
* Securities Search Agent
* @author [email protected]
*/
case class SecuritiesSearchAgent(coll: SecuritiesDAO) extends SearchAgent[ResearchQuote] {
val fields = js.Array("symbol", "name")
override def toSearchResult(quote: ResearchQuote) = {
new EntitySearchResult(
_id = quote.symbol,
name = quote.symbol,
description = quote.name,
`type` = "STOCK"
)
}
}
/**
* User Search Agent
* @author [email protected]
*/
case class UserSearchAgent(coll: UserDAO) extends SearchAgent[User] {
val fields = js.Array("name")
override def toSearchResult(user: User) = {
new EntitySearchResult(
_id = user.facebookID,
name = user.name,
description = user.description.flat.map(_.limit(50)) ?? "Day Trader",
`type` = "USER"
)
}
}
}
| ldaniels528/shocktrade.js | app/server/webapp/src/main/scala/com/shocktrade/webapp/routes/SearchRoutes.scala | Scala | apache-2.0 | 4,282 |
package frameless
package cats
import _root_.cats._
import _root_.cats.kernel.{CommutativeMonoid, CommutativeSemigroup}
import _root_.cats.implicits._
import alleycats.Empty
import scala.reflect.ClassTag
import org.apache.spark.rdd.RDD
object implicits extends FramelessSyntax with SparkDelayInstances {
implicit class rddOps[A: ClassTag](lhs: RDD[A]) {
def csum(implicit m: CommutativeMonoid[A]): A =
lhs.fold(m.empty)(_ |+| _)
def csumOption(implicit m: CommutativeSemigroup[A]): Option[A] =
lhs.aggregate[Option[A]](None)(
(acc, a) => Some(acc.fold(a)(_ |+| a)),
(l, r) => l.fold(r)(x => r.map(_ |+| x) orElse Some(x))
)
def cmin(implicit o: Order[A], e: Empty[A]): A = {
if (lhs.isEmpty) e.empty
else lhs.reduce(_ min _)
}
def cminOption(implicit o: Order[A]): Option[A] =
csumOption(new CommutativeSemigroup[A] {
def combine(l: A, r: A) = l min r
})
def cmax(implicit o: Order[A], e: Empty[A]): A = {
if (lhs.isEmpty) e.empty
else lhs.reduce(_ max _)
}
def cmaxOption(implicit o: Order[A]): Option[A] =
csumOption(new CommutativeSemigroup[A] {
def combine(l: A, r: A) = l max r
})
}
implicit class pairRddOps[K: ClassTag, V: ClassTag](lhs: RDD[(K, V)]) {
def csumByKey(implicit m: CommutativeSemigroup[V]): RDD[(K, V)] = lhs.reduceByKey(_ |+| _)
def cminByKey(implicit o: Order[V]): RDD[(K, V)] = lhs.reduceByKey(_ min _)
def cmaxByKey(implicit o: Order[V]): RDD[(K, V)] = lhs.reduceByKey(_ max _)
}
}
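// A minimal usage sketch (assumes a SparkContext `sc` in scope; not part of
// the original file):
//
//   import cats.implicits._               // CommutativeMonoid[Int], Order[Int]
//   import frameless.cats.implicits._
//   val rdd: RDD[Int] = sc.parallelize(Seq(1, 2, 3))
//   rdd.csum        // 6
//   rdd.cminOption  // Some(1)
//   rdd.cmaxOption  // Some(3)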
object union {
implicit def unionSemigroup[A]: Semigroup[RDD[A]] =
new Semigroup[RDD[A]] {
def combine(lhs: RDD[A], rhs: RDD[A]): RDD[A] = lhs union rhs
}
}
object inner {
implicit def pairwiseInnerSemigroup[K: ClassTag, V: ClassTag: Semigroup]: Semigroup[RDD[(K, V)]] =
new Semigroup[RDD[(K, V)]] {
def combine(lhs: RDD[(K, V)], rhs: RDD[(K, V)]): RDD[(K, V)] =
lhs.join(rhs).mapValues { case (x, y) => x |+| y }
}
}
object outer {
implicit def pairwiseOuterSemigroup[K: ClassTag, V: ClassTag](implicit m: Monoid[V]): Semigroup[RDD[(K, V)]] =
new Semigroup[RDD[(K, V)]] {
def combine(lhs: RDD[(K, V)], rhs: RDD[(K, V)]): RDD[(K, V)] =
lhs.fullOuterJoin(rhs).mapValues {
case (Some(x), Some(y)) => x |+| y
case (None, Some(y)) => y
case (Some(x), None) => x
case (None, None) => m.empty
}
}
}
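// Usage sketch for the join semigroups (illustrative; assumes `sc` and cats
// syntax in scope): with `import frameless.cats.inner._`,
//
//   // sc.parallelize(Seq("a" -> 1)) |+| sc.parallelize(Seq("a" -> 2, "b" -> 3))
//   // => contains "a" -> 3 only; "b" is dropped by the inner join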
| imarios/frameless | cats/src/main/scala/frameless/cats/implicits.scala | Scala | apache-2.0 | 2,471 |
/*
* Copyright 2011-2021 Asakusa Framework Team.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.asakusafw.spark.compiler
package operator
package user
import org.junit.runner.RunWith
import org.scalatest.FlatSpec
import org.scalatest.junit.JUnitRunner
import java.io.{ DataInput, DataOutput }
import java.lang.{ Iterable => JIterable }
import java.util.{ List => JList }
import java.util.function.Consumer
import scala.collection.JavaConversions._
import org.apache.hadoop.io.Writable
import org.apache.spark.broadcast.{ Broadcast => Broadcasted }
import com.asakusafw.lang.compiler.model.description.{ ClassDescription, ImmediateDescription }
import com.asakusafw.lang.compiler.model.graph.{ Groups, MarkerOperator, Operator, OperatorInput }
import com.asakusafw.lang.compiler.model.graph.Operator.InputOptionBuilder
import com.asakusafw.lang.compiler.model.testing.OperatorExtractor
import com.asakusafw.lang.compiler.planning.PlanMarker
import com.asakusafw.runtime.core.{ GroupView, Result, View }
import com.asakusafw.runtime.model.DataModel
import com.asakusafw.runtime.value.{ IntOption, StringOption }
import com.asakusafw.spark.compiler.broadcast.MockBroadcast
import com.asakusafw.spark.compiler.spi.{ OperatorCompiler, OperatorType }
import com.asakusafw.spark.runtime.fragment.{ Fragment, GenericOutputFragment }
import com.asakusafw.spark.runtime.graph.BroadcastId
import com.asakusafw.spark.runtime.io.WritableSerDe
import com.asakusafw.spark.runtime.rdd.ShuffleKey
import com.asakusafw.spark.tools.asm._
import com.asakusafw.vocabulary.attribute.BufferType
import com.asakusafw.vocabulary.operator.CoGroup
@RunWith(classOf[JUnitRunner])
class CoGroupOperatorCompilerSpecTest extends CoGroupOperatorCompilerSpec
class CoGroupOperatorCompilerSpec extends FlatSpec with UsingCompilerContext {
import CoGroupOperatorCompilerSpec._
behavior of classOf[CoGroupOperatorCompiler].getSimpleName
for {
s <- Seq("s", null)
} {
it should s"compile CoGroup operator${if (s == null) " with argument null" else ""}" in {
val operator = OperatorExtractor
.extract(classOf[CoGroup], classOf[CoGroupOperator], "cogroup")
.input("foos", ClassDescription.of(classOf[Foo]),
Groups.parse(Seq("id")))
.input("bars", ClassDescription.of(classOf[Bar]),
Groups.parse(Seq("fooId"), Seq("+id")))
.output("fooResult", ClassDescription.of(classOf[Foo]))
.output("barResult", ClassDescription.of(classOf[Bar]))
.output("fooError", ClassDescription.of(classOf[Foo]))
.output("barError", ClassDescription.of(classOf[Bar]))
.output("nResult", ClassDescription.of(classOf[N]))
.argument("n", ImmediateDescription.of(10))
.argument("s", ImmediateDescription.of(s))
.build()
implicit val context = newOperatorCompilerContext("flowId")
val thisType = OperatorCompiler.compile(operator, OperatorType.CoGroupType)
val cls = context.loadClass[Fragment[IndexedSeq[Iterator[_]]]](thisType.getClassName)
val fooResult = new GenericOutputFragment[Foo]()
val fooError = new GenericOutputFragment[Foo]()
val barResult = new GenericOutputFragment[Bar]()
val barError = new GenericOutputFragment[Bar]()
val nResult = new GenericOutputFragment[N]()
val fragment = cls.getConstructor(
classOf[Map[BroadcastId, Broadcasted[_]]],
classOf[Fragment[_]], classOf[Fragment[_]],
classOf[Fragment[_]], classOf[Fragment[_]],
classOf[Fragment[_]])
.newInstance(Map.empty, fooResult, barResult, fooError, barError, nResult)
{
fragment.reset()
val foos = Seq.empty[Foo]
val bars = Seq.empty[Bar]
fragment.add(IndexedSeq(foos.iterator, bars.iterator))
assert(fooResult.iterator.size === 0)
assert(barResult.iterator.size === 0)
assert(fooError.iterator.size === 0)
assert(barError.iterator.size === 0)
val nResults = nResult.iterator.toSeq
assert(nResults.size === 1)
assert(nResults.head.n.get === 10)
}
fragment.reset()
assert(fooResult.iterator.size === 0)
assert(barResult.iterator.size === 0)
assert(fooError.iterator.size === 0)
assert(barError.iterator.size === 0)
assert(nResult.iterator.size === 0)
{
fragment.reset()
val foo = new Foo()
foo.id.modify(1)
val foos = Seq(foo)
val bar = new Bar()
bar.id.modify(10)
bar.fooId.modify(1)
val bars = Seq(bar)
fragment.add(IndexedSeq(foos.iterator, bars.iterator))
val fooResults = fooResult.iterator.toSeq
assert(fooResults.size === 1)
assert(fooResults.head.id.get === foo.id.get)
if (s == null) {
assert(fooResults.head.s.isNull)
} else {
assert(fooResults.head.s.getAsString === s)
}
val barResults = barResult.iterator.toSeq
assert(barResults.size === 1)
assert(barResults.head.id.get === bar.id.get)
assert(barResults.head.fooId.get === bar.fooId.get)
if (s == null) {
assert(barResults.head.s.isNull)
} else {
assert(barResults.head.s.getAsString === s)
}
val fooErrors = fooError.iterator.toSeq
assert(fooErrors.size === 0)
val barErrors = barError.iterator.toSeq
assert(barErrors.size === 0)
val nResults = nResult.iterator.toSeq
assert(nResults.size === 1)
assert(nResults.head.n.get === 10)
}
fragment.reset()
assert(fooResult.iterator.size === 0)
assert(barResult.iterator.size === 0)
assert(fooError.iterator.size === 0)
assert(barError.iterator.size === 0)
assert(nResult.iterator.size === 0)
{
fragment.reset()
val foo = new Foo()
foo.id.modify(1)
val foos = Seq(foo)
val bars = (0 until 10).map { i =>
val bar = new Bar()
bar.id.modify(i)
bar.fooId.modify(1)
bar
}
fragment.add(IndexedSeq(foos.iterator, bars.iterator))
val fooResults = fooResult.iterator.toSeq
assert(fooResults.size === 0)
val barResults = barResult.iterator.toSeq
assert(barResults.size === 0)
val fooErrors = fooError.iterator.toSeq
assert(fooErrors.size === 1)
assert(fooErrors.head.id.get === foo.id.get)
if (s == null) {
assert(fooErrors.head.s.isNull)
} else {
assert(fooErrors.head.s.getAsString === s)
}
val barErrors = barError.iterator.toSeq
assert(barErrors.size === 10)
barErrors.zip(bars).foreach {
case (actual, expected) =>
assert(actual.id.get === expected.id.get)
assert(actual.fooId.get === expected.fooId.get)
if (s == null) {
assert(actual.s.isNull)
} else {
assert(actual.s.getAsString === s)
}
}
val nResults = nResult.iterator.toSeq
assert(nResults.size === 1)
assert(nResults.head.n.get === 10)
}
fragment.reset()
assert(fooResult.iterator.size === 0)
assert(barResult.iterator.size === 0)
assert(fooError.iterator.size === 0)
assert(barError.iterator.size === 0)
assert(nResult.iterator.size === 0)
}
}
it should "compile CoGroup operator with projective model" in {
val operator = OperatorExtractor
.extract(classOf[CoGroup], classOf[CoGroupOperator], "cogroupp")
.input("foos", ClassDescription.of(classOf[Foo]),
Groups.parse(Seq("id")))
.input("bars", ClassDescription.of(classOf[Bar]),
Groups.parse(Seq("fooId"), Seq("+id")))
.output("fooResult", ClassDescription.of(classOf[Foo]))
.output("barResult", ClassDescription.of(classOf[Bar]))
.output("fooError", ClassDescription.of(classOf[Foo]))
.output("barError", ClassDescription.of(classOf[Bar]))
.output("nResult", ClassDescription.of(classOf[N]))
.argument("n", ImmediateDescription.of(10))
.build()
implicit val context = newOperatorCompilerContext("flowId")
val thisType = OperatorCompiler.compile(operator, OperatorType.CoGroupType)
val cls = context.loadClass[Fragment[IndexedSeq[Iterator[_]]]](thisType.getClassName)
val fooResult = new GenericOutputFragment[Foo]()
val fooError = new GenericOutputFragment[Foo]()
val barResult = new GenericOutputFragment[Bar]()
val barError = new GenericOutputFragment[Bar]()
val nResult = new GenericOutputFragment[N]()
val fragment = cls.getConstructor(
classOf[Map[BroadcastId, Broadcasted[_]]],
classOf[Fragment[_]], classOf[Fragment[_]],
classOf[Fragment[_]], classOf[Fragment[_]],
classOf[Fragment[_]])
.newInstance(Map.empty, fooResult, barResult, fooError, barError, nResult)
{
fragment.reset()
val foos = Seq.empty[Foo]
val bars = Seq.empty[Bar]
fragment.add(IndexedSeq(foos.iterator, bars.iterator))
assert(fooResult.iterator.size === 0)
assert(barResult.iterator.size === 0)
assert(fooError.iterator.size === 0)
assert(barError.iterator.size === 0)
val nResults = nResult.iterator.toSeq
assert(nResults.size === 1)
assert(nResults.head.n.get === 10)
}
fragment.reset()
assert(fooResult.iterator.size === 0)
assert(barResult.iterator.size === 0)
assert(fooError.iterator.size === 0)
assert(barError.iterator.size === 0)
assert(nResult.iterator.size === 0)
{
fragment.reset()
val foo = new Foo()
foo.id.modify(1)
val foos = Seq(foo)
val bar = new Bar()
bar.id.modify(10)
bar.fooId.modify(1)
val bars = Seq(bar)
fragment.add(IndexedSeq(foos.iterator, bars.iterator))
val fooResults = fooResult.iterator.toSeq
assert(fooResults.size === 1)
assert(fooResults.head.id.get === foo.id.get)
val barResults = barResult.iterator.toSeq
assert(barResults.size === 1)
assert(barResults.head.id.get === bar.id.get)
assert(barResults.head.fooId.get === bar.fooId.get)
val fooErrors = fooError.iterator.toSeq
assert(fooErrors.size === 0)
val barErrors = barError.iterator.toSeq
assert(barErrors.size === 0)
val nResults = nResult.iterator.toSeq
assert(nResults.size === 1)
assert(nResults.head.n.get === 10)
}
fragment.reset()
assert(fooResult.iterator.size === 0)
assert(barResult.iterator.size === 0)
assert(fooError.iterator.size === 0)
assert(barError.iterator.size === 0)
assert(nResult.iterator.size === 0)
{
fragment.reset()
val foo = new Foo()
foo.id.modify(1)
val foos = Seq(foo)
val bars = (0 until 10).map { i =>
val bar = new Bar()
bar.id.modify(i)
bar.fooId.modify(1)
bar
}
fragment.add(IndexedSeq(foos.iterator, bars.iterator))
val fooResults = fooResult.iterator.toSeq
assert(fooResults.size === 0)
val barResults = barResult.iterator.toSeq
assert(barResults.size === 0)
val fooErrors = fooError.iterator.toSeq
assert(fooErrors.size === 1)
assert(fooErrors.head.id.get === foo.id.get)
val barErrors = barError.iterator.toSeq
assert(barErrors.size === 10)
barErrors.zip(bars).foreach {
case (actual, expected) =>
assert(actual.id.get === expected.id.get)
assert(actual.fooId.get === expected.fooId.get)
}
val nResults = nResult.iterator.toSeq
assert(nResults.size === 1)
assert(nResults.head.n.get === 10)
}
fragment.reset()
assert(fooResult.iterator.size === 0)
assert(barResult.iterator.size === 0)
assert(fooError.iterator.size === 0)
assert(barError.iterator.size === 0)
assert(nResult.iterator.size === 0)
}
for {
projective <- Seq(false, true)
} {
it should s"compile CoGroup operator with BufferType${
if (projective) " with projective model" else ""
}" in {
val operator = OperatorExtractor
.extract(classOf[CoGroup], classOf[CoGroupOperator],
if (projective) "cogroupbp" else "cogroupb")
.input("foos", ClassDescription.of(classOf[Foo]),
new Consumer[InputOptionBuilder] {
override def accept(builder: InputOptionBuilder): Unit = {
builder.group(Groups.parse(Seq("id")))
.attribute(BufferType.SPILL)
}
})
.input("bars", ClassDescription.of(classOf[Bar]),
new Consumer[InputOptionBuilder] {
override def accept(builder: InputOptionBuilder): Unit = {
builder.group(Groups.parse(Seq("fooId"), Seq("+id")))
.attribute(BufferType.VOLATILE)
}
})
.output("fooResult", ClassDescription.of(classOf[Foo]))
.output("barResult", ClassDescription.of(classOf[Bar]))
.output("fooError", ClassDescription.of(classOf[Foo]))
.output("barError", ClassDescription.of(classOf[Bar]))
.output("nResult", ClassDescription.of(classOf[N]))
.argument("n", ImmediateDescription.of(10))
.build()
implicit val context = newOperatorCompilerContext("flowId")
val thisType = OperatorCompiler.compile(operator, OperatorType.CoGroupType)
val cls = context.loadClass[Fragment[IndexedSeq[Iterator[_]]]](thisType.getClassName)
val fooResult = new GenericOutputFragment[Foo]()
val fooError = new GenericOutputFragment[Foo]()
val barResult = new GenericOutputFragment[Bar]()
val barError = new GenericOutputFragment[Bar]()
val nResult = new GenericOutputFragment[N]()
val fragment = cls.getConstructor(
classOf[Map[BroadcastId, Broadcasted[_]]],
classOf[Fragment[_]], classOf[Fragment[_]],
classOf[Fragment[_]], classOf[Fragment[_]],
classOf[Fragment[_]])
.newInstance(Map.empty, fooResult, barResult, fooError, barError, nResult)
{
fragment.reset()
val foos = Seq.empty[Foo]
val bars = Seq.empty[Bar]
fragment.add(IndexedSeq(foos.iterator, bars.iterator))
assert(fooResult.iterator.size === 0)
assert(barResult.iterator.size === 0)
assert(fooError.iterator.size === 0)
assert(barError.iterator.size === 0)
val nResults = nResult.iterator.toSeq
assert(nResults.size === 1)
assert(nResults.head.n.get === 10)
}
fragment.reset()
assert(fooResult.iterator.size === 0)
assert(barResult.iterator.size === 0)
assert(fooError.iterator.size === 0)
assert(barError.iterator.size === 0)
assert(nResult.iterator.size === 0)
{
fragment.reset()
val foo = new Foo()
foo.id.modify(1)
val foos = Seq(foo)
val bar = new Bar()
bar.id.modify(10)
bar.fooId.modify(1)
val bars = Seq(bar)
fragment.add(IndexedSeq(foos.iterator, bars.iterator))
val fooResults = fooResult.iterator.toSeq
assert(fooResults.size === 1)
assert(fooResults.head.id.get === foo.id.get)
val barResults = barResult.iterator.toSeq
assert(barResults.size === 1)
assert(barResults.head.id.get === bar.id.get)
assert(barResults.head.fooId.get === bar.fooId.get)
val fooErrors = fooError.iterator.toSeq
assert(fooErrors.size === 0)
val barErrors = barError.iterator.toSeq
assert(barErrors.size === 0)
val nResults = nResult.iterator.toSeq
assert(nResults.size === 1)
assert(nResults.head.n.get === 10)
}
fragment.reset()
assert(fooResult.iterator.size === 0)
assert(barResult.iterator.size === 0)
assert(fooError.iterator.size === 0)
assert(barError.iterator.size === 0)
assert(nResult.iterator.size === 0)
{
fragment.reset()
val foo = new Foo()
foo.id.modify(1)
val foos = Seq(foo)
val bars = (0 until 10).map { i =>
val bar = new Bar()
bar.id.modify(i)
bar.fooId.modify(1)
bar
}
fragment.add(IndexedSeq(foos.iterator, bars.iterator))
val fooResults = fooResult.iterator.toSeq
assert(fooResults.size === 0)
val barResults = barResult.iterator.toSeq
assert(barResults.size === 0)
val fooErrors = fooError.iterator.toSeq
assert(fooErrors.size === 1)
assert(fooErrors.head.id.get === foo.id.get)
val barErrors = barError.iterator.toSeq
assert(barErrors.size === 10)
barErrors.zip(bars).foreach {
case (actual, expected) =>
assert(actual.id.get === expected.id.get)
assert(actual.fooId.get === expected.fooId.get)
}
val nResults = nResult.iterator.toSeq
assert(nResults.size === 1)
assert(nResults.head.n.get === 10)
}
fragment.reset()
assert(fooResult.iterator.size === 0)
assert(barResult.iterator.size === 0)
assert(fooError.iterator.size === 0)
assert(barError.iterator.size === 0)
assert(nResult.iterator.size === 0)
}
}
it should "compile CoGroup operator with view" in {
val vMarker = MarkerOperator.builder(ClassDescription.of(classOf[Foo]))
.attribute(classOf[PlanMarker], PlanMarker.BROADCAST).build()
val gvMarker = MarkerOperator.builder(ClassDescription.of(classOf[Foo]))
.attribute(classOf[PlanMarker], PlanMarker.BROADCAST).build()
val operator = OperatorExtractor
.extract(classOf[CoGroup], classOf[CoGroupOperator], "cogroupWithView")
.input("foos", ClassDescription.of(classOf[Foo]),
Groups.parse(Seq("id")))
.input("bars", ClassDescription.of(classOf[Bar]),
Groups.parse(Seq("fooId"), Seq("+id")))
.input("v", ClassDescription.of(classOf[Foo]),
new Consumer[Operator.InputOptionBuilder] {
override def accept(builder: Operator.InputOptionBuilder): Unit = {
builder
.unit(OperatorInput.InputUnit.WHOLE)
.group(Groups.parse(Seq.empty, Seq.empty))
.upstream(vMarker.getOutput)
}
})
.input("gv", ClassDescription.of(classOf[Foo]),
new Consumer[Operator.InputOptionBuilder] {
override def accept(builder: Operator.InputOptionBuilder): Unit = {
builder
.unit(OperatorInput.InputUnit.WHOLE)
.group(Groups.parse(Seq("id"), Seq.empty))
.upstream(gvMarker.getOutput)
}
})
.output("fooResult", ClassDescription.of(classOf[Foo]))
.output("barResult", ClassDescription.of(classOf[Bar]))
.output("fooError", ClassDescription.of(classOf[Foo]))
.output("barError", ClassDescription.of(classOf[Bar]))
.output("nResult", ClassDescription.of(classOf[N]))
.argument("n", ImmediateDescription.of(10))
.build()
implicit val context = newOperatorCompilerContext("flowId")
val thisType = OperatorCompiler.compile(operator, OperatorType.CoGroupType)
context.addClass(context.broadcastIds)
val cls = context.loadClass[Fragment[IndexedSeq[Iterator[_]]]](thisType.getClassName)
val broadcastIdsCls = context.loadClass(context.broadcastIds.thisType.getClassName)
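    // Resolves the compiler-generated BroadcastId constant for a marker
    // operator via its serial number, by reflecting on the generated
    // broadcastIds holder class.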
def getBroadcastId(marker: MarkerOperator): BroadcastId = {
val sn = marker.getSerialNumber
broadcastIdsCls.getField(context.broadcastIds.getField(sn)).get(null).asInstanceOf[BroadcastId]
}
val fooResult = new GenericOutputFragment[Foo]()
val fooError = new GenericOutputFragment[Foo]()
val barResult = new GenericOutputFragment[Bar]()
val barError = new GenericOutputFragment[Bar]()
val nResult = new GenericOutputFragment[N]()
val view = new MockBroadcast(0, Map(ShuffleKey.empty -> Seq(new Foo())))
val groupview = new MockBroadcast(1,
(0 until 10).map { i =>
val foo = new Foo()
foo.id.modify(i)
new ShuffleKey(WritableSerDe.serialize(foo.id)) -> Seq(foo)
}.toMap)
val fragment = cls.getConstructor(
classOf[Map[BroadcastId, Broadcasted[_]]],
classOf[Fragment[_]], classOf[Fragment[_]],
classOf[Fragment[_]], classOf[Fragment[_]],
classOf[Fragment[_]])
.newInstance(
Map(
getBroadcastId(vMarker) -> view,
getBroadcastId(gvMarker) -> groupview),
fooResult, barResult, fooError, barError, nResult)
{
fragment.reset()
val foos = Seq.empty[Foo]
val bars = Seq.empty[Bar]
fragment.add(IndexedSeq(foos.iterator, bars.iterator))
assert(fooResult.iterator.size === 0)
assert(barResult.iterator.size === 0)
assert(fooError.iterator.size === 0)
assert(barError.iterator.size === 0)
val nResults = nResult.iterator.toSeq
assert(nResults.size === 1)
assert(nResults.head.n.get === 10)
}
fragment.reset()
assert(fooResult.iterator.size === 0)
assert(barResult.iterator.size === 0)
assert(fooError.iterator.size === 0)
assert(barError.iterator.size === 0)
assert(nResult.iterator.size === 0)
{
fragment.reset()
val foo = new Foo()
foo.id.modify(1)
val foos = Seq(foo)
val bar = new Bar()
bar.id.modify(10)
bar.fooId.modify(1)
val bars = Seq(bar)
fragment.add(IndexedSeq(foos.iterator, bars.iterator))
val fooResults = fooResult.iterator.toSeq
assert(fooResults.size === 1)
assert(fooResults.head.id.get === foo.id.get)
val barResults = barResult.iterator.toSeq
assert(barResults.size === 1)
assert(barResults.head.id.get === bar.id.get)
assert(barResults.head.fooId.get === bar.fooId.get)
val fooErrors = fooError.iterator.toSeq
assert(fooErrors.size === 0)
val barErrors = barError.iterator.toSeq
assert(barErrors.size === 0)
val nResults = nResult.iterator.toSeq
assert(nResults.size === 1)
assert(nResults.head.n.get === 10)
}
fragment.reset()
assert(fooResult.iterator.size === 0)
assert(barResult.iterator.size === 0)
assert(fooError.iterator.size === 0)
assert(barError.iterator.size === 0)
assert(nResult.iterator.size === 0)
{
fragment.reset()
val foo = new Foo()
foo.id.modify(1)
val foos = Seq(foo)
val bars = (0 until 10).map { i =>
val bar = new Bar()
bar.id.modify(i)
bar.fooId.modify(1)
bar
}
fragment.add(IndexedSeq(foos.iterator, bars.iterator))
val fooResults = fooResult.iterator.toSeq
assert(fooResults.size === 0)
val barResults = barResult.iterator.toSeq
assert(barResults.size === 0)
val fooErrors = fooError.iterator.toSeq
assert(fooErrors.size === 1)
assert(fooErrors.head.id.get === foo.id.get)
val barErrors = barError.iterator.toSeq
assert(barErrors.size === 10)
barErrors.zip(bars).foreach {
case (actual, expected) =>
assert(actual.id.get === expected.id.get)
assert(actual.fooId.get === expected.fooId.get)
}
val nResults = nResult.iterator.toSeq
assert(nResults.size === 1)
assert(nResults.head.n.get === 10)
}
fragment.reset()
assert(fooResult.iterator.size === 0)
assert(barResult.iterator.size === 0)
assert(fooError.iterator.size === 0)
assert(barError.iterator.size === 0)
assert(nResult.iterator.size === 0)
}
}
object CoGroupOperatorCompilerSpec {
trait FooP {
def getIdOption: IntOption
}
class Foo extends DataModel[Foo] with FooP with Writable {
val id = new IntOption()
val s = new StringOption()
override def reset(): Unit = {
id.setNull()
s.setNull()
}
override def copyFrom(other: Foo): Unit = {
id.copyFrom(other.id)
s.copyFrom(other.s)
}
override def readFields(in: DataInput): Unit = {
id.readFields(in)
s.readFields(in)
}
override def write(out: DataOutput): Unit = {
id.write(out)
s.write(out)
}
def getIdOption: IntOption = id
}
trait BarP {
def getIdOption: IntOption
def getFooIdOption: IntOption
}
class Bar extends DataModel[Bar] with BarP with Writable {
val id = new IntOption()
val fooId = new IntOption()
val s = new StringOption()
override def reset(): Unit = {
id.setNull()
fooId.setNull()
s.setNull()
}
override def copyFrom(other: Bar): Unit = {
id.copyFrom(other.id)
fooId.copyFrom(other.fooId)
s.copyFrom(other.s)
}
override def readFields(in: DataInput): Unit = {
id.readFields(in)
fooId.readFields(in)
s.readFields(in)
}
override def write(out: DataOutput): Unit = {
id.write(out)
fooId.write(out)
s.write(out)
}
def getIdOption: IntOption = id
def getFooIdOption: IntOption = fooId
}
class N extends DataModel[N] with Writable {
val n = new IntOption()
override def reset(): Unit = {
n.setNull()
}
override def copyFrom(other: N): Unit = {
n.copyFrom(other.n)
}
override def readFields(in: DataInput): Unit = {
n.readFields(in)
}
override def write(out: DataOutput): Unit = {
n.write(out)
}
def getIOption: IntOption = n
}
class CoGroupOperator {
private[this] val n = new N
@CoGroup
def cogroup(
foos: JList[Foo], bars: JList[Bar],
fooResult: Result[Foo], barResult: Result[Bar],
fooError: Result[Foo], barError: Result[Bar],
nResult: Result[N],
n: Int,
s: String): Unit = {
if (s != null) {
foos.foreach(_.s.modify(s))
bars.foreach(_.s.modify(s))
}
if (foos.size == 1 && bars.size == 1) {
fooResult.add(foos(0))
barResult.add(bars(0))
} else {
foos.foreach(fooError.add)
bars.foreach(barError.add)
}
this.n.n.modify(n)
nResult.add(this.n)
}
@CoGroup
def cogroupp[F <: FooP, B <: BarP](
foos: JList[F], bars: JList[B],
fooResult: Result[F], barResult: Result[B],
fooError: Result[F], barError: Result[B],
nResult: Result[N],
n: Int): Unit = {
if (foos.size == 1 && bars.size == 1) {
fooResult.add(foos(0))
barResult.add(bars(0))
} else {
foos.foreach(fooError.add)
bars.foreach(barError.add)
}
this.n.n.modify(n)
nResult.add(this.n)
}
@CoGroup
def cogroupb(
foos: JList[Foo], bars: JIterable[Bar],
fooResult: Result[Foo], barResult: Result[Bar],
fooError: Result[Foo], barError: Result[Bar],
nResult: Result[N],
n: Int): Unit = {
val barsIter = bars.iterator
if (foos.isEmpty) {
barsIter.foreach(barError.add)
} else if (!barsIter.hasNext) {
foos.foreach(fooError.add)
} else {
val fooHead = foos.get(0)
val barHead = barsIter.next()
if (foos.size == 1 && !barsIter.hasNext) {
fooResult.add(fooHead)
barResult.add(barHead)
} else {
foos.foreach(fooError.add)
barError.add(barHead)
barsIter.foreach(barError.add)
}
}
this.n.n.modify(n)
nResult.add(this.n)
}
@CoGroup
def cogroupbp[F <: FooP, B <: BarP](
foos: JList[F], bars: JIterable[B],
fooResult: Result[F], barResult: Result[B],
fooError: Result[F], barError: Result[B],
nResult: Result[N],
n: Int): Unit = {
val barsIter = bars.iterator
if (foos.isEmpty) {
barsIter.foreach(barError.add)
} else if (!barsIter.hasNext) {
foos.foreach(fooError.add)
} else {
val fooHead = foos.get(0)
val barHead = barsIter.next()
if (foos.size == 1 && !barsIter.hasNext) {
fooResult.add(fooHead)
barResult.add(barHead)
} else {
foos.foreach(fooError.add)
barError.add(barHead)
barsIter.foreach(barError.add)
}
}
this.n.n.modify(n)
nResult.add(this.n)
}
@CoGroup
def cogroupWithView(
foos: JList[Foo], bars: JList[Bar],
v: View[Foo], gv: GroupView[Foo],
fooResult: Result[Foo], barResult: Result[Bar],
fooError: Result[Foo], barError: Result[Bar],
nResult: Result[N],
n: Int): Unit = {
if (foos.size == 1 && bars.size == 1) {
fooResult.add(foos(0))
barResult.add(bars(0))
} else {
foos.foreach(fooError.add)
bars.foreach(barError.add)
}
this.n.n.modify(n)
nResult.add(this.n)
}
}
}
| asakusafw/asakusafw-spark | compiler/src/test/scala/com/asakusafw/spark/compiler/operator/user/CoGroupOperatorCompilerSpec.scala | Scala | apache-2.0 | 29,462 |
package de.tototec.sbuild
import java.io.File
trait ProjectReader {
/**
   * Reads a project file and creates a configured Project instance associated with that file.
   *
   * If a projectPool is given, the newly created project will be added to that pool.
   */
def readAndCreateProject(projectFile: File, properties: Map[String, String], projectPool: Option[ProjectPool], monitor: Option[CmdlineMonitor]): Project
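  // Hedged usage sketch (hypothetical `reader` instance and file name; not
  // from the original source):
  //   val project = reader.readAndCreateProject(
  //     new File("SBuild.scala"), Map.empty, projectPool = None, monitor = None)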
} | SBuild-org/sbuild | de.tototec.sbuild/src/main/scala/de/tototec/sbuild/ProjectReader.scala | Scala | apache-2.0 | 426 |
package com.twitter.finagle.memcached.integration
import com.twitter.finagle.Service
import com.twitter.finagle.builder.ClientBuilder
import com.twitter.finagle.memcached.protocol._
import com.twitter.finagle.memcached.protocol.text.Memcached
import com.twitter.finagle.memcached.util.ChannelBufferUtils._
import com.twitter.util.TimeConversions._
import com.twitter.util.{Await, Time}
import java.net.{InetAddress, InetSocketAddress}
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import org.scalatest.{BeforeAndAfter, FunSuite}
@RunWith(classOf[JUnitRunner])
class InterpreterServiceTest extends FunSuite with BeforeAndAfter {
var server: InProcessMemcached = null
var client: Service[Command, Response] = null
before {
server = new InProcessMemcached(new InetSocketAddress(InetAddress.getLoopbackAddress, 0))
val address = server.start().localAddress
client = ClientBuilder()
.hosts(address)
.codec(new Memcached)
.hostConnectionLimit(1)
.build()
}
after {
server.stop()
}
test("set & get") {
val key = "key"
val value = "value"
val zero = "0"
val result = for {
_ <- client(Delete(key))
_ <- client(Set(key, 0, Time.epoch, value))
r <- client(Get(Seq(key)))
} yield r
assert(Await.result(result, 1.second) === Values(Seq(Value(key, value, None, Some(zero)))))
assert(client.isAvailable)
}
test("quit") {
val result = client(Quit())
assert(Await.result(result) === NoOp())
}
}
| kristofa/finagle | finagle-memcached/src/test/scala/com/twitter/finagle/memcached/integration/InterpreterServiceTest.scala | Scala | apache-2.0 | 1,526 |
package com.artclod.mathml.scalar.apply
import org.specs2.runner.JUnitRunner
import org.junit.runner.RunWith
import play.api.test._
import play.api.test.Helpers._
import org.specs2.mutable._
import com.artclod.mathml._
import com.artclod.mathml.scalar._
// LATER try out http://rlegendi.github.io/specs2-runner/ and remove RunWith
@RunWith(classOf[JUnitRunner])
class ApplyDivideSpec extends Specification {
"eval" should {
"do division" in {
ApplyDivide(`1`, `2`).eval().get must beEqualTo(.5)
}
"be zero if numerator is zero" in {
ApplyDivide(`0`, `2`).eval().get must beEqualTo(0)
}
"be failure double divisions evals to zero but numerator is not zero" in {
ApplyDivide(Cn(1E-300), Cn(1E+300)).eval() must beFailedTry
}
}
"variables" should {
"be empty if elements are constant" in {
ApplyDivide(`1`, `2`).variables must beEmpty
}
"be x if an element constains an x" in {
ApplyDivide(x, `2`).variables must beEqualTo(Set("x"))
}
"be y if element constains a y" in {
ApplyDivide(`1`, y).variables must beEqualTo(Set("y"))
}
"be x & y if element constains x & y" in {
ApplyDivide(x, y).variables must beEqualTo(Set("x", "y"))
}
}
"c" should {
"return correct division if numerator and denominator are numbers " in {
ApplyDivide(`6`, `4`).c.get must beEqualTo(`1.5`)
}
"return 0 if numerator is 0 " in {
ApplyDivide(`0`, `4`).c.get must beEqualTo(`0`)
}
"return 1 if numerator and denominator are equal" in {
ApplyDivide(`5`, `5`).c.get must beEqualTo(`1`)
}
"fail if not a constant " in {
ApplyDivide(x, `4`).c must beEmpty
}
}
"s" should {
"return 0 if numerator is 0 (and denominator is not)" in {
ApplyDivide(`0`, `6`).s must beEqualTo(`0`)
}
"return 1 if numerator and denominator are equal (and non zero)" in {
ApplyDivide(`4`, `4`).s must beEqualTo(`1`)
}
"return numerator if denominator is 1" in {
ApplyDivide(x, `1`).s must beEqualTo(x)
}
"simplify if numerator is also a divide" in {
ApplyDivide(x / `2`, y).s must beEqualTo(x / (`2` * y))
}
"simplify if denominator is also a divide" in {
ApplyDivide(x, y / `3`).s must beEqualTo((`3` * x) / y)
}
"remain unchanged if nothing can be simplified" in {
ApplyDivide(x, `3`).s must beEqualTo(ApplyDivide(x, `3`))
}
}
"d" should {
"obey the quotient rule: (f/g)' = (f'g - g'f)/g^2" in {
ApplyDivide(F, G).dx must beEqualTo((Fdx * G - Gdx * F) / (G ^ `2`))
}
}
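	// Worked instance of the quotient rule (illustrative, not from the original
	// spec): with f = x and g = x^2,
	//   (x / x^2)' = (1*x^2 - 2x*x) / (x^2)^2 = -x^2 / x^4 = -1 / x^2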
"toText" should {
"handle 3 / 7" in {
ApplyDivide(3, 7).toMathJS must beEqualTo("(3 / 7)")
}
}
} | kristiankime/web-education-games | test/com/artclod/mathml/scalar/apply/ApplyDivideSpec.scala | Scala | mit | 2,611 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql
import org.apache.spark.sql.TestData._
import org.apache.spark.sql.functions._
import org.apache.spark.sql.test.TestSQLContext._
import org.apache.spark.sql.test.TestSQLContext.implicits._
class DataFrameJoinSuite extends QueryTest {
test("join - join using") {
val df = Seq(1, 2, 3).map(i => (i, i.toString)).toDF("int", "str")
val df2 = Seq(1, 2, 3).map(i => (i, (i + 1).toString)).toDF("int", "str")
checkAnswer(
df.join(df2, "int"),
Row(1, "1", "2") :: Row(2, "2", "3") :: Row(3, "3", "4") :: Nil)
}
test("join - join using self join") {
val df = Seq(1, 2, 3).map(i => (i, i.toString)).toDF("int", "str")
// self join
checkAnswer(
df.join(df, "int"),
Row(1, "1", "1") :: Row(2, "2", "2") :: Row(3, "3", "3") :: Nil)
}
test("join - self join") {
val df1 = testData.select(testData("key")).as('df1)
val df2 = testData.select(testData("key")).as('df2)
checkAnswer(
df1.join(df2, $"df1.key" === $"df2.key"),
sql("SELECT a.key, b.key FROM testData a JOIN testData b ON a.key = b.key").collect().toSeq)
}
test("join - using aliases after self join") {
val df = Seq(1, 2, 3).map(i => (i, i.toString)).toDF("int", "str")
checkAnswer(
df.as('x).join(df.as('y), $"x.str" === $"y.str").groupBy("x.str").count(),
Row("1", 1) :: Row("2", 1) :: Row("3", 1) :: Nil)
checkAnswer(
df.as('x).join(df.as('y), $"x.str" === $"y.str").groupBy("y.str").count(),
Row("1", 1) :: Row("2", 1) :: Row("3", 1) :: Nil)
}
test("[SPARK-6231] join - self join auto resolve ambiguity") {
val df = Seq((1, "1"), (2, "2")).toDF("key", "value")
checkAnswer(
df.join(df, df("key") === df("key")),
Row(1, "1", 1, "1") :: Row(2, "2", 2, "2") :: Nil)
checkAnswer(
df.join(df.filter($"value" === "2"), df("key") === df("key")),
Row(2, "2", 2, "2") :: Nil)
checkAnswer(
df.join(df, df("key") === df("key") && df("value") === 1),
Row(1, "1", 1, "1") :: Nil)
val left = df.groupBy("key").agg(count("*"))
val right = df.groupBy("key").agg(sum("key"))
checkAnswer(
left.join(right, left("key") === right("key")),
Row(1, 1, 1, 1) :: Row(2, 1, 2, 2) :: Nil)
}
}
| andrewor14/iolap | sql/core/src/test/scala/org/apache/spark/sql/DataFrameJoinSuite.scala | Scala | apache-2.0 | 3,067 |
package jp.co.cyberagent.aeromock.core.bootstrap
import org.apache.commons.lang3.ClassUtils
import org.slf4j.LoggerFactory
import jp.co.cyberagent.aeromock.helper._
/**
 * Manager to control the bootstrap of optional template-engine modules.
* @author stormcat24
*/
object BootstrapManager {
val LOG = LoggerFactory.getLogger(this.getClass())
def delegate = {
EnabledMode.values.map { mode =>
(mode, trye {
val bootstrapClass = ClassUtils.getClass(mode.fqdn).asInstanceOf[Class[_ <: Bootstrap]]
if (bootstrapClass != null) {
bootstrapClass.newInstance.process
LOG.info(s"## Prepared ${mode} module.")
}
})
}
}
}
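// Hedged usage sketch (not from the original source; assumes `trye` from the
// project helpers yields a scala.util.Try): `delegate` pairs each mode with
// the outcome of its reflective bootstrap, so callers can inspect failures
// without aborting start-up:
//
//   // BootstrapManager.delegate.collect { case (mode, result) if result.isFailure => mode }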
sealed abstract class EnabledMode(val fqdn: String)
object EnabledMode {
case object FREEMARKER extends EnabledMode("jp.co.cyberagent.aeromock.template.freemarker.FreemarkerBootstrap")
case object HANDLEBARS extends EnabledMode("jp.co.cyberagent.aeromock.template.handlebars.HandlebarsBootstrap")
case object JADE4j extends EnabledMode("jp.co.cyberagent.aeromock.template.jade4j.Jade4jBootstrap")
case object VELOCITY extends EnabledMode("jp.co.cyberagent.aeromock.template.velocity.VelocityBootstrap")
case object GROOVY_TEMPLATE extends EnabledMode("jp.co.cyberagent.aeromock.template.groovytemplate.GroovyTemplateBootstrap")
case object THYMELEAF extends EnabledMode("jp.co.cyberagent.aeromock.template.thymeleaf.ThymeleafBootstrap")
val values = Array[EnabledMode](
FREEMARKER,
HANDLEBARS,
JADE4j,
VELOCITY,
GROOVY_TEMPLATE,
THYMELEAF
)
}
| CyberAgent/aeromock | aeromock-server/src/main/scala/jp/co/cyberagent/aeromock/core/bootstrap/BootstrapManager.scala | Scala | mit | 1,542 |
package com.example.service
import java.io.{File, FileOutputStream}
import java.net.URI
import akka.actor.ActorSystem
import akka.http.scaladsl.Http
import akka.http.scaladsl.model.{HttpRequest, StatusCodes}
import akka.stream.Materializer
import akka.stream.scaladsl.Sink
import com.typesafe.scalalogging.Logger
import scala.concurrent.Await
import scala.concurrent.duration._
import scala.concurrent.ExecutionContext.Implicits.global
/**
* Created by takao on 2016/11/06.
*/
trait HttpServiceComponent {
val httpService: HttpService
class HttpService {
private[this] val logger = Logger[HttpService]
def download(url: String, path: String)(implicit actorSystem: ActorSystem, materializer: Materializer) = {
val outFile = new File(path)
if (!outFile.exists()) {
val parent = outFile.getParentFile()
if(!parent.exists()) {
parent.mkdirs()
}
val downloadTask = Http().singleRequest(HttpRequest(uri = url))
val response = Await.result(downloadTask, 5.minutes)
response.status match {
case StatusCodes.OK =>
val channel = new FileOutputStream(path).getChannel
val task = response.entity.getDataBytes().runWith(Sink.foreach(b => channel.write(b.asByteBuffer)), materializer)
task.andThen { case _ =>
channel.close()
}
          case _ => // ignore the error
            logger.warn(s"Download failed: ${response.status} ($url)")
}
}
}
}
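  // Hedged usage sketch (hypothetical URL and output path; assumes an implicit
  // ActorSystem and Materializer in scope):
  //   httpService.download("https://example.com/a.jpg", "/tmp/images/a.jpg")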
}
| SakaiTakao/TwitterImageDownloader | src/main/scala/com/example/service/HttpServiceComponent.scala | Scala | bsd-2-clause | 1,607 |
package mavigator.index
import scala.language.implicitConversions
import org.scalajs.dom.html
import rx._
import scala.util.Try
import scala.util.Success
import scala.util.Failure
import scalatags.JsDom.all._
object Util {
/**
* Copied from https://github.com/lihaoyi/workbench-example-app/blob/todomvc/src/main/scala/example/Framework.scala
*
* Sticks some Rx into a Scalatags fragment, which means hooking up an Obs
* to propagate changes into the DOM via the element's ID. Monkey-patches
* the Obs onto the element itself so we have a reference to kill it when
* the element leaves the DOM (e.g. it gets deleted).
*/
implicit def rxMod(r: Rx[HtmlTag])(implicit owner: Ctx.Owner): Frag = {
def rSafe = r.toTry match {
case Success(v) => v.render
case Failure(e) => span(e.toString, backgroundColor := "red").render
}
var last = rSafe
r.triggerLater{
val newLast = rSafe
last.parentElement.replaceChild(newLast, last)
last = newLast
}
bindNode(last)
}
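  // Hedged usage sketch (assumes an implicit Ctx.Owner in scope; names are
  // illustrative):
  //   val colour = Var("red")
  //   val frag: Frag = Rx { span(backgroundColor := colour(), "status") }
  //   // updating colour() replaces the rendered <span> in place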
}
| project-condor/mavigator | mavigator-cockpit/src/main/scala/mavigator/index/Util.scala | Scala | gpl-3.0 | 1,044 |
/* NSC -- new Scala compiler
* Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
package scala
package reflect
package internal
import scala.language.implicitConversions
import scala.io.Codec
trait Names extends api.Names {
private final val HASH_SIZE = 0x8000
private final val HASH_MASK = 0x7FFF
private final val NAME_SIZE = 0x20000
final val nameDebug = false
// Ideally we would just synchronize unconditionally and let HotSpot's Biased Locking
// kick in in the compiler universe, where access to the lock is single threaded. But,
// objects created in the first 4seconds of the JVM startup aren't eligible for biased
// locking.
//
// We might also be able to accept the performance hit, but we don't have tools to
// detect performance regressions.
//
// Discussion: https://groups.google.com/forum/#!search/biased$20scala-internals/scala-internals/0cYB7SkJ-nM/47MLhsgw8jwJ
protected def synchronizeNames: Boolean = false
private val nameLock: Object = new Object
/** Memory to store all names sequentially. */
var chrs: Array[Char] = new Array[Char](NAME_SIZE)
private var nc = 0
/** Hashtable for finding term names quickly. */
private val termHashtable = new Array[TermName](HASH_SIZE)
/** Hashtable for finding type names quickly. */
private val typeHashtable = new Array[TypeName](HASH_SIZE)
/**
* The hashcode of a name depends on the first, the last and the middle character,
* and the length of the name.
*/
private def hashValue(cs: Array[Char], offset: Int, len: Int): Int =
if (len > 0)
(len * (41 * 41 * 41) +
cs(offset) * (41 * 41) +
cs(offset + len - 1) * 41 +
cs(offset + (len >> 1)))
else 0
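  // For example, hashValue("foo".toCharArray, 0, 3) is
  // 3*(41*41*41) + 'f'*(41*41) + 'o'*41 + 'o': the length plus the first,
  // last and middle characters, so hashing stays O(1) regardless of name length.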
/** Is (the ASCII representation of) name at given index equal to
* cs[offset..offset+len-1]?
*/
private def equals(index: Int, cs: Array[Char], offset: Int, len: Int): Boolean = {
var i = 0
while ((i < len) && (chrs(index + i) == cs(offset + i)))
i += 1
i == len
}
/** Enter characters into chrs array. */
private def enterChars(cs: Array[Char], offset: Int, len: Int) {
var i = 0
while (i < len) {
if (nc + i == chrs.length) {
val newchrs = new Array[Char](chrs.length * 2)
scala.compat.Platform.arraycopy(chrs, 0, newchrs, 0, chrs.length)
chrs = newchrs
}
chrs(nc + i) = cs(offset + i)
i += 1
}
if (len == 0) nc += 1
else nc = nc + len
}
/** Create a term name from the characters in cs[offset..offset+len-1]. */
final def newTermName(cs: Array[Char], offset: Int, len: Int): TermName =
newTermName(cs, offset, len, cachedString = null)
final def newTermName(cs: Array[Char]): TermName = newTermName(cs, 0, cs.length)
final def newTypeName(cs: Array[Char]): TypeName = newTypeName(cs, 0, cs.length)
/** Create a term name from the characters in cs[offset..offset+len-1].
* TODO - have a mode where name validation is performed at creation time
* (e.g. if a name has the string "$class" in it, then fail if that
* string is not at the very end.)
*
* @param len0 the length of the name. Negative lengths result in empty names.
*/
final def newTermName(cs: Array[Char], offset: Int, len0: Int, cachedString: String): TermName = {
def body = {
require(offset >= 0, "offset must be non-negative, got " + offset)
val len = math.max(len0, 0)
val h = hashValue(cs, offset, len) & HASH_MASK
var n = termHashtable(h)
while ((n ne null) && (n.length != len || !equals(n.start, cs, offset, len)))
n = n.next
if (n ne null) n
else {
// The logic order here is future-proofing against the possibility
// that name.toString will become an eager val, in which case the call
// to enterChars cannot follow the construction of the TermName.
var startIndex = 0
if (cs == chrs) {
// Optimize for subName, the new name is already stored in chrs
startIndex = offset
} else {
startIndex = nc
enterChars(cs, offset, len)
}
val next = termHashtable(h)
val termName =
if (cachedString ne null) new TermName_S(startIndex, len, next, cachedString)
else new TermName_R(startIndex, len, next)
// Add the new termName to the hashtable only after it's been fully constructed
termHashtable(h) = termName
termName
}
}
if (synchronizeNames) nameLock.synchronized(body) else body
}
final def newTypeName(cs: Array[Char], offset: Int, len: Int, cachedString: String): TypeName =
newTermName(cs, offset, len, cachedString).toTypeName
/** Create a term name from string. */
@deprecatedOverriding("To synchronize, use `override def synchronizeNames = true`", "2.11.0") // overridden in https://github.com/scala-ide/scala-ide/blob/master/org.scala-ide.sdt.core/src/scala/tools/eclipse/ScalaPresentationCompiler.scala
def newTermName(s: String): TermName = newTermName(s.toCharArray(), 0, s.length(), null)
/** Create a type name from string. */
@deprecatedOverriding("To synchronize, use `override def synchronizeNames = true`", "2.11.0") // overridden in https://github.com/scala-ide/scala-ide/blob/master/org.scala-ide.sdt.core/src/scala/tools/eclipse/ScalaPresentationCompiler.scala
def newTypeName(s: String): TypeName = newTermName(s).toTypeName
/** Create a term name from the UTF8 encoded bytes in bs[offset..offset+len-1]. */
final def newTermName(bs: Array[Byte], offset: Int, len: Int): TermName = {
val chars = Codec.fromUTF8(bs, offset, len)
newTermName(chars, 0, chars.length)
}
final def newTermNameCached(s: String): TermName =
newTermName(s.toCharArray(), 0, s.length(), cachedString = s)
final def newTypeNameCached(s: String): TypeName =
newTypeName(s.toCharArray(), 0, s.length(), cachedString = s)
/** Create a type name from the characters in cs[offset..offset+len-1]. */
final def newTypeName(cs: Array[Char], offset: Int, len: Int): TypeName =
newTermName(cs, offset, len, cachedString = null).toTypeName
/** Create a type name from the UTF8 encoded bytes in bs[offset..offset+len-1]. */
final def newTypeName(bs: Array[Byte], offset: Int, len: Int): TypeName =
newTermName(bs, offset, len).toTypeName
/**
* Used by the GenBCode backend to lookup type names that are known to already exist. This method
* might be invoked in a multi-threaded setting. Invoking newTypeName instead might be unsafe.
*
* can-multi-thread: names are added to the hash tables only after they are fully constructed.
*/
final def lookupTypeName(cs: Array[Char]): TypeName = {
val hash = hashValue(cs, 0, cs.length) & HASH_MASK
var typeName = typeHashtable(hash)
while ((typeName ne null) && (typeName.length != cs.length || !equals(typeName.start, cs, 0, cs.length))) {
typeName = typeName.next
}
assert(typeName != null, s"TypeName ${new String(cs)} not yet created.")
typeName
}
// Classes ----------------------------------------------------------------------
/** The name class.
* TODO - resolve schizophrenia regarding whether to treat Names as Strings
* or Strings as Names. Give names the key functions the absence of which
* make people want Strings all the time.
*/
sealed abstract class Name(protected val index: Int, protected val len: Int) extends NameApi {
type ThisNameType >: Null <: Name
protected[this] def thisName: ThisNameType
// Note that "Name with ThisNameType" should be redundant
// because ThisNameType <: Name, but due to SI-6161 the
// compile loses track of this fact.
/** Index into name table */
def start: Int = index
/** The next name in the same hash bucket. */
def next: Name with ThisNameType
/** The length of this name. */
final def length: Int = len
final def isEmpty = length == 0
final def nonEmpty = !isEmpty
def nameKind: String
def isTermName: Boolean
def isTypeName: Boolean
def toTermName: TermName
def toTypeName: TypeName
def companionName: Name
def bothNames: List[Name] = List(toTermName, toTypeName)
/** Return the subname with characters from from to to-1. */
def subName(from: Int, to: Int): Name with ThisNameType
/** Return a new name of the same variety. */
def newName(str: String): Name with ThisNameType
/** Return a new name based on string transformation. */
def mapName(f: String => String): Name with ThisNameType = newName(f(toString))
/** Copy bytes of this name to buffer cs, starting at position `offset`. */
final def copyChars(cs: Array[Char], offset: Int) =
scala.compat.Platform.arraycopy(chrs, index, cs, offset, len)
/** @return the ascii representation of this name */
final def toChars: Array[Char] = { // used by ide
val cs = new Array[Char](len)
copyChars(cs, 0)
cs
}
/** @return the hash value of this name */
final override def hashCode(): Int = index
/** @return true if the string value of this name is equal
* to the string value of the given name or String.
*/
def string_==(that: Name): Boolean = (that ne null) && (toString == that.toString)
def string_==(that: String): Boolean = (that ne null) && (toString == that)
/****
* This has been quite useful to find places where people are comparing
* a TermName and a TypeName, or a Name and a String.
override def equals(other: Any) = paranoidEquals(other)
private def paranoidEquals(other: Any): Boolean = {
val cmp = this eq other.asInstanceOf[AnyRef]
if (cmp || !nameDebug)
return cmp
other match {
case x: String =>
Console.println(s"Compared $debugString and String '$x'")
case x: Name =>
if (this.isTermName != x.isTermName) {
val panic = this.toTermName == x.toTermName
Console.println("Compared '%s' and '%s', one term, one type.%s".format(this, x,
if (panic) " And they contain the same name string!"
else ""
))
}
case _ =>
}
false
}
****/
/** @return the i'th Char of this name */
final def charAt(i: Int): Char = chrs(index + i)
/** @return the index of first occurrence of char c in this name, length if not found */
final def pos(c: Char): Int = pos(c, 0)
/** @return the index of first occurrence of s in this name, length if not found */
final def pos(s: String): Int = pos(s, 0)
/** Returns the index of the first occurrence of character c in
* this name from start, length if not found.
*
* @param c the character
* @param start the index from which to search
* @return the index of the first occurrence of c
*/
final def pos(c: Char, start: Int): Int = {
var i = start
while (i < len && chrs(index + i) != c) i += 1
i
}
/** Returns the index of the first occurrence of nonempty string s
* in this name from start, length if not found.
*
* @param s the string
* @param start the index from which to search
* @return the index of the first occurrence of s
*/
final def pos(s: String, start: Int): Int = {
var i = pos(s.charAt(0), start)
while (i + s.length() <= len) {
var j = 1
while (s.charAt(j) == chrs(index + i + j)) {
j += 1
if (j == s.length()) return i
}
i = pos(s.charAt(0), i + 1)
}
len
}
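    // For example (illustrative): within the name "scala", pos("ala", 0) == 2,
    // while pos("z", 0) == length, the not-found sentinel.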
/** Returns the index of last occurrence of char c in this
* name, -1 if not found.
*
* @param c the character
* @return the index of the last occurrence of c
*/
final def lastPos(c: Char): Int = lastPos(c, len - 1)
/** Returns the index of the last occurrence of char c in this
* name from start, -1 if not found.
*
* @param c the character
* @param start the index from which to search
* @return the index of the last occurrence of c
*/
final def lastPos(c: Char, start: Int): Int = {
var i = start
while (i >= 0 && chrs(index + i) != c) i -= 1
i
}
/** Does this name start with prefix? */
final def startsWith(prefix: Name): Boolean = startsWith(prefix, 0)
/** Does this name start with prefix at given start index? */
final def startsWith(prefix: Name, start: Int): Boolean = {
var i = 0
while (i < prefix.length && start + i < len &&
chrs(index + start + i) == chrs(prefix.start + i))
i += 1
i == prefix.length
}
final def startsWith(prefix: String, start: Int): Boolean = {
var i = 0
while (i < prefix.length && start + i < len &&
chrs(index + start + i) == prefix.charAt(i))
i += 1
i == prefix.length
}
/** Does this name end with suffix? */
final def endsWith(suffix: Name): Boolean = endsWith(suffix, len)
/** Does this name end with suffix just before given end index? */
final def endsWith(suffix: Name, end: Int): Boolean = {
var i = 1
while (i <= suffix.length && i <= end &&
chrs(index + end - i) == chrs(suffix.start + suffix.length - i))
i += 1
i > suffix.length
}
final def endsWith(suffix: String, end: Int): Boolean = {
var i = 1
while (i <= suffix.length && i <= end &&
chrs(index + end - i) == suffix.charAt(suffix.length - i))
i += 1
i > suffix.length
}
final def containsName(subname: String): Boolean = containsName(newTermName(subname))
final def containsName(subname: Name): Boolean = {
var start = 0
val last = len - subname.length
while (start <= last && !startsWith(subname, start)) start += 1
start <= last
}
final def containsChar(ch: Char): Boolean = {
var i = index
val max = index + len
while (i < max) {
if (chrs(i) == ch)
return true
i += 1
}
false
}
/** Some thoroughly self-explanatory convenience functions. They
* assume that what they're being asked to do is known to be valid.
*/
final def startChar: Char = this charAt 0
final def endChar: Char = this charAt len - 1
final def startsWith(char: Char): Boolean = len > 0 && startChar == char
final def startsWith(name: String): Boolean = startsWith(name, 0)
final def endsWith(char: Char): Boolean = len > 0 && endChar == char
final def endsWith(name: String): Boolean = endsWith(name, len)
/** Rewrite the confusing failure indication via result == length to
* the normal failure indication via result == -1.
*/
private def fixIndexOf(idx: Int): Int = if (idx == length) -1 else idx
def indexOf(ch: Char) = fixIndexOf(pos(ch))
def indexOf(ch: Char, fromIndex: Int) = fixIndexOf(pos(ch, fromIndex))
def indexOf(s: String) = fixIndexOf(pos(s))
/** The lastPos methods already return -1 on failure. */
def lastIndexOf(ch: Char): Int = lastPos(ch)
def lastIndexOf(s: String): Int = toString lastIndexOf s
/** Replace all occurrences of `from` by `to` in
* name; result is always a term name.
*/
def replace(from: Char, to: Char): Name = {
val cs = new Array[Char](len)
var i = 0
while (i < len) {
val ch = charAt(i)
cs(i) = if (ch == from) to else ch
i += 1
}
newTermName(cs, 0, len)
}
/* TODO - reconcile/fix that encode returns a Name but
* decode returns a String.
*/
/** !!! Duplicative but consistently named.
*/
def decoded: String = decode
def encoded: String = "" + encode
// def decodedName: ThisNameType = newName(decoded)
def encodedName: ThisNameType = encode
    /** Replace operator symbols by corresponding \$op_name. */
def encode: ThisNameType = {
val str = toString
val res = NameTransformer.encode(str)
if (res == str) thisName else newName(res)
}
    /** Replace \$op_name by corresponding operator symbol. */
def decode: String = {
if (this containsChar '$') {
val str = toString
val res = NameTransformer.decode(str)
if (res == str) str
else res
}
else toString
}
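    // For example, encode turns the term name "::" into "$colon$colon" and
    // decode reverses it; names without operator characters pass through
    // unchanged.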
/** TODO - find some efficiency. */
def append(ch: Char) = newName(toString + ch)
def append(suffix: String) = newName(toString + suffix)
def append(suffix: Name) = newName(toString + suffix)
def append(separator: Char, suffix: Name) = newName(toString + separator + suffix)
def prepend(prefix: String) = newName("" + prefix + this)
def decodedName: ThisNameType = newName(decode)
def isOperatorName: Boolean = decode != toString // used by ide
def longString: String = nameKind + " " + decode
def debugString = { val s = decode ; if (isTypeName) s + "!" else s }
}
implicit def AnyNameOps(name: Name): NameOps[Name] = new NameOps(name)
implicit def TermNameOps(name: TermName): NameOps[TermName] = new NameOps(name)
implicit def TypeNameOps(name: TypeName): NameOps[TypeName] = new NameOps(name)
/** FIXME: This is a good example of something which is pure "value class" but cannot
* reap the benefits because an (unused) $outer pointer so it is not single-field.
*/
final class NameOps[T <: Name](name: T) {
import NameTransformer._
def stripSuffix(suffix: String): T = if (name endsWith suffix) dropRight(suffix.length) else name // OPT avoid creating a Name with `suffix`
def stripSuffix(suffix: Name): T = if (name endsWith suffix) dropRight(suffix.length) else name
def take(n: Int): T = name.subName(0, n).asInstanceOf[T]
def drop(n: Int): T = name.subName(n, name.length).asInstanceOf[T]
def dropRight(n: Int): T = name.subName(0, name.length - n).asInstanceOf[T]
def dropLocal: TermName = name.toTermName stripSuffix LOCAL_SUFFIX_STRING
def dropSetter: TermName = name.toTermName stripSuffix SETTER_SUFFIX_STRING
def dropModule: T = this stripSuffix MODULE_SUFFIX_STRING
def localName: TermName = getterName append LOCAL_SUFFIX_STRING
def setterName: TermName = getterName append SETTER_SUFFIX_STRING
def getterName: TermName = dropTraitSetterSeparator.dropSetter.dropLocal
private def dropTraitSetterSeparator: TermName =
name indexOf TRAIT_SETTER_SEPARATOR_STRING match {
case -1 => name.toTermName
case idx => name.toTermName drop idx drop TRAIT_SETTER_SEPARATOR_STRING.length
}
}
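  // Added illustration (not in the original source; constants assumed from
  // NameTransformer): the ops above are designed to compose, so for a trait
  // field accessor the chain in `getterName` peels one layer at a time:
  //
  //   TermName("T$_setter_$x_$eq").getterName  == TermName("x")
  //   TermName("x ").dropLocal                 == TermName("x")  // LOCAL_SUFFIX_STRING is " "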
implicit val NameTag = ClassTag[Name](classOf[Name])
/** A name that contains no operator chars nor dollar signs.
* TODO - see if it's any faster to do something along these lines.
* Cute: now that exhaustivity kind of works, the mere presence of
* this trait causes TermName and TypeName to stop being exhaustive.
* Commented out.
*/
// trait AlphaNumName extends Name {
// final override def encode = thisName
// final override def decodedName = thisName
// final override def decode = toString
// final override def isOperatorName = false
// }
/** TermName_S and TypeName_S have fields containing the string version of the name.
* TermName_R and TypeName_R recreate it each time toString is called.
*/
private final class TermName_S(index0: Int, len0: Int, next0: TermName, override val toString: String) extends TermName(index0, len0, next0) {
protected def createCompanionName(next: TypeName): TypeName = new TypeName_S(index, len, next, toString)
override def newName(str: String): TermName = newTermNameCached(str)
}
private final class TypeName_S(index0: Int, len0: Int, next0: TypeName, override val toString: String) extends TypeName(index0, len0, next0) {
override def newName(str: String): TypeName = newTypeNameCached(str)
}
private final class TermName_R(index0: Int, len0: Int, next0: TermName) extends TermName(index0, len0, next0) {
protected def createCompanionName(next: TypeName): TypeName = new TypeName_R(index, len, next)
override def toString = new String(chrs, index, len)
}
private final class TypeName_R(index0: Int, len0: Int, next0: TypeName) extends TypeName(index0, len0, next0) {
override def toString = new String(chrs, index, len)
}
// SYNCNOTE: caller to constructor must synchronize if `synchronizeNames` is enabled
sealed abstract class TermName(index0: Int, len0: Int, val next: TermName) extends Name(index0, len0) with TermNameApi {
type ThisNameType = TermName
protected[this] def thisName: TermName = this
def isTermName: Boolean = true
def isTypeName: Boolean = false
def toTermName: TermName = this
def toTypeName: TypeName = {
def body = {
// Re-computing the hash saves a field for storing it in the TermName
val h = hashValue(chrs, index, len) & HASH_MASK
var n = typeHashtable(h)
while ((n ne null) && n.start != index)
n = n.next
if (n ne null) n
else {
val next = typeHashtable(h)
val typeName = createCompanionName(next)
// Add the new typeName to the hashtable only after it's been fully constructed
typeHashtable(h) = typeName
typeName
}
}
if (synchronizeNames) nameLock.synchronized(body) else body
}
def newName(str: String): TermName = newTermName(str)
def companionName: TypeName = toTypeName
def subName(from: Int, to: Int): TermName =
newTermName(chrs, start + from, to - from)
def nameKind = "term"
/** SYNCNOTE: caller must synchronize if `synchronizeNames` is enabled */
protected def createCompanionName(next: TypeName): TypeName
}
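  // Added note: a TermName and its companion TypeName share the same slice of
  // `chrs`, so once `toTypeName` has populated the type hashtable the
  // round-trip `n.toTypeName.toTermName eq n` is expected to hold.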
implicit val TermNameTag = ClassTag[TermName](classOf[TermName])
object TermName extends TermNameExtractor {
def apply(s: String) = newTermName(s)
def unapply(name: TermName): Option[String] = Some(name.toString)
}
sealed abstract class TypeName(index0: Int, len0: Int, val next: TypeName) extends Name(index0, len0) with TypeNameApi {
type ThisNameType = TypeName
protected[this] def thisName: TypeName = this
def isTermName: Boolean = false
def isTypeName: Boolean = true
def toTermName: TermName = {
def body = {
// Re-computing the hash saves a field for storing it in the TypeName
val h = hashValue(chrs, index, len) & HASH_MASK
var n = termHashtable(h)
while ((n ne null) && n.start != index)
n = n.next
assert (n ne null, s"TypeName $this is missing its correspondent")
n
}
if (synchronizeNames) nameLock.synchronized(body) else body
}
def toTypeName: TypeName = this
def newName(str: String): TypeName = newTypeName(str)
def companionName: TermName = toTermName
def subName(from: Int, to: Int): TypeName =
newTypeName(chrs, start + from, to - from)
def nameKind = "type"
override def decode = if (nameDebug) super.decode + "!" else super.decode
}
implicit val TypeNameTag = ClassTag[TypeName](classOf[TypeName])
object TypeName extends TypeNameExtractor {
def apply(s: String) = newTypeName(s)
def unapply(name: TypeName): Option[String] = Some(name.toString)
}
}
| slothspot/scala | src/reflect/scala/reflect/internal/Names.scala | Scala | bsd-3-clause | 23,688 |
object Test {
trait Engine1
implicit class EngineTools1[Params, R](e: Engine1) {
def asRequirement: Requirement1[Params, R] = ???
}
trait Requirement1[Params, R] {
def pathsIncludingSelf: Traversable[List[Reportable1[Params, R]]]
}
trait Reportable1[Params, R]
// "missing parameter type" error was swallowed in 2.11.0 leading to a crash
// in the backend.
//
// This error is itself a regression (or at least a change) in 2.11.0-M7,
// specifically in scala/bug#7944. The type parameters to the implicit view
// `EngineTools1` are undetermined, and are now treated as type variables
// in the expected type of the closure argument to `withFilter`.
for (path: List[Any] <- (null : Engine1).asRequirement.pathsIncludingSelf.toList) {
???
}
}
| scala/scala | test/files/neg/t8675b.scala | Scala | apache-2.0 | 785 |
/* sparkP1.scala */
import org.apache.spark.SparkContext
import org.apache.spark.SparkContext._
import org.apache.spark.SparkConf
import au.com.bytecode.opencsv.CSVParser
import org.apache.spark.rdd.RDD
object sparkP1 {
def dropHeader(data: RDD[String]): RDD[String] = {
    data.mapPartitionsWithIndex((idx, lines) => {
      // Drop the CSV header from the first partition only. Returning the
      // iterator produced by `drop` is safe on all Scala versions, whereas
      // discarding its result silently keeps the header on 2.12+.
      if (idx == 0) lines.drop(1) else lines
    })
}
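  // Added sketch (hypothetical helper, not part of the original job): the
  // header-dropping logic above can be sanity-checked without Spark by using
  // a plain iterator in place of a partition.
  def dropHeaderLocalCheck(): Unit = {
    val idx = 0
    val partition0 = Iterator("header", "row1", "row2")
    val dropped = if (idx == 0) partition0.drop(1) else partition0
    assert(dropped.toList == List("row1", "row2"))
  }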
def main(args: Array[String]) {
val file = "SacramentocrimeJanuary2006.csv" // Should be some file on your system
    val conf = new SparkConf().setAppName("most frequent crime")
val sc = new SparkContext(conf)
val csv = sc.textFile(file).cache()
val csvNoHeader: RDD[String] = dropHeader(csv)
val extractData=csvNoHeader.mapPartitions(lines => {
val parser = new CSVParser(',')
lines.map(line => {
val columns=parser.parseLine(line)
        // Use the column number of the crime description (column 6)
Array(columns(6)).mkString(",")
})
})
val pairs=extractData.map(x => (x,1))
val result = pairs.reduceByKey(_+_)
val maxResult = result.max()(new Ordering[Tuple2[String, Int]]() {
override def compare(x: (String, Int), y: (String, Int)): Int =
Ordering[Int].compare(x._2, y._2)
})
println(maxResult)
}
}
| heygawau/Sparkinou | src/main/scala/sparkP1.scala | Scala | apache-2.0 | 1,291 |
package docs
import java.nio.charset.StandardCharsets
import mdoc.Reporter
import mdoc.StringModifier
import mdoc.internal.pos.PositionSyntax._
import scala.meta.inputs.Input
import scala.meta.inputs.Position
import scala.meta.internal.io.FileIO
import scala.meta.internal.io.PathIO
import scala.meta.io.AbsolutePath
class FileModifier extends StringModifier {
val name = "file"
override def process(
info: String,
code: Input,
reporter: Reporter
): String = {
val file = AbsolutePath(info)
if (file.isFile) {
val text = FileIO.slurp(file, StandardCharsets.UTF_8).trim
val language = PathIO.extension(file.toNIO) match {
case "scala" => "scala"
case "md" => "md"
case _ => "text"
}
s"""
File: [${file.toNIO.getFileName}](https://github.com/scalameta/mdoc/blob/master/$info)
`````$language
$text
`````
"""
} else {
val pos =
Position.Range(code, 0 - info.length - 1, 0 - 1).toUnslicedPosition
reporter.error(pos, s"no such file: $file")
""
}
}
}
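// Added usage note (assumption about the docs setup, not stated in this file):
// mdoc dispatches a StringModifier on its `name`, so this one is expected to
// be triggered from a markdown fence of the form
//
//   ```scala mdoc:file:project/plugins.sbt
//   ```
//
// and to splice the referenced file's contents into the rendered page.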
| scalameta/scalafmt | scalafmt-docs/src/main/scala/docs/FileModifier.scala | Scala | apache-2.0 | 1,064 |
/*
* Copyright (c) 2013-2022 Erik van Oosten
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package nl.grons.metrics4.scala
import java.text.SimpleDateFormat
import com.codahale.metrics.health.HealthCheck.Result
import com.codahale.metrics.health.{HealthCheck, HealthCheckRegistry}
import org.mockito.Mockito._
import org.scalactic.Equality
import org.scalatest.funspec.AnyFunSpec
import org.scalatest.matchers.should.Matchers._
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration.{DurationInt, FiniteDuration}
import scala.concurrent.{Future, Promise, TimeoutException}
import scala.language.implicitConversions
import scala.util.Try
class HealthCheckSpec extends AnyFunSpec {
implicit private val resultWithApproximateTimestampEquality: Equality[Result] =
HealthCheckResultWithApproximateTimestampEquality
describe("healthCheck factory method") {
it ("registers the created checker") {
val checkOwner = newCheckOwner
val check = checkOwner.createBooleanHealthCheck { true }
verify(checkOwner.registry).register("nl.grons.metrics4.scala.CheckOwner.test", check)
}
it("build health checks that call the provided checker") {
val mockChecker = mock(classOf[SimpleChecker])
when(mockChecker.check()).thenReturn(true).thenReturn(false).thenReturn(true).thenReturn(false)
val check = newCheckOwner.createCheckerHealthCheck(mockChecker)
check.execute() should equal(Result.healthy())
check.execute() should equal(Result.unhealthy("FAIL"))
check.execute() should equal(Result.healthy())
check.execute() should equal(Result.unhealthy("FAIL"))
}
it("supports Boolean checker returning true") {
val check = newCheckOwner.createBooleanHealthCheck { true }
check.execute() should equal(Result.healthy())
}
it("supports Boolean checker returning false") {
val check = newCheckOwner.createBooleanHealthCheck { false }
check.execute() should equal(Result.unhealthy("FAIL"))
}
it("supports Boolean checker returning true implicitly") {
val check = newCheckOwner.createImplicitBooleanHealthCheck { Success }
check.execute() should equal(Result.healthy())
}
it("supports Boolean checker returning false implicitly") {
val check = newCheckOwner.createImplicitBooleanHealthCheck { Failure }
check.execute() should equal(Result.unhealthy("FAIL"))
}
it("supports Try checker returning Success[Long]") {
val check = newCheckOwner.createTryHealthCheck { Try(123L) }
check.execute() should equal(Result.healthy("123"))
}
it("supports Try checker returning Success(Unit)") {
val check = newCheckOwner.createTryHealthCheck { Try(()) }
check.execute() should equal(Result.healthy())
}
it("supports Try checker returning Success(null)") {
val check = newCheckOwner.createTryHealthCheck { Try[String](null) }
check.execute() should equal(Result.healthy("null"))
}
it("supports Try checker returning Failure") {
val exception: IllegalArgumentException = new IllegalArgumentException()
val check = newCheckOwner.createTryHealthCheck { Try(throw exception) }
check.execute() should equal(Result.unhealthy(exception))
}
it("supports Either checker returning Right[Long]") {
val check = newCheckOwner.createEitherHealthCheck { Right(123L) }
check.execute() should equal(Result.healthy("123"))
}
it("supports Either checker returning Left[Boolean]") {
val check = newCheckOwner.createEitherHealthCheck { Left(true) }
check.execute() should equal(Result.unhealthy("true"))
}
it("supports Either checker returning Right[String]") {
val check = newCheckOwner.createEitherHealthCheck { Right("I am alright") }
check.execute() should equal(Result.healthy("I am alright"))
}
it("supports Either checker returning Left[String]") {
val check = newCheckOwner.createEitherHealthCheck { Left("Oops, I am not fine") }
check.execute() should equal(Result.unhealthy("Oops, I am not fine"))
}
it("supports Either checker returning Left[Throwable]") {
val exception: IllegalArgumentException = new IllegalArgumentException()
val check = newCheckOwner.createEitherHealthCheck { Left(exception) }
check.execute() should equal(Result.unhealthy(exception))
}
it("supports Result checker returning Result unchanged") {
val result = Result.healthy()
val check = newCheckOwner.createResultHealthCheck { result }
check.execute() should be theSameInstanceAs result
}
it("supports checker throwing an exception") {
val exception: IllegalArgumentException = new IllegalArgumentException()
val check = newCheckOwner.createThrowingHealthCheck(exception)
check.execute() should equal(Result.unhealthy(exception))
}
it("supports override of metric base name") {
val checkOwner = new CheckOwner() {
override lazy val metricBaseName: MetricName = MetricName("OverriddenMetricBaseName")
}
val check = checkOwner.createBooleanHealthCheck { true }
verify(checkOwner.registry).register("OverriddenMetricBaseName.test", check)
}
it("supports Unit checker with side-effects (healthy)") {
var counter = 0
val sideEffect: () => Unit = () => {
counter += 1
}
val check = newCheckOwner.createUnitHealthCheckWithSideEffect(sideEffect)
check.execute() should equal(Result.healthy())
counter should be (1)
check.execute() should equal(Result.healthy())
counter should be (2)
}
it("supports Unit checker with side-effects (unhealthy)") {
val checkerFailure = new IllegalArgumentException()
var counter = 0
val sideEffect: () => Unit = () => {
counter += 1
throw checkerFailure
}
val check = newCheckOwner.createUnitHealthCheckWithSideEffect(sideEffect)
check.execute() should equal(Result.unhealthy(checkerFailure))
counter should be (1)
check.execute() should equal(Result.unhealthy(checkerFailure))
counter should be (2)
}
it("supports Future checker returning a Success(Long)") {
val check = newCheckOwner.createFutureHealthCheck(200.milliseconds)(Future {
Thread.sleep(50)
123L
})
check.execute() should equal(Result.healthy("123"))
}
it("supports Future checker returning a Failure(exception)") {
val exception: IllegalArgumentException = new IllegalArgumentException()
val check = newCheckOwner.createFutureHealthCheck(200.milliseconds)(Future[Long] {
Thread.sleep(50)
throw exception
})
check.execute() should equal(Result.unhealthy(exception))
}
it("supports Future checker not returning in time") {
val hopelessFuture = Promise().future
val check = newCheckOwner.createFutureHealthCheck(10.milliseconds)(hopelessFuture)
val checkResult = check.execute()
checkResult.isHealthy should be (false)
checkResult.getError shouldBe a[TimeoutException]
}
}
private val newCheckOwner = new CheckOwner()
}
/**
* [[HealthCheck.Result]] equality for testing purposes:
* * the timestamp may be off a little bit
* * the error must be the same instance
* * details are not checked
*/
private object HealthCheckResultWithApproximateTimestampEquality extends Equality[Result] {
private def timestampInMillis(result: Result): Long = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSXXX").parse(result.getTimestamp).getTime
def areEqual(a: Result, b: Any): Boolean = b match {
case r: Result =>
a.isHealthy == r.isHealthy &&
(a.getError eq r.getError) &&
a.getMessage === r.getMessage &&
timestampInMillis(a) === timestampInMillis(r) +- 200L
case _ => false
}
}
private trait SimpleChecker {
def check(): Boolean
}
private class CheckOwner() extends CheckedBuilder {
val registry: HealthCheckRegistry = mock(classOf[HealthCheckRegistry])
// Unfortunately we need a helper method for each supported type. If we wanted a single helper method,
// we would need to repeat the magnet pattern right here in a test class :(
def createBooleanHealthCheck(checker: => Boolean): HealthCheck =
healthCheck("test", "FAIL")(checker)
def createImplicitBooleanHealthCheck(checker: => Outcome): HealthCheck =
healthCheck("test", "FAIL")(checker)
def createTryHealthCheck(checker: => Try[_]): HealthCheck =
healthCheck("test", "FAIL")(checker)
def createEitherHealthCheck(checker: => Either[_, _]): HealthCheck =
healthCheck("test", "FAIL")(checker)
def createResultHealthCheck(checker: => Result): HealthCheck =
healthCheck("test", "FAIL")(checker)
def createThrowingHealthCheck(checkerFailure: => Throwable): HealthCheck = {
def alwaysFails(): Boolean = throw checkerFailure
healthCheck("test", "FAIL")(alwaysFails())
}
def createCheckerHealthCheck(checker: => SimpleChecker): HealthCheck =
healthCheck("test", "FAIL")(checker.check())
def createUnitHealthCheckWithSideEffect(sideEffect: () => Unit): HealthCheck = {
// Tests an inline block because of
// https://github.com/erikvanoosten/metrics-scala/issues/42,
// https://github.com/erikvanoosten/metrics-scala/pull/59 and
// https://issues.scala-lang.org/browse/SI-3237
healthCheck("test", "FAIL") {
sideEffect()
// Force result type of unit:
()
}
}
def createFutureHealthCheck(timeout: FiniteDuration)(checker: => Future[_]): HealthCheck = {
implicit val checkTimeout = timeout
healthCheck("test", "FAIL")(checker)
}
}
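/** Added sketch (illustrative only, not exercised by the tests above): in
  * application code a `CheckedBuilder` is mixed in with a real registry; the
  * check name and body here are made up.
  */
private class ExampleHealthOwner extends CheckedBuilder {
  val registry: HealthCheckRegistry = new HealthCheckRegistry
  healthCheck("database.reachable") { true }
}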
/** Used to test implicit conversion to boolean. */
private sealed trait Outcome
private case object Success extends Outcome
private case object Failure extends Outcome
/** Implicitly convertible to [[scala.Boolean]]. */
private object Outcome {
implicit def outcome2Boolean(outcome: Outcome): Boolean = outcome match {
case Success => true
case Failure => false
}
}
| erikvanoosten/metrics-scala | metrics-scala/src/test/scala/nl/grons/metrics4/scala/CheckedBuilderSpec.scala | Scala | apache-2.0 | 10,635 |
package io.megl.scalajs
import io.megl.scalajs.HTML2SJS.{HTMLOptions, convertToXML, validateI18n}
import scala.collection.mutable.ListBuffer
import scala.xml.{Node, XML}
class HtmlTreeProcessor(htmlCode: String, option: HTMLOptions = HTMLOptions()) extends ConversionUtils {
  val root = {
    val xml = convertToXML(htmlCode)
      .replace("<!--", "<comment>")
      .replace("-->", "</comment>")
    println(xml)
    processNode(XML.loadString(xml), 0)
  }
  private lazy val extractIncludeComment = """\[html-partial:include:\{"file":"(.*)"\}\]""".r
private def processNode(node: Node, ident: Int, prevLabel: String = ""): Option[HTMLNode] = {
val label = node.label
// println(label)
label match {
case "#PCDATA" =>
        val realText = node.text.replace("\n\t\n", " ").replaceAll("\\s+", " ")
        val children = node.child
.flatMap(c => processNode(c, ident + 1, prevLabel = label)).filter(_.nonEmpty)
if(realText.trim.nonEmpty || children.nonEmpty) {
Some(TextNode(realText, children=children))
} else None
case "comment" =>
        val realText = node.text.replace("\n\t\n", " ").replaceAll("\\s+", " ")
if(realText.trim.nonEmpty) {
          val includes = extractIncludeComment.findAllIn(realText).matchData.map(_.group(1)).toList
          if (includes.nonEmpty) {
            val entry = filenameToNode(includes.head)
            Some(RawNode(entry + "()"))
} else {
Some(CommentNode(realText.trim))
}
} else None
case _ =>
        val realText = node.text.replace("\n\t\n", " ").replaceAll("\\s+", " ")
// println(node.label)
        val attributes: Seq[(String, String)] = node.attributes.asAttrMap.toSeq.flatMap {
          case (name, value) =>
            name match {
              case "class" =>
                List("^.cls" -> s"""\"$value\"""")
              case "type" =>
                List("^.tpe" -> s"""\"$value\"""")
              case "style" =>
                value.split(";").map(_.trim).filter(_.nonEmpty).map {
                  st =>
                    val tokens = st.split(":").map(_.trim)
                    fixStyle(s"""^.style.${convertCase(tokens(0))}""") -> s"""\"${tokens.drop(1).mkString(" ")}\""""
                }
              case s: String if s.startsWith("data-") =>
                List(s"""VdomAttr("$s")""" -> s"""\"$value\"""")
              case s: String if s.startsWith("m-") =>
                List(s"""VdomAttr("$s")""" -> s"""\"$value\"""")
              case s: String if s.startsWith("aria-") =>
                List(s"""^.aria.${fixAria(convertCase(s.replace("aria-", "")).toLowerCase())}""" -> s"""\"$value\"""")
              case "checked" =>
                List("^.checked" -> toBoolean(value))
              case default =>
                List(s"""^.$default""" -> s"""\"$value\"""")
            }
        }
val children = node.child
.flatMap { c =>
processNode(c, ident + 1, prevLabel = label)
}
      if (children.isEmpty) {
        // possibly a font-icon element
        if (label == "i" && attributes.exists(_._1 == "^.cls")) {
          val cls = attributes.find(_._1 == "^.cls").map(_._2).map(s => s.substring(1, s.length - 1).trim).getOrElse("").split(' ').filter(_.nonEmpty)
          if (cls.contains("fa"))
            return Some(FontAwesome(cls.toList))
          else if (cls.contains("la"))
            return Some(LineAwesome(cls.toList))
          else if (cls.exists(_.startsWith("flaticon-")))
            return Some(FlatIcon(cls.toList))
        }
      }
}
      Some(GenericHTMLNode(label, children = children, attributes = attributes, ident = ident))
}
}
  def fixAria(attr: String): String = {
attr match {
case "haspopup" => "hasPopup"
case _ => attr
}
}
  def fixStyle(attr: String): String = {
attr match {
case "^.style.WIDTH" => "^.width"
case "^.style.MIN_HEIGHT" => "^.minHeight"
case "^.style.MAX_HEIGHT" => "^.maxHeight"
case "^.style.BACKGROUND_IMAGE" => "^.backgroundImage"
case "^.style.MARGIN_TOP" => "^.marginTop"
case _ => attr
}
}
  def toBoolean(str: String): String = {
str match {
case "true"|"false" => str
case "checked" => "true"
case _ => str
}
}
  def body: HTMLNode = root.get.children.filter(_.tag == "body").flatMap(_.children).head
  def bodyChildren: Seq[HTMLNode] = root.get.children.filter(_.tag == "body").flatMap(_.children)
}
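// Added usage sketch: parse a fragment and inspect the converted body nodes.
// `convertToXML` and the `HTMLNode` hierarchy come from the surrounding
// project; the input HTML here is made up.
object HtmlTreeProcessorExample {
  def main(args: Array[String]): Unit = {
    val processor = new HtmlTreeProcessor("""<html><body><i class="fa fa-user"></i></body></html>""")
    processor.bodyChildren.foreach(println)
  }
}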
| aparo/scalajs-converter | html2sjs/src/main/scala/io/megl/scalajs/HtmlTreeProcessor.scala | Scala | apache-2.0 | 4,502 |
import sbt._
import EvaluateConfigurations._
import sbt.Build._
import sbt.Keys._
import com.olegych.scastie.{ScriptSecurityManager, SecuredRun}
object ApplicationBuild extends Build {
val runAll = TaskKey[Unit]("run-all")
val rendererWorker = Project(id = "rendererWorker", base = file("."),
settings = Defaults.defaultSettings ++ DefaultSettings.apply ++ Seq(
runAll <<=
(discoveredMainClasses in Compile, fullClasspath in Compile, runner in(Compile, run), streams) map
runAllTask,
runner in(Compile, run) <<= (taskTemporaryDirectory, scalaInstance) map { (nativeTmp, instance) =>
new SecuredRun(instance, false, nativeTmp)
},
libraryDependencies += "org.scalatest" %% "scalatest" % "2.0",
onLoad in Global := addDepsToState
))
def runAllTask(discoveredMainClasses: Seq[String], fullClasspath: Keys.Classpath, runner: ScalaRun,
streams: Keys.TaskStreams) {
val errors = discoveredMainClasses.flatMap { mainClass =>
runner.run(mainClass, data(fullClasspath), Nil, streams.log)
}
    if (errors.nonEmpty) {
      sys.error(errors.mkString("\n"))
}
}
def addDepsToState(state: State): State = {
val sessionSettings = state.get(Keys.sessionSettings).get
val dependencies = extractDependencies(sessionSettings.currentEval(),
Project.extract(state).currentLoader, state)
SessionSettings
.reapply(sessionSettings.appendRaw(dependencies).appendRaw(onLoad in Global := idFun), state)
}
val allowedKeys = Set[Init[_]#KeyedInitialize[_]](libraryDependencies, scalaVersion, resolvers, scalacOptions, sbtPlugin)
def extractDependencies(eval: compiler.Eval, loader: ClassLoader, state: State): Seq[Setting[_]] = {
val scriptArg = "src/main/scala/test.scala"
val script = file(scriptArg).getAbsoluteFile
try {
ScriptSecurityManager.hardenPermissions {
val embeddedSettings = Script.blocks(script).flatMap { block =>
val imports = List("import sbt._", "import Keys._")
evaluateConfiguration(eval, script, block.lines, imports, block.offset + 1)(loader)
}
embeddedSettings.flatMap {
case setting if allowedKeys.exists(_.scopedKey == setting.key) =>
Project.transform(_ => GlobalScope, setting)
case _ => Nil
}
}
} catch {
case e: Throwable =>
state.log.error(e.getClass.toString)
state.log.error(e.getMessage)
e.getStackTrace.take(100).foreach(e => state.log.error(e.toString))
state.log.trace(e)
Nil
}
}
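// Added illustration (layout assumed, not shown in this file):
// `extractDependencies` evaluates sbt settings embedded in the rendered
// `src/main/scala/test.scala`, in the block style consumed by `Script.blocks`,
// e.g.
//
//   /***
//   scalaVersion := "2.10.3"
//   libraryDependencies += "joda-time" % "joda-time" % "2.3"
//   */
//
// and keeps only the keys whitelisted in `allowedKeys`.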
} | atamborrino/zen-coding-game | renderer-template/project/ApplicationBuild.scala | Scala | apache-2.0 | 2,610 |
// Copyright (C) 2009 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gimd.jgit
import com.google.gimd.GimdException
case class JGitDatabaseException(val branch: JGitBranch,
val msg: String,
val cause: Throwable) extends GimdException(msg, cause) {
def this(branch: JGitBranch, msg: String) = this(branch, msg, null)
def this(branch: JGitBranch, cause: Throwable) = this(branch, null, cause)
}
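// Added usage sketch (hypothetical call site): the auxiliary constructors let
// callers attach either a message or a cause to the offending branch, e.g.
//
//   try { /* JGit I/O against branch */ } catch {
//     case e: java.io.IOException => throw new JGitDatabaseException(branch, e)
//   }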
| gkossakowski/gimd | src/main/scala/com/google/gimd/jgit/JGitDatabaseException.scala | Scala | apache-2.0 | 1,026 |
package org.jetbrains.plugins.scala
package debugger
import java.io.File
import java.util.concurrent.atomic.AtomicReference
import com.intellij.debugger.DebuggerManagerEx
import com.intellij.debugger.engine._
import com.intellij.debugger.engine.evaluation._
import com.intellij.debugger.engine.evaluation.expression.EvaluatorBuilder
import com.intellij.debugger.engine.events.DebuggerContextCommandImpl
import com.intellij.debugger.impl._
import com.intellij.execution.Executor
import com.intellij.execution.application.{ApplicationConfiguration, ApplicationConfigurationType}
import com.intellij.execution.configurations.RunnerSettings
import com.intellij.execution.executors.DefaultDebugExecutor
import com.intellij.execution.process.{ProcessAdapter, ProcessEvent, ProcessHandler, ProcessListener}
import com.intellij.execution.runners.{ExecutionEnvironmentBuilder, ProgramRunner}
import com.intellij.execution.ui.RunContentDescriptor
import com.intellij.openapi.fileEditor.FileDocumentManager
import com.intellij.openapi.module.Module
import com.intellij.openapi.util.Key
import com.intellij.psi.PsiCodeFragment
import com.intellij.psi.search.GlobalSearchScope
import com.intellij.testFramework.UsefulTestCase
import com.intellij.util.concurrency.Semaphore
import com.sun.jdi.VoidValue
import org.jetbrains.plugins.scala.debugger.evaluation.ScalaCodeFragmentFactory
import org.jetbrains.plugins.scala.extensions._
import org.junit.Assert
import scala.collection.mutable
/**
* User: Alefas
* Date: 13.10.11
*/
abstract class ScalaDebuggerTestCase extends ScalaDebuggerTestBase {
private val breakpoints: mutable.Set[(String, Int)] = mutable.Set.empty
protected def runDebugger(mainClass: String, debug: Boolean = false)(callback: => Unit) {
var processHandler: ProcessHandler = null
UsefulTestCase.edt(new Runnable {
def run() {
if (needMake) {
make()
saveChecksums()
}
addBreakpoints()
val runner = ProgramRunner.PROGRAM_RUNNER_EP.getExtensions.find { _.getClass == classOf[GenericDebuggerRunner] }.get
processHandler = runProcess(mainClass, getModule, classOf[DefaultDebugExecutor], new ProcessAdapter {
override def onTextAvailable(event: ProcessEvent, outputType: Key[_]) {
val text = event.getText
if (debug) print(text)
}
}, runner)
}
})
callback
getDebugProcess.stop(true)
processHandler.destroyProcess()
}
protected def runProcess(className: String,
module: Module,
executorClass: Class[_ <: Executor],
listener: ProcessListener,
runner: ProgramRunner[_ <: RunnerSettings]): ProcessHandler = {
val configuration: ApplicationConfiguration = new ApplicationConfiguration("app", module.getProject, ApplicationConfigurationType.getInstance)
configuration.setModule(module)
configuration.setMainClassName(className)
val executor: Executor = Executor.EXECUTOR_EXTENSION_NAME.findExtension(executorClass)
val executionEnvironmentBuilder: ExecutionEnvironmentBuilder = new ExecutionEnvironmentBuilder(module.getProject, executor)
executionEnvironmentBuilder.runProfile(configuration)
val semaphore: Semaphore = new Semaphore
semaphore.down()
val processHandler: AtomicReference[ProcessHandler] = new AtomicReference[ProcessHandler]
runner.execute(executionEnvironmentBuilder.build, new ProgramRunner.Callback {
def processStarted(descriptor: RunContentDescriptor) {
val handler: ProcessHandler = descriptor.getProcessHandler
assert(handler != null)
handler.addProcessListener(listener)
processHandler.set(handler)
semaphore.up()
}
})
semaphore.waitFor()
processHandler.get
}
protected override def tearDown(): Unit = {
super.tearDown()
}
protected def getDebugProcess: DebugProcessImpl = {
getDebugSession.getProcess
}
protected def getDebugSession: DebuggerSession = {
DebuggerManagerEx.getInstanceEx(getProject).getContext.getDebuggerSession
}
private def resume() {
getDebugProcess.getManagerThread.invoke(getDebugProcess.
createResumeCommand(getDebugProcess.getSuspendManager.getPausedContext))
}
protected def addBreakpoint(fileName: String, line: Int) {
breakpoints += ((fileName, line))
}
private def addBreakpoints() {
breakpoints.foreach {
case (fileName, line) =>
val ioFile = new File(srcDir, fileName)
val file = getVirtualFile(ioFile)
UsefulTestCase.edt(new Runnable {
def run() {
val document = FileDocumentManager.getInstance().getDocument(file)
val breakpointManager = DebuggerManagerEx.getInstanceEx(getProject).getBreakpointManager
breakpointManager.addLineBreakpoint(document, line)
}
})
}
}
protected def waitForBreakpoint(): SuspendContextImpl = {
var i = 0
def processTerminated: Boolean = getDebugProcess.getExecutionResult.getProcessHandler.isProcessTerminated
while (i < 1000 && suspendContext == null && !processTerminated) {
Thread.sleep(10)
i += 1
}
assert(suspendContext != null, "too long process, terminated=" + processTerminated)
suspendContext
}
protected def managed[T >: Null](callback: => T): T = {
var result: T = null
def ctx = DebuggerContextUtil.createDebuggerContext(getDebugSession, suspendContext)
val semaphore = new Semaphore()
semaphore.down()
getDebugProcess.getManagerThread.invokeAndWait(new DebuggerContextCommandImpl(ctx) {
def threadAction() {
result = callback
semaphore.up()
}
})
def finished = semaphore.waitFor(20000)
assert(finished, "Too long debugger action")
result
}
protected def suspendManager = getDebugProcess.getSuspendManager
protected def suspendContext = suspendManager.getPausedContext
protected def evaluationContext() = new EvaluationContextImpl(suspendContext, suspendContext.getFrameProxy, suspendContext.getFrameProxy.thisObject())
protected def currentSourcePosition = ContextUtil.getSourcePosition(suspendContext)
protected def evalResult(codeText: String): String = {
val semaphore = new Semaphore()
semaphore.down()
val result =
managed[String] {
inReadAction {
val ctx: EvaluationContextImpl = evaluationContext()
val factory = new ScalaCodeFragmentFactory()
val codeFragment: PsiCodeFragment = new CodeFragmentFactoryContextWrapper(factory).
createCodeFragment(new TextWithImportsImpl(CodeFragmentKind.EXPRESSION, codeText),
ContextUtil.getContextElement(ctx), getProject)
codeFragment.forceResolveScope(GlobalSearchScope.allScope(getProject))
DebuggerUtils.checkSyntax(codeFragment)
val evaluatorBuilder: EvaluatorBuilder = factory.getEvaluatorBuilder
val evaluator = evaluatorBuilder.build(codeFragment, currentSourcePosition)
val value = evaluator.evaluate(ctx)
val res = value match {
case v: VoidValue => "undefined"
case _ => DebuggerUtils.getValueAsString(ctx, value)
}
semaphore.up()
res
}
}
assert(semaphore.waitFor(10000), "Too long evaluate expression: " + codeText)
result
}
protected def evalEquals(codeText: String, expected: String) {
Assert.assertEquals(expected, evalResult(codeText))
}
protected def evalStartsWith(codeText: String, startsWith: String) {
val result = evalResult(codeText)
    Assert.assertTrue(result + " doesn't start with " + startsWith,
result.startsWith(startsWith))
}
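  // Added sketch of a typical test built on these helpers; the file name,
  // main class and line number below are invented for illustration:
  //
  //   addBreakpoint("Sample.scala", 4)
  //   runDebugger("Sample") {
  //     waitForBreakpoint()
  //     evalEquals("1 + 1", "2")
  //     evalStartsWith("new Object", "java.lang.Object")
  //   }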
protected def addOtherLibraries() = {}
def checkLocation(source: String, methodName: String, lineNumber: Int): Unit = {
def format(s: String, mn: String, ln: Int) = s"$s:$mn:$ln"
managed {
val location = suspendContext.getFrameProxy.getStackFrame.location
val expected = format(source, methodName, lineNumber)
val actualLine = inReadAction {
new ScalaPositionManager(getDebugProcess).getSourcePosition(location).getLine
}
val actual = format(location.sourceName, location.method().name(), actualLine + 1)
Assert.assertEquals("Wrong location:", expected, actual)
}
}
} | triggerNZ/intellij-scala | test/org/jetbrains/plugins/scala/debugger/ScalaDebuggerTestCase.scala | Scala | apache-2.0 | 8,440 |
package com.github.curzonj
import org.json4s._
import java.util.UUID
object JavaTypesSerializers {
val all = List(UUIDSerializer)
}
case object UUIDSerializer extends CustomSerializer[UUID](format => (
{
case JString(s) => UUID.fromString(s)
case JNull => null
},
{
case x: UUID => JString(x.toString)
}
)
)
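// Added usage sketch: json4s serializers are activated by appending them to an
// implicit Formats value, e.g.
//
//   implicit val formats: Formats = DefaultFormats ++ JavaTypesSerializers.all
//   Extraction.decompose(UUID.randomUUID())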
| curzonj/spacebox-scala-sim2 | src/main/scala/com/github/curzonj/JavaTypeSerializers.scala | Scala | agpl-3.0 | 335 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.dllib.nn
import com.intel.analytics.bigdl.dllib.tensor.{Storage, Tensor}
import com.intel.analytics.bigdl.dllib.utils.serializer.ModuleSerializationTest
import org.scalatest.{FlatSpec, Matchers}
import scala.util.Random
class ScaleSpec extends FlatSpec with Matchers{
val input = Tensor(Storage(Array(
1.3968338966, 1.0623255968, 0.0113903601, 1.6713322401,
1.2211480141, -1.4509917498, 0.9407374859, 1.3669412136,
-0.1955126524, -0.1213591248, -0.4367840588, 1.0001722574,
0.9071449637, 0.9841009378, 1.0343499184, -1.2177716494,
0.0444964543, -0.3394794762, 0.9685149193, 0.0013315412,
1.2855026722, -1.0687378645, -0.8125880957, 0.2460595369,
-0.5525790453, -1.4602638483, -0.6113839149, -0.4403405488,
0.8861535788, -1.1445546150, 0.3850491047, 0.2242770344,
1.5059198141, -0.3505563736, 0.3127737045, 0.7735480666,
0.5683772564, -1.3868474960, 1.3575958014, -1.9670666456,
0.9238662124, 1.6204817295, 0.4601316452, 0.6467041969,
0.8394199014, -0.9312202334, 0.5268830657, -0.0692716911,
-0.8119943738, 0.3967324793, -0.4461912215, -0.1913439631,
1.0490620136, -0.0018771883, -1.5796754360, 0.4819125235,
0.1086308882, 0.9048879743, 1.8832274675, 0.2517957985,
-0.1036709696, -0.4925992191, 0.0656546056, -1.0584318638,
0.2286393940, 0.7230707407, 0.8117146492, 0.8037125468,
-1.5480978489, 0.6889602542, 1.9156422615, -1.3656581640,
-0.1560046375, -0.2486510724, -0.0217402168, 1.2162036896,
0.6996396780, 0.6564534903, 0.8134143949, 0.0601990744,
1.0776667595, -0.5910157561, 1.0320621729, 0.0460616127,
-1.3710715771, -0.3773273826, 0.3672361672, 0.5238098502,
0.0119323730, -0.2517851293, 1.4480800629, 0.0828312412,
1.6660629511, -0.6199936271, -1.7660367489, 0.3837936223,
-0.7086514831, 1.6811115742, 0.6771110296, 1.1161595583,
0.1626710445, 0.4269112945, -0.4675120413, 1.5107829571,
0.6962547302, 0.0728486627, -0.3824745715, 0.5898073316,
0.1672963798, 1.2455582619, 2.1523268223, 1.0041118860,
-0.5187900066, -0.6860033870, -0.6559141874, 0.3032713532,
-2.0993845463, 0.2686079144, -0.4012460411, 0.6594560742))).resize(1, 4, 5, 6)
"scale forward backward" should "work properly" in {
val scale = new Scale[Double](Array(1, 4, 1, 1))
scale.parameters()._1(0).copy(Tensor(Storage(Array(0.4, 0.3, 0.2, 0.1)))) // weight
scale.parameters()._1(1).copy(Tensor(Storage(Array(0.1, 0.01, 0.03, 0.04)))) // bias
val expectedOutput = Tensor(Storage(Array(
0.6587336063, 0.5249302387, 0.1045561433, 0.7685329318,
0.5884591937, -0.4803967178, 0.4762949944, 0.6467764974,
0.0217949376, 0.0514563508, -0.0747136250, 0.5000689030,
0.4628579915, 0.4936403632, 0.5137400031, -0.3871086836,
0.1177985817, -0.0357917920, 0.4874059558, 0.1005326211,
0.6142011285, -0.3274951577, -0.2250352502, 0.1984238178,
-0.1210316196, -0.4841055572, -0.1445535719, -0.0761362240,
0.4544614255, -0.3578218520, 0.1255147308, 0.0772831142,
0.4617759585, -0.0951669216, 0.1038321108, 0.2420644313,
0.1805131882, -0.4060542881, 0.4172787368, -0.5801200271,
0.2871598601, 0.4961445332, 0.1480395049, 0.2040112764,
0.2618259788, -0.2693660855, 0.1680649370, -0.0107815079,
-0.2335983217, 0.1290197521, -0.1238573715, -0.0474031940,
0.3247185946, 0.0094368430, -0.4639026523, 0.1545737684,
0.0425892696, 0.2814663947, 0.5749682784, 0.0855387375,
0.0092658047, -0.0685198456, 0.0431309193, -0.1816863716,
0.0757278800, 0.1746141464, 0.1923429370, 0.1907425076,
-0.2796195745, 0.1677920520, 0.4131284654, -0.2431316376,
-0.0012009293, -0.0197302159, 0.0256519560, 0.2732407451,
0.1699279398, 0.1612907052, 0.1926828772, 0.0420398153,
0.2455333620, -0.0882031545, 0.2364124358, 0.0392123237,
-0.2442143261, -0.0454654768, 0.1034472361, 0.1347619742,
0.0323864743, -0.0203570258, 0.1848080158, 0.0482831225,
0.2066062987, -0.0219993629, -0.1366036832, 0.0783793628,
-0.0308651477, 0.2081111670, 0.1077111065, 0.1516159475,
0.0562671050, 0.0826911330, -0.0067512058, 0.1910783052,
0.1096254736, 0.0472848639, 0.0017525405, 0.0989807323,
0.0567296371, 0.1645558178, 0.2552326918, 0.1404111981,
-0.0118790008, -0.0286003426, -0.0255914181, 0.0703271329,
-0.1699384451, 0.0668607950, -0.0001246072, 0.1059456095))).resize(1, 4, 5, 6)
val output = scale.forward(input)
output.map(expectedOutput, (a, b) => {
assert(Math.abs(a - b) < 1e-6)
a
})
val outDiff = Tensor(Storage(Array(0.2203191519, 0.2832591236, 0.1163618937, 0.2043310404,
0.3571787775, 0.3585172594, 0.0502341278, 0.0828971490,
0.0205868818, 0.1763239354, 0.0119504845, 0.1827332824,
0.2596576214, 0.1113949195, 0.2705019712, 0.2363451272,
0.0095927529, 0.2235416472, 0.1037009880, 0.1660404801,
0.1134100333, 0.2772551775, 0.1761814803, 0.0627470985,
0.2178596109, 0.3121258914, 0.1225454137, 0.0887831524,
0.1551885009, 0.3745534718, 0.2927986383, 0.2017151117,
0.2708502412, 0.2537252605, 0.1133982167, 0.0276651029,
0.1960232854, 0.1673522294, 0.1084694341, 0.0675163567,
0.1219559833, 0.1406820863, 0.0807706788, 0.0875378400,
0.1373059303, 0.2581601739, 0.1758758873, 0.0850463584,
0.0833932534, 0.1363866329, 0.0616231076, 0.0604136176,
0.1542105228, 0.0261688121, 0.1450756639, 0.1086528674,
0.2123059928, 0.2240238786, 0.2073278874, 0.2067541331,
0.0747200251, 0.1336269677, 0.0679697320, 0.1145587713,
0.0651614293, 0.0890290067, 0.0123057868, 0.0485350862,
0.1943205297, 0.0461168401, 0.1382955164, 0.1300953776,
0.1447878331, 0.0950177237, 0.1193327531, 0.0133938855,
0.0145124272, 0.0397952050, 0.0303722005, 0.0200208705,
0.0258587729, 0.1106555462, 0.0375629663, 0.1904202551,
0.1363223642, 0.1082039401, 0.1414361298, 0.0527773313,
0.1853451431, 0.1678386182, 0.0726319477, 0.0480239950,
0.0842103213, 0.0744752362, 0.0660325885, 0.0913975239,
0.0633665547, 0.0365940593, 0.0552844591, 0.0196380578,
0.0192072298, 0.0725669637, 0.0784936771, 0.0972098336,
0.0850971416, 0.0543594323, 0.0089790877, 0.0488873236,
0.0927936360, 0.0787618235, 0.0485094227, 0.0455279350,
0.0217985772, 0.0177213382, 0.0073623671, 0.0892393216,
0.0640176609, 0.0143332323, 0.0414126925, 0.0049108928))).resize(1, 4, 5, 6)
val expectedGradInput = Tensor(Storage(Array(
0.0881276652, 0.1133036539, 0.0465447567, 0.0817324147,
0.1428715140, 0.1434069127, 0.0200936515, 0.0331588611,
0.0082347533, 0.0705295727, 0.0047801938, 0.0730933174,
0.1038630530, 0.0445579700, 0.1082007885, 0.0945380554,
0.0038371012, 0.0894166604, 0.0414803959, 0.0664161965,
0.0453640148, 0.1109020710, 0.0704725906, 0.0250988398,
0.0871438459, 0.1248503551, 0.0490181670, 0.0355132632,
0.0620754026, 0.1498213857, 0.0878395960, 0.0605145358,
0.0812550783, 0.0761175826, 0.0340194665, 0.0082995314,
0.0588069893, 0.0502056703, 0.0325408317, 0.0202549081,
0.0365867950, 0.0422046259, 0.0242312048, 0.0262613539,
0.0411917791, 0.0774480551, 0.0527627692, 0.0255139079,
0.0250179768, 0.0409159921, 0.0184869338, 0.0181240868,
0.0462631583, 0.0078506442, 0.0435227007, 0.0325958617,
0.0636918023, 0.0672071651, 0.0621983670, 0.0620262437,
0.0149440048, 0.0267253947, 0.0135939466, 0.0229117554,
0.0130322864, 0.0178058017, 0.0024611575, 0.0097070178,
0.0388641059, 0.0092233680, 0.0276591033, 0.0260190759,
0.0289575662, 0.0190035459, 0.0238665510, 0.0026787771,
0.0029024854, 0.0079590408, 0.0060744402, 0.0040041744,
0.0051717549, 0.0221311096, 0.0075125932, 0.0380840525,
0.0272644740, 0.0216407888, 0.0282872263, 0.0105554666,
0.0370690301, 0.0335677229, 0.0072631948, 0.0048023998,
0.0084210327, 0.0074475235, 0.0066032591, 0.0091397529,
0.0063366555, 0.0036594060, 0.0055284458, 0.0019638059,
0.0019207230, 0.0072566965, 0.0078493683, 0.0097209839,
0.0085097142, 0.0054359431, 0.0008979088, 0.0048887325,
0.0092793638, 0.0078761829, 0.0048509422, 0.0045527937,
0.0021798578, 0.0017721339, 0.0007362367, 0.0089239320,
0.0064017661, 0.0014333233, 0.0041412693, 0.0004910893
))).resize(1, 4, 5, 6)
val diff = scale.backward(input, outDiff)
diff.map(expectedGradInput, (a, b) => {
assert(Math.abs(a - b) < 1e-6)
a
})
}
"scale zeroParameter" should "work" in {
val scale = new Scale[Double](Array(1, 4, 1, 1))
scale.parameters()._1(0).copy(Tensor(Storage(Array(0.4, 0.3, 0.2, 0.1)))) // weight
scale.parameters()._1(1).copy(Tensor(Storage(Array(0.1, 0.01, 0.03, 0.04)))) // bias
val output = scale.forward(input)
val gradOutput = Tensor[Double](1, 4, 5, 6).randn()
scale.backward(input, gradOutput)
println(scale.parameters()._2(0))
println(scale.parameters()._2(1))
scale.zeroGradParameters()
scale.parameters()._2(0).apply1(x => {
assert(x == 0); x
})
scale.parameters()._2(1).apply1(x => {
assert(x == 0); x
})
}
}
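// Added note (sketch of the semantics under test): Scale applies a
// per-channel affine transform, broadcasting the (1, 4, 1, 1) weight and bias
// over the (1, 4, 5, 6) input, i.e. out(n,c,h,w) = in(n,c,h,w) * w(c) + b(c).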
class ScaleSerialTest extends ModuleSerializationTest {
override def test(): Unit = {
val scale = Scale[Float](Array(1, 4, 1, 1)).setName("scale")
val input = Tensor[Float](1, 4, 5, 6).apply1(_ => Random.nextFloat())
runSerializationTest(scale, input)
}
}
| intel-analytics/BigDL | scala/dllib/src/test/scala/com/intel/analytics/bigdl/dllib/nn/ScaleSpec.scala | Scala | apache-2.0 | 10,143 |
/*
* Copyright (C) 2016 University of Basel, Graphics and Vision Research Group
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package scalismo.ui.view.action
import scalismo.ui.resources.icons.BundledIcon
import scalismo.ui.view.ScalismoFrame
import scalismo.ui.view.dialog.BackgroundColorDialog
import scala.swing.Action
class ShowBackgroundColorDialogAction(name: String = "Background Color")(implicit val frame: ScalismoFrame)
extends Action(name) {
icon = BundledIcon.Background.standardSized()
override def apply(): Unit = {
new BackgroundColorDialog().visible = true
}
}
| unibas-gravis/scalismo-ui | src/main/scala/scalismo/ui/view/action/ShowBackgroundColorDialogAction.scala | Scala | gpl-3.0 | 1,205 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.hive.execution
import java.io.File
import com.google.common.io.Files
import org.apache.hadoop.fs.{FileContext, FsConstants, Path}
import org.apache.spark.sql.{AnalysisException, QueryTest, Row, SaveMode}
import org.apache.spark.sql.catalyst.TableIdentifier
import org.apache.spark.sql.catalyst.analysis.NoSuchTableException
import org.apache.spark.sql.catalyst.catalog.{CatalogStorageFormat, CatalogTable, CatalogTableType}
import org.apache.spark.sql.execution.command.LoadDataCommand
import org.apache.spark.sql.hive.test.TestHiveSingleton
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.test.SQLTestUtils
import org.apache.spark.sql.types.StructType
class HiveCommandSuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
import testImplicits._
protected override def beforeAll(): Unit = {
super.beforeAll()
// Use catalog to create table instead of SQL string here, because we don't support specifying
// table properties for data source table with SQL API now.
hiveContext.sessionState.catalog.createTable(
CatalogTable(
identifier = TableIdentifier("parquet_tab1"),
tableType = CatalogTableType.MANAGED,
storage = CatalogStorageFormat.empty,
schema = new StructType().add("c1", "int").add("c2", "string"),
provider = Some("parquet"),
properties = Map("my_key1" -> "v1")
),
ignoreIfExists = false
)
sql(
"""
|CREATE TABLE parquet_tab2 (c1 INT, c2 STRING)
|STORED AS PARQUET
|TBLPROPERTIES('prop1Key'="prop1Val", '`prop2Key`'="prop2Val")
""".stripMargin)
sql("CREATE TABLE parquet_tab3(col1 int, `col 2` int) USING hive")
sql("CREATE TABLE parquet_tab4 (price int, qty int) partitioned by (year int, month int)")
sql("INSERT INTO parquet_tab4 PARTITION(year = 2015, month = 1) SELECT 1, 1")
sql("INSERT INTO parquet_tab4 PARTITION(year = 2015, month = 2) SELECT 2, 2")
sql("INSERT INTO parquet_tab4 PARTITION(year = 2016, month = 2) SELECT 3, 3")
sql("INSERT INTO parquet_tab4 PARTITION(year = 2016, month = 3) SELECT 3, 3")
sql(
"""
|CREATE TABLE parquet_tab5 (price int, qty int)
|PARTITIONED BY (year int, month int, hour int, minute int, sec int, extra int)
""".stripMargin)
sql(
"""
|INSERT INTO parquet_tab5
|PARTITION(year = 2016, month = 3, hour = 10, minute = 10, sec = 10, extra = 1) SELECT 3, 3
""".stripMargin)
sql(
"""
|INSERT INTO parquet_tab5
|PARTITION(year = 2016, month = 4, hour = 10, minute = 10, sec = 10, extra = 1) SELECT 3, 3
""".stripMargin)
sql("CREATE VIEW parquet_view1 as select * from parquet_tab4")
}
override protected def afterAll(): Unit = {
try {
sql("DROP TABLE IF EXISTS parquet_tab1")
sql("DROP TABLE IF EXISTS parquet_tab2")
sql("DROP TABLE IF EXISTS parquet_tab3")
sql("DROP VIEW IF EXISTS parquet_view1")
sql("DROP TABLE IF EXISTS parquet_tab4")
sql("DROP TABLE IF EXISTS parquet_tab5")
} finally {
super.afterAll()
}
}
test("show tables") {
withTable("show1a", "show2b") {
sql("CREATE TABLE show1a(c1 int)")
sql("CREATE TABLE show2b(c2 int)")
checkAnswer(
sql("SHOW TABLES IN default 'show1*'"),
Row("default", "show1a", false) :: Nil)
checkAnswer(
sql("SHOW TABLES IN default 'show1*|show2*'"),
Row("default", "show1a", false) ::
Row("default", "show2b", false) :: Nil)
checkAnswer(
sql("SHOW TABLES 'show1*|show2*'"),
Row("default", "show1a", false) ::
Row("default", "show2b", false) :: Nil)
assert(
sql("SHOW TABLES").count() >= 2)
assert(
sql("SHOW TABLES IN default").count() >= 2)
}
}
test("show views") {
withView("show1a", "show2b", "global_temp.temp1", "temp2") {
sql("CREATE VIEW show1a AS SELECT 1 AS id")
sql("CREATE VIEW show2b AS SELECT 1 AS id")
sql("CREATE GLOBAL TEMP VIEW temp1 AS SELECT 1 AS id")
sql("CREATE TEMP VIEW temp2 AS SELECT 1 AS id")
checkAnswer(
sql("SHOW VIEWS"),
Row("default", "show1a", false) ::
Row("default", "show2b", false) ::
Row("default", "parquet_view1", false) ::
Row("", "temp2", true) :: Nil)
checkAnswer(
sql("SHOW VIEWS IN default"),
Row("default", "show1a", false) ::
Row("default", "show2b", false) ::
Row("default", "parquet_view1", false) ::
Row("", "temp2", true) :: Nil)
checkAnswer(
sql("SHOW VIEWS FROM default"),
Row("default", "show1a", false) ::
Row("default", "show2b", false) ::
Row("default", "parquet_view1", false) ::
Row("", "temp2", true) :: Nil)
checkAnswer(
sql("SHOW VIEWS FROM global_temp"),
Row("global_temp", "temp1", true) ::
Row("", "temp2", true) :: Nil)
checkAnswer(
sql("SHOW VIEWS 'show1*|show2*'"),
Row("default", "show1a", false) ::
Row("default", "show2b", false) :: Nil)
checkAnswer(
sql("SHOW VIEWS LIKE 'show1*|show2*'"),
Row("default", "show1a", false) ::
Row("default", "show2b", false) :: Nil)
checkAnswer(
sql("SHOW VIEWS IN default 'show1*'"),
Row("default", "show1a", false) :: Nil)
checkAnswer(
sql("SHOW VIEWS IN default LIKE 'show1*|show2*'"),
Row("default", "show1a", false) ::
Row("default", "show2b", false) :: Nil)
}
}
test("show tblproperties of data source tables - basic") {
checkAnswer(
sql("SHOW TBLPROPERTIES parquet_tab1").filter(s"key = 'my_key1'"),
Row("my_key1", "v1") :: Nil
)
checkAnswer(
sql(s"SHOW TBLPROPERTIES parquet_tab1('my_key1')"),
Row("v1") :: Nil
)
}
test("show tblproperties for datasource table - errors") {
val message = intercept[AnalysisException] {
sql("SHOW TBLPROPERTIES badtable")
}.getMessage
assert(message.contains("Table or view not found: badtable"))
// When key is not found, a row containing the error is returned.
checkAnswer(
sql("SHOW TBLPROPERTIES parquet_tab1('invalid.prop.key')"),
Row("Table default.parquet_tab1 does not have property: invalid.prop.key") :: Nil
)
}
test("show tblproperties for hive table") {
checkAnswer(sql("SHOW TBLPROPERTIES parquet_tab2('prop1Key')"), Row("prop1Val"))
checkAnswer(sql("SHOW TBLPROPERTIES parquet_tab2('`prop2Key`')"), Row("prop2Val"))
}
Seq(true, false).foreach { local =>
val loadQuery = if (local) "LOAD DATA LOCAL" else "LOAD DATA"
test(loadQuery) {
testLoadData(loadQuery, local)
}
}
private def testLoadData(loadQuery: String, local: Boolean): Unit = {
// employee.dat has two columns separated by '|', the first is an int, the second is a string.
// Its content looks like:
// 16|john
// 17|robert
val testData = hiveContext.getHiveFile("data/files/employee.dat").getCanonicalFile()
/**
* Run a function with a copy of the input data file when running with non-local input. The
* semantics in this mode are that the input file is moved to the destination, so we have
* to make a copy so that subsequent tests have access to the original file.
*/
def withInputFile(fn: File => Unit): Unit = {
if (local) {
fn(testData)
} else {
val tmp = File.createTempFile(testData.getName(), ".tmp")
Files.copy(testData, tmp)
try {
fn(tmp)
} finally {
tmp.delete()
}
}
}
withTable("non_part_table", "part_table") {
sql(
"""
|CREATE TABLE non_part_table (employeeID INT, employeeName STRING)
|ROW FORMAT DELIMITED
|FIELDS TERMINATED BY '|'
|LINES TERMINATED BY '\n'
""".stripMargin)
// LOAD DATA INTO non-partitioned table can't specify partition
intercept[AnalysisException] {
sql(
s"""$loadQuery INPATH "${testData.toURI}" INTO TABLE non_part_table PARTITION(ds="1")""")
}
withInputFile { path =>
sql(s"""$loadQuery INPATH "${path.toURI}" INTO TABLE non_part_table""")
// Non-local mode is expected to move the file, while local mode is expected to copy it.
// Check once here that the behavior is the expected.
assert(local === path.exists())
}
checkAnswer(
sql("SELECT * FROM non_part_table WHERE employeeID = 16"),
Row(16, "john") :: Nil)
// Incorrect URI.
// file://path/to/data/files/employee.dat
//
// TODO: need a similar test for non-local mode.
if (local) {
val incorrectUri = "file://path/to/data/files/employee.dat"
intercept[AnalysisException] {
sql(s"""LOAD DATA LOCAL INPATH "$incorrectUri" INTO TABLE non_part_table""")
}
}
// Use URI as inpath:
// file:/path/to/data/files/employee.dat
withInputFile { path =>
sql(s"""$loadQuery INPATH "${path.toURI}" INTO TABLE non_part_table""")
}
checkAnswer(
sql("SELECT * FROM non_part_table WHERE employeeID = 16"),
Row(16, "john") :: Row(16, "john") :: Nil)
// Overwrite existing data.
withInputFile { path =>
sql(s"""$loadQuery INPATH "${path.toURI}" OVERWRITE INTO TABLE non_part_table""")
}
checkAnswer(
sql("SELECT * FROM non_part_table WHERE employeeID = 16"),
Row(16, "john") :: Nil)
sql(
"""
|CREATE TABLE part_table (employeeID INT, employeeName STRING)
|PARTITIONED BY (c STRING, d STRING)
|ROW FORMAT DELIMITED
|FIELDS TERMINATED BY '|'
|LINES TERMINATED BY '\n'
""".stripMargin)
// LOAD DATA INTO partitioned table must specify partition
withInputFile { f =>
val path = f.toURI
intercept[AnalysisException] {
sql(s"""$loadQuery INPATH "$path" INTO TABLE part_table""")
}
intercept[AnalysisException] {
sql(s"""$loadQuery INPATH "$path" INTO TABLE part_table PARTITION(c="1")""")
}
intercept[AnalysisException] {
sql(s"""$loadQuery INPATH "$path" INTO TABLE part_table PARTITION(d="1")""")
}
intercept[AnalysisException] {
sql(s"""$loadQuery INPATH "$path" INTO TABLE part_table PARTITION(c="1", k="2")""")
}
}
withInputFile { f =>
sql(s"""$loadQuery INPATH "${f.toURI}" INTO TABLE part_table PARTITION(c="1", d="2")""")
}
checkAnswer(
sql("SELECT employeeID, employeeName FROM part_table WHERE c = '1' AND d = '2'"),
sql("SELECT * FROM non_part_table").collect())
// Different order of partition columns.
withInputFile { f =>
sql(s"""$loadQuery INPATH "${f.toURI}" INTO TABLE part_table PARTITION(d="1", c="2")""")
}
checkAnswer(
sql("SELECT employeeID, employeeName FROM part_table WHERE c = '2' AND d = '1'"),
sql("SELECT * FROM non_part_table"))
}
}
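  // Added note: the asymmetry exercised above is that non-local LOAD DATA
  // moves the input file into the table location while LOAD DATA LOCAL copies
  // it, which is why `withInputFile` hands each non-local invocation a fresh
  // temporary copy of the fixture.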
test("SPARK-28084 case insensitive names of static partitioning in INSERT commands") {
withTable("part_table") {
withSQLConf(SQLConf.CASE_SENSITIVE.key -> "false") {
sql("CREATE TABLE part_table (price int, qty int) partitioned by (year int, month int)")
sql("INSERT INTO part_table PARTITION(YEar = 2015, month = 1) SELECT 1, 1")
checkAnswer(sql("SELECT * FROM part_table"), Row(1, 1, 2015, 1))
}
}
}
test("SPARK-28084 case insensitive names of dynamic partitioning in INSERT commands") {
withTable("part_table") {
withSQLConf(
SQLConf.CASE_SENSITIVE.key -> "false",
"hive.exec.dynamic.partition.mode" -> "nonstrict") {
sql("CREATE TABLE part_table (price int) partitioned by (year int)")
sql("INSERT INTO part_table PARTITION(YEar) SELECT 1, 2019")
checkAnswer(sql("SELECT * FROM part_table"), Row(1, 2019))
}
}
}
test("Truncate Table") {
withTable("non_part_table", "part_table") {
sql(
"""
|CREATE TABLE non_part_table (employeeID INT, employeeName STRING)
|ROW FORMAT DELIMITED
|FIELDS TERMINATED BY '|'
|LINES TERMINATED BY '\n'
""".stripMargin)
val testData = hiveContext.getHiveFile("data/files/employee.dat").toURI
sql(s"""LOAD DATA LOCAL INPATH "$testData" INTO TABLE non_part_table""")
checkAnswer(
sql("SELECT * FROM non_part_table WHERE employeeID = 16"),
Row(16, "john") :: Nil)
val testResults = sql("SELECT * FROM non_part_table").collect()
sql("TRUNCATE TABLE non_part_table")
checkAnswer(sql("SELECT * FROM non_part_table"), Seq.empty[Row])
sql(
"""
|CREATE TABLE part_table (employeeID INT, employeeName STRING)
|PARTITIONED BY (c STRING, d STRING)
|ROW FORMAT DELIMITED
|FIELDS TERMINATED BY '|'
|LINES TERMINATED BY '\n'
""".stripMargin)
sql(s"""LOAD DATA LOCAL INPATH "$testData" INTO TABLE part_table PARTITION(c="1", d="1")""")
checkAnswer(
sql("SELECT employeeID, employeeName FROM part_table WHERE c = '1' AND d = '1'"),
testResults)
sql(s"""LOAD DATA LOCAL INPATH "$testData" INTO TABLE part_table PARTITION(c="1", d="2")""")
checkAnswer(
sql("SELECT employeeID, employeeName FROM part_table WHERE c = '1' AND d = '2'"),
testResults)
sql(s"""LOAD DATA LOCAL INPATH "$testData" INTO TABLE part_table PARTITION(c="2", d="2")""")
checkAnswer(
sql("SELECT employeeID, employeeName FROM part_table WHERE c = '2' AND d = '2'"),
testResults)
sql("TRUNCATE TABLE part_table PARTITION(c='1', d='1')")
checkAnswer(
sql("SELECT employeeID, employeeName FROM part_table WHERE c = '1' AND d = '1'"),
Seq.empty[Row])
checkAnswer(
sql("SELECT employeeID, employeeName FROM part_table WHERE c = '1' AND d = '2'"),
testResults)
sql("TRUNCATE TABLE part_table PARTITION(c='1')")
checkAnswer(
sql("SELECT employeeID, employeeName FROM part_table WHERE c = '1'"),
Seq.empty[Row])
sql("TRUNCATE TABLE part_table")
checkAnswer(
sql("SELECT employeeID, employeeName FROM part_table"),
Seq.empty[Row])
}
}
test("show partitions - show everything") {
checkAnswer(
sql("show partitions parquet_tab4"),
Row("year=2015/month=1") ::
Row("year=2015/month=2") ::
Row("year=2016/month=2") ::
Row("year=2016/month=3") :: Nil)
checkAnswer(
sql("show partitions default.parquet_tab4"),
Row("year=2015/month=1") ::
Row("year=2015/month=2") ::
Row("year=2016/month=2") ::
Row("year=2016/month=3") :: Nil)
}
test("show partitions - show everything more than 5 part keys") {
checkAnswer(
sql("show partitions parquet_tab5"),
Row("year=2016/month=3/hour=10/minute=10/sec=10/extra=1") ::
Row("year=2016/month=4/hour=10/minute=10/sec=10/extra=1") :: Nil)
}
test("show partitions - filter") {
checkAnswer(
sql("show partitions default.parquet_tab4 PARTITION(year=2015)"),
Row("year=2015/month=1") ::
Row("year=2015/month=2") :: Nil)
checkAnswer(
sql("show partitions default.parquet_tab4 PARTITION(year=2015, month=1)"),
Row("year=2015/month=1") :: Nil)
checkAnswer(
sql("show partitions default.parquet_tab4 PARTITION(month=2)"),
Row("year=2015/month=2") ::
Row("year=2016/month=2") :: Nil)
}
test("show partitions - empty row") {
withTempView("parquet_temp") {
sql(
"""
|CREATE TEMPORARY VIEW parquet_temp (c1 INT, c2 STRING)
|USING org.apache.spark.sql.parquet.DefaultSource
""".stripMargin)
// SHOW PARTITIONS is not supported on a session temporary view, so it throws NoSuchTableException.
intercept[NoSuchTableException] {
sql("SHOW PARTITIONS parquet_temp")
}
val message1 = intercept[AnalysisException] {
sql("SHOW PARTITIONS parquet_tab3")
}.getMessage
assert(message1.contains("not allowed on a table that is not partitioned"))
val message2 = intercept[AnalysisException] {
sql("SHOW PARTITIONS parquet_tab4 PARTITION(abcd=2015, xyz=1)")
}.getMessage
assert(message2.contains("Non-partitioning column(s) [abcd, xyz] are specified"))
val message3 = intercept[AnalysisException] {
sql("SHOW PARTITIONS parquet_view1")
}.getMessage
assert(message3.contains("is not allowed on a view"))
}
}
test("show partitions - datasource") {
withTable("part_datasrc") {
val df = (1 to 3).map(i => (i, s"val_$i", i * 2)).toDF("a", "b", "c")
df.write
.partitionBy("a")
.format("parquet")
.mode(SaveMode.Overwrite)
.saveAsTable("part_datasrc")
assert(sql("SHOW PARTITIONS part_datasrc").count() == 3)
}
}
test("SPARK-25918: LOAD DATA LOCAL INPATH should handle a relative path") {
val localFS = FileContext.getLocalFSFileContext()
val workingDir = localFS.getWorkingDirectory
val r = LoadDataCommand.makeQualified(
FsConstants.LOCAL_FS_URI, workingDir, new Path("kv1.txt"))
assert(r === new Path(s"$workingDir/kv1.txt"))
}
}
| dbtsai/spark | sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveCommandSuite.scala | Scala | apache-2.0 | 18,361 |
package org.scaladebugger.api.lowlevel.threads
import org.scaladebugger.api.lowlevel.requests.JDIRequestArgument
import org.scaladebugger.api.utils.PendingActionManager
import scala.util.{Success, Try}
/**
* Provides pending thread start capabilities to an existing
* thread start manager.
*/
trait PendingThreadStartSupport extends PendingThreadStartSupportLike {
/**
* Represents the manager used to store pending thread start requests and
* process them later.
*/
protected val pendingActionManager: PendingActionManager[ThreadStartRequestInfo]
/**
* Processes all pending thread start requests.
*
* @return The collection of successfully-processed thread start requests
*/
override def processAllPendingThreadStartRequests(): Seq[ThreadStartRequestInfo] = {
pendingActionManager.processAllActions().map(_.data)
}
/**
* Retrieves a list of pending thread start requests.
*
* @return The collection of thread start requests
*/
override def pendingThreadStartRequests: Seq[ThreadStartRequestInfo] = {
pendingActionManager.getPendingActionData(_ => true)
}
/**
* Creates a new thread start request.
*
* @param requestId The id of the request used to retrieve and delete it
* @param extraArguments Any additional arguments to provide to the request
*
* @return Success(id) if successful, otherwise Failure
*/
abstract override def createThreadStartRequestWithId(
requestId: String,
extraArguments: JDIRequestArgument*
): Try[String] = {
def createThreadStartRequest() = super.createThreadStartRequestWithId(
requestId,
extraArguments: _*
)
val result = createThreadStartRequest()
// If failed, add as pending
result.recoverWith {
case _: Throwable if isPendingSupportEnabled =>
pendingActionManager.addPendingActionWithId(
requestId,
ThreadStartRequestInfo(requestId, isPending = true, extraArguments),
() => createThreadStartRequest().get
)
Success(requestId)
case _: Throwable => result
}
}
/**
* Removes the specified thread start request.
*
* @param id The id of the thread start request
*
* @return True if the thread start request was removed (if it existed),
* otherwise false
*/
abstract override def removeThreadStartRequest(id: String): Boolean = {
val result = super.removeThreadStartRequest(id)
val pendingResult = pendingActionManager.removePendingActionsWithId(id)
// True if we removed a real thread start request or any
// pending thread start request
result || pendingResult.getOrElse(Nil).nonEmpty
}
}
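// A minimal usage sketch (the concrete manager class below is hypothetical,
// not defined in this file): the trait is designed to be stacked on a real
// thread start manager so that `abstract override` delegates to it first.
//
//   val manager = new StandardThreadStartManager(eventRequestManager)
//     with PendingThreadStartSupport {
//       override protected val pendingActionManager =
//         new PendingActionManager[ThreadStartRequestInfo]
//     }
//
// Requests that fail while pending support is enabled are queued and can be
// replayed later via processAllPendingThreadStartRequests().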
| ensime/scala-debugger | scala-debugger-api/src/main/scala/org/scaladebugger/api/lowlevel/threads/PendingThreadStartSupport.scala | Scala | apache-2.0 | 2,685 |
package turkey.tasks
import upickle.default._
trait ResponseReader[Request <: { type Response }] {
def getReader(request: Request): Reader[request.Response]
}
trait ResponseWriter[Request <: { type Response }] {
def getWriter(request: Request): Writer[request.Response]
}
trait ResponseRW[
Request <: { type Response }
] extends ResponseReader[Request] with ResponseWriter[Request]
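// A minimal sketch of providing an instance (the `Ping` request type below is
// hypothetical, not part of this library): a request fixes its `Response`
// type member, and the instance simply delegates to upickle's readers and
// writers for that concrete response type.
object ResponseRWExample {
  case class Ping(message: String) { type Response = String }
  implicit val pingRW: ResponseRW[Ping] = new ResponseRW[Ping] {
    def getReader(request: Ping): Reader[request.Response] = implicitly[Reader[String]]
    def getWriter(request: Ping): Writer[request.Response] = implicitly[Writer[String]]
  }
}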
| julianmichael/turkey | turkey/shared/src/main/scala/turkey/tasks/ResponseRW.scala | Scala | mit | 392 |
package algorithms.graphs.flow
import internal.Preconditions
import structures.graph.UnDirectedGraph
import scala.collection.mutable
import scala.collection.mutable.ArrayBuffer
class FordFulkerson {
private val NO_PREVIOUS_NODE_IN_FLOW = -1
def maxFlow(graph: UnDirectedGraph, source: Int, destination: Int): Int = {
Preconditions.checkThat(source <= graph.getNodeNum)
Preconditions.checkThat(destination <= graph.getNodeNum)
Preconditions.checkThat(source != destination)
val capacity = getInitialCapacity(graph)
var result = 0
var capacityToGain = Int.MaxValue
while (capacityToGain > 0) {
capacityToGain = findCapacityOfAugmentingPath(graph, source, destination, capacity)
result += capacityToGain
}
result
}
private def findCapacityOfAugmentingPath(graph: UnDirectedGraph, source: Int,
destination: Int, capacity: Array[Array[Int]]): Int = {
val visited = new Array[Boolean](graph.getNodeNum + 1)
val from = Array.fill[Int](graph.getNodeNum + 1)(NO_PREVIOUS_NODE_IN_FLOW)
visited(source) = true
val queue = new mutable.Queue[Int]
queue += source
while (queue.nonEmpty) {
val where: Int = queue.dequeue
for (adjacent: Int <- getAdjacent(graph, where)
if !visited(adjacent)
if capacity(where)(adjacent) > 0) {
queue += adjacent
visited(adjacent) = true
from(adjacent) = where
if (destination == adjacent) {
queue.clear()
}
}
}
val pathCapacity = getAugmentedPathCapacity(capacity, from, destination)
updateResidualNetworkCapacity(capacity, from, destination, pathCapacity)
pathCapacity
}
def getAdjacent(graph: UnDirectedGraph, where: Int): Array[Int] = {
val edges = ArrayBuffer.empty[Int]
for (edge <- graph.getEdges) {
if (edge.getDestination == where
&& edge.getSource != where) {
edges += edge.getSource
} else if (edge.getSource == where
&& edge.getDestination != where) {
edges += edge.getDestination
}
}
edges.toArray
}
private def getInitialCapacity(graph: UnDirectedGraph): Array[Array[Int]] = {
val capacity: Array[Array[Int]] = Array.fill[Int](graph.getNodeNum + 1, graph.getNodeNum + 1)(0)
for (edge <- graph.getEdges) {
val existing = capacity(edge.getSource)(edge.getDestination)
// model multiple edges from a -> b as a single edge of combined weight
val newCapacity = existing + edge.getLength
capacity(edge.getSource)(edge.getDestination) = newCapacity
capacity(edge.getDestination)(edge.getSource) = newCapacity
}
capacity
}
private def getAugmentedPathCapacity(capacity: Array[Array[Int]],
from: Array[Int],
destination: Int): Int = {
var pathCapacity = Int.MaxValue
var where = destination
var prev = from(where)
while (prev != NO_PREVIOUS_NODE_IN_FLOW) {
val capacityFromThisEdge = capacity(prev)(where)
pathCapacity = StrictMath.min(pathCapacity, capacityFromThisEdge)
where = prev
prev = from(where)
}
if (where == destination) { // destination was not reachable
pathCapacity = 0
}
pathCapacity
}
private def updateResidualNetworkCapacity(capacity: Array[Array[Int]],
from: Array[Int],
destination: Int,
pathCapacity: Int) = {
var where = destination
while (from(where) != NO_PREVIOUS_NODE_IN_FLOW) {
val prev = from(where)
capacity(prev)(where) -= pathCapacity
capacity(where)(prev) += pathCapacity
where = prev
}
}
}
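// A minimal usage sketch (the UnDirectedGraph construction is hypothetical;
// only getNodeNum and getEdges are actually used above):
//
//   val graph: UnDirectedGraph = ... // e.g. nodes 1..4 with weighted edges
//   val maxFlow = new FordFulkerson().maxFlow(graph, source = 1, destination = 4)
//
// Each BFS pass finds one augmenting path and adds its bottleneck capacity to
// the running total; the loop stops when no augmenting path remains.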
| willb611/algorithms | src/main/scala/algorithms/graphs/flow/FordFulkerson.scala | Scala | apache-2.0 | 3,776 |
package com.gx.mediator
/**
* Copyright 2017 josephguan
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
object App extends App {
val china = new China
val usa = new USA
val canada = new Canada
val united = new UnitedNations
united.addMember(china)
united.addMember(usa)
united.addMember(canada)
usa.declare("Hello World")
}
| josephguan/scala-design-patterns | behavioral/mediator/src/main/scala/com/gx/mediator/App.scala | Scala | apache-2.0 | 874 |
package scala.macros.tests
package scaladays
import scala.macros._
import scala.language.experimental.macros
class main extends MacroAnnotation {
def apply(defn: Any): Any = macro {
val q"..$mods object $name extends ..$inits { $self => ..$stats }" = defn
val main = q"def main(args: Array[String]): Unit = { ..$stats }"
q"..$mods object $name extends ..$inits { $self => $main }"
}
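  // Usage sketch (hedged, mirroring the quasiquotes above): annotating
  //
  //   @main object Hello { println("hi") }
  //
  // rewrites the object's statements into the body of
  // `def main(args: Array[String]): Unit = { ... }`.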
} | xeno-by/scalamacros | tests/macros/src/main/scala/scala/macros/tests/scaladays/main.scala | Scala | bsd-3-clause | 402 |
package monocle
import monocle.law.discipline.{SetterTests, TraversalTests}
import org.scalacheck.Arbitrary
import org.scalacheck.Arbitrary.arbitrary
import cats.Eq
import cats.arrow.{Category, Choice, Compose}
import cats.data.{Chain, NonEmptyChain, NonEmptyList, NonEmptyVector}
import cats.syntax.either._
import scala.collection.immutable
class TraversalSpec extends MonocleSuite {
case class Location(latitude: Int, longitude: Int, name: String)
val coordinates: Traversal[Location, Int] =
Traversal.apply2[Location, Int](_.latitude, _.longitude) { case (newLat, newLong, oldLoc) =>
oldLoc.copy(latitude = newLat, longitude = newLong)
}
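  // For example, `coordinates.modify(_ + 1)(Location(1, 2, "x"))` updates both
  // focused fields at once, yielding Location(2, 3, "x").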
def eachL[A]: Traversal[List[A], A] = PTraversal.fromTraverse[List, A, A]
val eachLi: Traversal[List[Int], Int] = eachL[Int]
def eachL2[A, B]: Traversal[List[(A, B)], (A, B)] = eachL[(A, B)]
implicit val locationGen: Arbitrary[Location] = Arbitrary(for {
x <- arbitrary[Int]
y <- arbitrary[Int]
n <- arbitrary[String]
} yield Location(x, y, n))
implicit val exampleEq: Eq[Location] = Eq.fromUniversalEquals[Location]
  // Below we test a Traversal built from 7 lenses using applyN
// the test object
case class ManyPropObject(p1: Int, p2: Int, p3: String, p4: Int, p5: Int, p6: Int, p7: Int, p8: Int)
  // the 7 lenses, one for each Int property of the test object (p3 is a String and is skipped)
val l1: Lens[ManyPropObject, Int] = Lens((_: ManyPropObject).p1)(newValue => _.copy(p1 = newValue))
val l2: Lens[ManyPropObject, Int] = Lens((_: ManyPropObject).p2)(newValue => _.copy(p2 = newValue))
val l3: Lens[ManyPropObject, Int] = Lens((_: ManyPropObject).p4)(newValue => _.copy(p4 = newValue))
val l4: Lens[ManyPropObject, Int] = Lens((_: ManyPropObject).p5)(newValue => _.copy(p5 = newValue))
val l5: Lens[ManyPropObject, Int] = Lens((_: ManyPropObject).p6)(newValue => _.copy(p6 = newValue))
val l6: Lens[ManyPropObject, Int] = Lens((_: ManyPropObject).p7)(newValue => _.copy(p7 = newValue))
val l7: Lens[ManyPropObject, Int] = Lens((_: ManyPropObject).p8)(newValue => _.copy(p8 = newValue))
// the 7-lenses Traversal generated using applyN
val traversalN: Traversal[ManyPropObject, Int] =
Traversal.applyN(l1, l2, l3, l4, l5, l6, l7)
  // the Arbitrary instance used to generate random test objects
implicit val manyPropObjectGen: Arbitrary[ManyPropObject] = Arbitrary(for {
p1 <- arbitrary[Int]
p2 <- arbitrary[Int]
p3 <- arbitrary[String]
p4 <- arbitrary[Int]
p5 <- arbitrary[Int]
p6 <- arbitrary[Int]
p7 <- arbitrary[Int]
p8 <- arbitrary[Int]
} yield ManyPropObject(p1, p2, p3, p4, p5, p6, p7, p8))
implicit val eqForManyPropObject: Eq[ManyPropObject] = Eq.fromUniversalEquals[ManyPropObject]
checkAll("apply2 Traversal", TraversalTests(coordinates))
checkAll("applyN Traversal", TraversalTests(traversalN))
checkAll("fromTraverse Traversal", TraversalTests(eachLi))
checkAll("traversal.asSetter", SetterTests(coordinates.asSetter))
// test implicit resolution of type classes
test("Traversal has a Compose instance") {
assertEquals(
Compose[Traversal]
.compose(coordinates, eachL[Location])
.modify(_ + 1)(List(Location(1, 2, ""), Location(3, 4, ""))),
List(Location(2, 3, ""), Location(4, 5, ""))
)
}
test("Traversal has a Category instance") {
assertEquals(Category[Traversal].id[Int].getAll(3), List(3))
}
test("Traversal has a Choice instance") {
assertEquals(
Choice[Traversal]
.choice(eachL[Int], coordinates)
.modify(_ + 1)(Left(List(1, 2, 3))),
Left(List(2, 3, 4))
)
}
test("foldMap") {
assertEquals(eachLi.foldMap(_.toString)(List(1, 2, 3, 4, 5)), "12345")
}
test("getAll") {
assertEquals(eachLi.getAll(List(1, 2, 3, 4)), List(1, 2, 3, 4))
}
test("headOption") {
assertEquals(eachLi.headOption(List(1, 2, 3, 4)), Some(1))
}
test("lastOption") {
assertEquals(eachLi.lastOption(List(1, 2, 3, 4)), Some(4))
}
test("length") {
assertEquals(eachLi.length(List(1, 2, 3, 4)), 4)
assertEquals(eachLi.length(Nil), 0)
}
test("isEmpty") {
assertEquals(eachLi.isEmpty(List(1, 2, 3, 4)), false)
assertEquals(eachLi.isEmpty(Nil), true)
}
test("nonEmpty") {
assertEquals(eachLi.nonEmpty(List(1, 2, 3, 4)), true)
assertEquals(eachLi.nonEmpty(Nil), false)
}
test("find") {
assertEquals(eachLi.find(_ > 2)(List(1, 2, 3, 4)), Some(3))
assertEquals(eachLi.find(_ > 9)(List(1, 2, 3, 4)), None)
}
test("exist") {
assertEquals(eachLi.exist(_ > 2)(List(1, 2, 3, 4)), true)
assertEquals(eachLi.exist(_ > 9)(List(1, 2, 3, 4)), false)
assertEquals(eachLi.exist(_ > 9)(Nil), false)
}
test("all") {
assertEquals(eachLi.all(_ > 2)(List(1, 2, 3, 4)), false)
assertEquals(eachLi.all(_ > 0)(List(1, 2, 3, 4)), true)
assertEquals(eachLi.all(_ > 0)(Nil), true)
}
test("set") {
assertEquals(eachLi.replace(0)(List(1, 2, 3, 4)), List(0, 0, 0, 0))
}
test("modify") {
assertEquals(eachLi.modify(_ + 1)(List(1, 2, 3, 4)), List(2, 3, 4, 5))
}
test("parModifyF") {
assertEquals(
eachLi.parModifyF[Either[Unit, *]](i => (i + 1).asRight[Unit])(List(1, 2, 3, 4)),
Right(List(2, 3, 4, 5))
)
// `Left` values should be accumulated through `Validated`.
assertEquals(eachLi.parModifyF[Either[String, *]](_.toString.asLeft[Int])(List(1, 2, 3, 4)), Left("1234"))
}
test("to") {
assertEquals(eachLi.to(_.toString()).getAll(List(1, 2, 3)), List("1", "2", "3"))
}
test("some") {
val numbers = List(Some(1), None, Some(2), None)
val traversal = Traversal.fromTraverse[List, Option[Int]]
assertEquals(traversal.some.replace(5)(numbers), List(Some(5), None, Some(5), None))
assertEquals(numbers.focus().andThen(traversal).some.replace(5), List(Some(5), None, Some(5), None))
}
test("withDefault") {
val numbers = List(Some(1), None, Some(2), None)
val traversal = Traversal.fromTraverse[List, Option[Int]]
assertEquals(traversal.withDefault(0).modify(_ + 1)(numbers), List(Some(2), Some(1), Some(3), Some(1)))
assertEquals(
numbers.focus().andThen(traversal).withDefault(0).modify(_ + 1),
List(Some(2), Some(1), Some(3), Some(1))
)
}
test("each") {
val numbers = List(List(1, 2, 3), Nil, List(4), Nil)
val traversal = Traversal.fromTraverse[List, List[Int]]
assertEquals(traversal.each.getAll(numbers), List(1, 2, 3, 4))
assertEquals(numbers.focus().andThen(traversal).each.getAll, List(1, 2, 3, 4))
}
test("filter") {
val numbers = List(1, 2, 3)
val traversal = Traversal.fromTraverse[List, Int]
assertEquals(traversal.filter(_ > 1).getAll(numbers), List(2, 3))
assertEquals(numbers.focus().andThen(traversal).filter(_ > 1).getAll, List(2, 3))
}
test("filterIndex") {
val words = List(List("hello", "world"), List("hey", "hi"))
val traversal = Traversal.fromTraverse[List, List[String]]
assertEquals(traversal.filterIndex((_: Int) > 0).getAll(words), List("world", "hi"))
assertEquals(words.focus().andThen(traversal).filterIndex((_: Int) > 0).getAll, List("world", "hi"))
}
test("at") {
val sortedMap = immutable.SortedMap(1 -> "one")
val sortedMapTraversal = Iso.id[immutable.SortedMap[Int, String]].asTraversal
assertEquals(sortedMapTraversal.at(1).getAll(sortedMap), List(Some("one")))
assertEquals(sortedMapTraversal.at(0).getAll(sortedMap), List(None))
assertEquals(sortedMap.focus().andThen(sortedMapTraversal).at(1).getAll, List(Some("one")))
assertEquals(sortedMap.focus().andThen(sortedMapTraversal).at(0).getAll, List(None))
val listMap = immutable.ListMap(1 -> "one")
val listMapTraversal = Iso.id[immutable.ListMap[Int, String]].asTraversal
assertEquals(listMapTraversal.at(1).getAll(listMap), List(Some("one")))
assertEquals(listMapTraversal.at(0).getAll(listMap), List(None))
assertEquals(listMap.focus().andThen(listMapTraversal).at(1).getAll, List(Some("one")))
assertEquals(listMap.focus().andThen(listMapTraversal).at(0).getAll, List(None))
val map = immutable.Map(1 -> "one")
val mapTraversal = Iso.id[Map[Int, String]].asTraversal
assertEquals(mapTraversal.at(1).getAll(map), List(Some("one")))
assertEquals(mapTraversal.at(0).getAll(map), List(None))
assertEquals(map.focus().andThen(mapTraversal).at(1).getAll, List(Some("one")))
assertEquals(map.focus().andThen(mapTraversal).at(0).getAll, List(None))
val set = Set(1)
val setTraversal = Iso.id[Set[Int]].asTraversal
assertEquals(setTraversal.at(1).getAll(set), List(true))
assertEquals(setTraversal.at(0).getAll(set), List(false))
assertEquals(set.focus().andThen(setTraversal).at(1).getAll, List(true))
assertEquals(set.focus().andThen(setTraversal).at(0).getAll, List(false))
}
test("index") {
val list = List(1)
val listTraversal = Iso.id[List[Int]].asTraversal
assertEquals(listTraversal.index(0).getAll(list), List(1))
assertEquals(listTraversal.index(1).getAll(list), Nil)
assertEquals(list.focus().andThen(listTraversal).index(0).getAll, List(1))
assertEquals(list.focus().andThen(listTraversal).index(1).getAll, Nil)
val lazyList = LazyList(1)
val lazyListTraversal = Iso.id[LazyList[Int]].asTraversal
assertEquals(lazyListTraversal.index(0).getAll(lazyList), List(1))
assertEquals(lazyListTraversal.index(1).getAll(lazyList), Nil)
assertEquals(lazyList.focus().andThen(lazyListTraversal).index(0).getAll, List(1))
assertEquals(lazyList.focus().andThen(lazyListTraversal).index(1).getAll, Nil)
val listMap = immutable.ListMap(1 -> "one")
val listMapTraversal = Iso.id[immutable.ListMap[Int, String]].asTraversal
assertEquals(listMapTraversal.index(0).getAll(listMap), Nil)
assertEquals(listMapTraversal.index(1).getAll(listMap), List("one"))
assertEquals(listMap.focus().andThen(listMapTraversal).index(0).getAll, Nil)
assertEquals(listMap.focus().andThen(listMapTraversal).index(1).getAll, List("one"))
val map = Map(1 -> "one")
val mapTraversal = Iso.id[Map[Int, String]].asTraversal
assertEquals(mapTraversal.index(0).getAll(map), Nil)
assertEquals(mapTraversal.index(1).getAll(map), List("one"))
assertEquals(map.focus().andThen(mapTraversal).index(0).getAll, Nil)
assertEquals(map.focus().andThen(mapTraversal).index(1).getAll, List("one"))
val sortedMap = immutable.SortedMap(1 -> "one")
val sortedMapTraversal = Iso.id[immutable.SortedMap[Int, String]].asTraversal
assertEquals(sortedMapTraversal.index(0).getAll(sortedMap), Nil)
assertEquals(sortedMapTraversal.index(1).getAll(sortedMap), List("one"))
assertEquals(sortedMap.focus().andThen(sortedMapTraversal).index(0).getAll, Nil)
assertEquals(sortedMap.focus().andThen(sortedMapTraversal).index(1).getAll, List("one"))
val vector = Vector(1)
val vectorTraversal = Iso.id[Vector[Int]].asTraversal
assertEquals(vectorTraversal.index(0).getAll(vector), List(1))
assertEquals(vectorTraversal.index(1).getAll(vector), Nil)
assertEquals(vector.focus().andThen(vectorTraversal).index(0).getAll, List(1))
assertEquals(vector.focus().andThen(vectorTraversal).index(1).getAll, Nil)
val chain = Chain.one(1)
val chainTraversal = Iso.id[Chain[Int]].asTraversal
assertEquals(chainTraversal.index(0).getAll(chain), List(1))
assertEquals(chainTraversal.index(1).getAll(chain), Nil)
assertEquals(chain.focus().andThen(chainTraversal).index(0).getAll, List(1))
assertEquals(chain.focus().andThen(chainTraversal).index(1).getAll, Nil)
val nec = NonEmptyChain.one(1)
val necTraversal = Iso.id[NonEmptyChain[Int]].asTraversal
assertEquals(necTraversal.index(0).getAll(nec), List(1))
assertEquals(necTraversal.index(1).getAll(nec), Nil)
assertEquals(nec.focus().andThen(necTraversal).index(0).getAll, List(1))
assertEquals(nec.focus().andThen(necTraversal).index(1).getAll, Nil)
val nev = NonEmptyVector.one(1)
val nevTraversal = Iso.id[NonEmptyVector[Int]].asTraversal
assertEquals(nevTraversal.index(0).getAll(nev), List(1))
assertEquals(nevTraversal.index(1).getAll(nev), Nil)
assertEquals(nev.focus().andThen(nevTraversal).index(0).getAll, List(1))
assertEquals(nev.focus().andThen(nevTraversal).index(1).getAll, Nil)
val nel = NonEmptyList.one(1)
val nelTraversal = Iso.id[NonEmptyList[Int]].asTraversal
assertEquals(nelTraversal.index(0).getAll(nel), List(1))
assertEquals(nelTraversal.index(1).getAll(nel), Nil)
assertEquals(nel.focus().andThen(nelTraversal).index(0).getAll, List(1))
assertEquals(nel.focus().andThen(nelTraversal).index(1).getAll, Nil)
}
}
| julien-truffaut/Monocle | test/shared/src/test/scala/monocle/TraversalSpec.scala | Scala | mit | 12,892 |
package ohnosequences.cosas.types
import ohnosequences.cosas.fns._
trait AnyFunctionType extends AnyType {
type Domain <: AnyType
val domain: Domain
type Codomain <: AnyType
val codomain: Codomain
type Raw = AnyDepFn1 { type In1 = Domain#Raw; type Out = Codomain#Raw }
}
case class ==>[A <: AnyType, B <: AnyType](val domain: A, val codomain: B) extends AnyFunctionType {
type Domain = A
type Codomain = B
lazy val label: String = s"${domain.label} ==> ${codomain.label}"
}
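// A minimal sketch (the `age` and `name` values below are hypothetical
// AnyType instances, not defined here): `age ==> name` denotes the type of
// dependent functions from age#Raw to name#Raw, labelled "age ==> name".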
| ohnosequences/cosas | src/main/scala/cosas/types/functionTypes.scala | Scala | agpl-3.0 | 500 |
import reactivemongo.bson._
import org.specs2.mutable._
import reactivemongo.bson.exceptions.DocumentKeyNotFound
import reactivemongo.bson.Macros.Annotations.Key
class Macros extends Specification {
type Handler[A] = BSONDocumentReader[A] with BSONDocumentWriter[A] with BSONHandler[BSONDocument, A]
def roundtrip[A](original: A, format: BSONReader[BSONDocument, A] with BSONWriter[A, BSONDocument]) = {
val serialized = format write original
val deserialized = format read serialized
original mustEqual deserialized
}
  def roundtripImp[A](data: A)(implicit format: BSONReader[BSONDocument, A] with BSONWriter[A, BSONDocument]) = roundtrip(data, format)
case class Person(firstName: String, lastName: String)
case class Pet(name: String, owner: Person)
case class Primitives(dbl: Double, str: String, bl: Boolean, int: Int, long: Long)
case class Optional(name: String, value: Option[String])
case class Single(value: String)
case class OptionalSingle(value: Option[String])
case class SingleTuple(value: (String, String))
case class User(_id: BSONObjectID = BSONObjectID.generate, name: String)
case class WordLover(name: String, words: Seq[String])
case class Empty()
object EmptyObject
case class RenamedId(@Key("_id") myID: BSONObjectID = BSONObjectID.generate, @CustomAnnotation value: String)
object Nest {
case class Nested(name: String)
}
case class OverloadedApply(string: String)
object OverloadedApply {
    def apply(n: Int): Unit =
      println(n)
def apply(seq: Seq[String]): OverloadedApply = OverloadedApply(seq mkString " ")
}
object Union {
sealed trait UT
case class UA(n: Int) extends UT
case class UB(s: String) extends UT
case class UC(s: String) extends UT
case class UD(s: String) extends UT
}
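  // With a UnionType format, the concrete case class is recorded in a
  // "className" field of the written document (e.g. UA(1) is written with
  // className "Macros.Union.UA"), as asserted in the "handle union types(ADT)"
  // tests below.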
trait NestModule {
case class Nested(name: String)
val format = Macros.handler[Nested]
}
object TreeModule {
    // Due to compiler limitations (read: the only workaround I found), handlers must be defined here
    // and explicit type annotations added so the compiler can use the implicit handlers recursively
sealed trait Tree
case class Node(left: Tree, right: Tree) extends Tree
case class Leaf(data: String) extends Tree
object Tree {
import Macros.Options._
implicit val bson: Handler[Tree] = Macros.handlerOpts[Tree, UnionType[Node \/ Leaf]]
}
}
object TreeCustom{
sealed trait Tree
case class Node(left: Tree, right: Tree) extends Tree
case class Leaf(data: String) extends Tree
object Leaf {
private val helper = Macros.handler[Leaf]
implicit val bson: Handler[Leaf] = new BSONDocumentReader[Leaf] with BSONDocumentWriter[Leaf] with BSONHandler[BSONDocument, Leaf] {
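      // Intentionally ignores its input and always writes Leaf("hai"); the
      // "grab an implicit handler for type used in union" test below depends
      // on this to prove that this handler, not a macro-generated one, is used.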
def write(t: Leaf): BSONDocument = helper.write(Leaf("hai"))
def read(bson: BSONDocument): Leaf = helper read bson
}
}
object Tree {
import Macros.Options._
implicit val bson: Handler[Tree] = Macros.handlerOpts[Tree, UnionType[Node \/ Leaf] with Verbose]
}
}
object IntListModule {
sealed trait IntList
case class Cons(head: Int, tail: IntList) extends IntList
case object Tail extends IntList
object IntList{
import Macros.Options._
implicit val bson: Handler[IntList] = Macros.handlerOpts[IntList, UnionType[Cons \/ Tail.type]]
}
}
object InheritanceModule {
sealed trait T
case class A() extends T
case object B extends T
sealed trait TT extends T
case class C() extends TT
}
"Formatter" should {
"handle primitives" in {
roundtrip(
Primitives(1.2, "hai", true, 42, Long.MaxValue),
Macros.handler[Primitives])
}
"support nesting" in {
implicit val personFormat = Macros.handler[Person]
val doc = Pet("woof", Person("john", "doe"))
roundtrip(doc, Macros.handler[Pet])
}
"support option" in {
val format = Macros.handler[Optional]
val some = Optional("some", Some("value"))
val none = Optional("none", None)
roundtrip(some, format)
roundtrip(none, format)
}
"support seq" in {
roundtrip(
WordLover("john", Seq("hello", "world")),
Macros.handler[WordLover]
)
}
"support single member case classes" in {
roundtrip(
Single("Foo"),
Macros.handler[Single])
}
"support single member options" in {
val f = Macros.handler[OptionalSingle]
roundtrip(OptionalSingle(Some("foo")), f)
roundtrip(OptionalSingle(None), f)
}
"support case class definitions inside an object" in {
import Nest._
roundtrip(Nested("foo"), Macros.handler[Nested])
}
"handle overloaded apply correctly" in {
val doc1 = OverloadedApply("hello")
val doc2 = OverloadedApply(List("hello", "world"))
val f = Macros.handler[OverloadedApply]
roundtrip(doc1, f)
roundtrip(doc2, f)
}
"case class and handler inside trait" in {
val t = new NestModule {}
roundtrip(t.Nested("it works"), t.format)
}
"case class inside trait with handler outside" in {
val t = new NestModule {}
      import t._ // you need Nested in scope because t.Nested won't work
val format = Macros.handler[Nested]
roundtrip(Nested("it works"), format)
}
"respect compilation options" in {
val format = Macros.handlerOpts[Person, Macros.Options.Verbose] //more stuff in compiler log
roundtrip(Person("john", "doe"), format)
}
"persist class name on demand" in {
val person = Person("john", "doe")
val format = Macros.handlerOpts[Person, Macros.Options.SaveClassName]
val doc = format write person
doc.getAs[String]("className") mustEqual Some("Macros.Person")
roundtrip(person, format)
}
"persist simple class name on demand" in {
val person = Person("john", "doe")
val format = Macros.handlerOpts[Person, Macros.Options.SaveSimpleName]
val doc = format write person
doc.getAs[String]("className") mustEqual Some("Person")
roundtrip(person, format)
}
"handle union types(ADT)" in {
import Union._
import Macros.Options._
val a = UA(1)
val b = UB("hai")
val format = Macros.handlerOpts[UT, UnionType[UA \/ UB \/ UC \/ UD]]
println(BSONDocument pretty (format write a))
println(BSONDocument pretty (format write b))
format.write(a).getAs[String]("className") mustEqual Some("Macros.Union.UA")
format.write(b).getAs[String]("className") mustEqual Some("Macros.Union.UB")
roundtrip(a, format)
roundtrip(b, format)
}
"handle union types(ADT) with simple names" in {
import Union._
import Macros.Options._
val a = UA(1)
val b = UB("hai")
val format = Macros.handlerOpts[UT, SimpleUnionType[UA \/ UB \/ UC \/ UD]]
println(BSONDocument pretty (format write a))
println(BSONDocument pretty (format write b))
format.write(a).getAs[String]("className") mustEqual Some("UA")
format.write(b).getAs[String]("className") mustEqual Some("UB")
roundtrip(a, format)
roundtrip(b, format)
}
"handle recursive structure" in {
import TreeModule._
//handlers defined at tree module
val tree: Tree = Node(Leaf("hi"), Node(Leaf("hello"), Leaf("world")))
roundtrip(tree, Tree.bson)
}
"grab an implicit handler for type used in union" in {
import TreeCustom._
val tree: Tree = Node(Leaf("hi"), Node(Leaf("hello"), Leaf("world")))
val serialized = BSON writeDocument tree
val deserialized = BSON.readDocument[Tree](serialized)
val expected = Node(Leaf("hai"), Node(Leaf("hai"),Leaf("hai")))
deserialized mustEqual expected
}
"handle empty case classes" in {
val empty = Empty()
val format = Macros.handler[Empty]
roundtrip(empty, format)
}
"do nothing with objects" in {
val format = Macros.handler[EmptyObject.type]
roundtrip(EmptyObject, format)
}
"handle ADTs with objects" in {
import IntListModule._
roundtripImp[IntList](Tail)
roundtripImp[IntList](Cons(1, Cons(2, Cons(3, Tail))))
}
"automate Union on sealed traits" in {
import Macros.Options._
import Union._
implicit val format = Macros.handlerOpts[UT, AllImplementations]
format.write(UA(1)).getAs[String]("className") mustEqual Some("Macros.Union.UA")
format.write(UB("buzz")).getAs[String]("className") mustEqual Some("Macros.Union.UB")
roundtripImp[UT](UA(17))
roundtripImp[UT](UB("foo"))
roundtripImp[UT](UC("bar"))
roundtripImp[UT](UD("baz"))
}
"support automatic implementations search with nested traits" in {
import Macros.Options._
import InheritanceModule._
implicit val format = Macros.handlerOpts[T, AllImplementations]
format.write(A()).getAs[String]("className") mustEqual Some("Macros.InheritanceModule.A")
format.write(B).getAs[String]("className") mustEqual Some("Macros.InheritanceModule.B")
roundtripImp[T](A())
roundtripImp[T](B)
roundtripImp[T](C())
}
"automate Union on sealed traits with simple name" in {
import Macros.Options._
import Union._
implicit val format = Macros.handlerOpts[UT, SimpleAllImplementations]
format.write(UA(1)).getAs[String]("className") mustEqual Some("UA")
format.write(UB("buzz")).getAs[String]("className") mustEqual Some("UB")
roundtripImp[UT](UA(17))
roundtripImp[UT](UB("foo"))
roundtripImp[UT](UC("bar"))
roundtripImp[UT](UD("baz"))
}
"support automatic implementations search with nested traits with simple name" in {
import Macros.Options._
import InheritanceModule._
implicit val format = Macros.handlerOpts[T, SimpleAllImplementations]
format.write(A()).getAs[String]("className") mustEqual Some("A")
format.write(B).getAs[String]("className") mustEqual Some("B")
roundtripImp[T](A())
roundtripImp[T](B)
roundtripImp[T](C())
}
"support overriding keys with annotations" in {
implicit val format = Macros.handler[RenamedId]
val doc = RenamedId(value = "some value")
val serialized = format write doc
println("renaming")
println(BSONDocument.pretty(serialized))
serialized mustEqual BSONDocument("_id" -> doc.myID, "value" -> doc.value)
format.read(serialized) mustEqual doc
}
}
"Reader" should {
"throw meaningful exception if required field is missing" in {
val personDoc = BSONDocument("firstName" -> "joe")
Macros.reader[Person].read(personDoc) must throwA[DocumentKeyNotFound].like {
case e => e.getMessage must contain("lastName")
}
}
"throw meaningful exception if field has another type" in {
val primitivesDoc = BSONDocument("dbl" -> 2D, "str" -> "str", "bl" -> true, "int" -> 2D, "long" -> 2L)
Macros.reader[Primitives].read(primitivesDoc) must throwA[ClassCastException].like {
case e =>
e.getMessage must contain(classOf[BSONDouble].getName)
e.getMessage must contain(classOf[BSONInteger].getName)
}
}
}
}
| lunatech-labs/lunatech-reactivemongo | macros/src/test/scala/macrospec.scala | Scala | apache-2.0 | 11,310 |
package com.wavesplatform.features
import com.wavesplatform.features.BlockchainFeatureStatus.{Activated, Approved, Undefined}
import com.wavesplatform.features.api.NodeFeatureStatus.{Implemented, NotImplemented, Voted}
import play.api.libs.json._
package object api {
implicit val nodeFeatureStatusFormat: Format[NodeFeatureStatus] =
new Format[NodeFeatureStatus] {
private val notimplemented = "NOT_IMPLEMENTED"
private val implemented = "IMPLEMENTED"
private val voted = "VOTED"
override def reads(json: JsValue): JsResult[NodeFeatureStatus] =
json match {
case JsString(`notimplemented`) => JsSuccess(NotImplemented)
case JsString(`implemented`) => JsSuccess(Implemented)
case JsString(`voted`) => JsSuccess(Voted)
          case _ => JsError("expected one of NOT_IMPLEMENTED, IMPLEMENTED or VOTED")
}
override def writes(o: NodeFeatureStatus): JsValue = {
o match {
case NotImplemented => JsString(notimplemented)
case Implemented => JsString(implemented)
case Voted => JsString(voted)
}
}
}
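  // Round-trip example: Json.toJson(Voted) yields JsString("VOTED"), and
  // JsString("VOTED").as[NodeFeatureStatus] reads back as Voted.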
implicit val blockchainFeatureStatusFormat: Format[BlockchainFeatureStatus] =
new Format[BlockchainFeatureStatus] {
private val undefined = "VOTING"
private val approved = "APPROVED"
private val activated = "ACTIVATED"
override def reads(json: JsValue): JsResult[BlockchainFeatureStatus] =
json match {
case JsString(`undefined`) => JsSuccess(Undefined)
case JsString(`approved`) => JsSuccess(Approved)
case JsString(`activated`) => JsSuccess(Activated)
          case _ => JsError("expected one of VOTING, APPROVED or ACTIVATED")
}
override def writes(o: BlockchainFeatureStatus): JsValue = {
o match {
case Undefined => JsString(undefined)
case Approved => JsString(approved)
case Activated => JsString(activated)
}
}
}
implicit val activationStatusFeatureFormat: Format[FeatureActivationStatus] = Json.format
implicit val activationStatusFormat: Format[ActivationStatus] = Json.format
}
| wavesplatform/Waves | node/src/main/scala/com/wavesplatform/features/api/package.scala | Scala | mit | 2,158 |
package scala.collection
package immutable
import mutable.{Builder, StringBuilder}
/**
* This class serves as a wrapper augmenting `String`s with all the operations
* found in indexed sequences.
*
* The difference between this class and `StringOps` is that calling transformer
* methods such as `filter` and `map` will yield an object of type `WrappedString`
* rather than a `String`.
*
* @param self a string contained within this wrapped string
*
* @since 2.8
* @define Coll `WrappedString`
* @define coll wrapped string
*/
final class WrappedString(val self: String) extends AbstractSeq[Char] with IndexedSeq[Char]
with IndexedSeqOps[Char, IndexedSeq, WrappedString] {
def apply(i: Int): Char = self.charAt(i)
override protected def fromSpecificIterable(coll: scala.collection.Iterable[Char]): WrappedString =
WrappedString.fromSpecific(coll)
override protected def newSpecificBuilder(): Builder[Char, WrappedString] = WrappedString.newBuilder()
override def slice(from: Int, until: Int): WrappedString = {
val start = if (from < 0) 0 else from
if (until <= start || start >= self.length)
return new WrappedString("")
val end = if (until > length) length else until
new WrappedString(self.substring(start, end))
}
override def length = self.length
override def toString = self
override def view: StringView = new StringView(self)
}
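// For example, `new WrappedString("hello").filter(_ != 'l')` stays a
// WrappedString (built via newSpecificBuilder), whereas the equivalent call
// through StringOps would produce a plain String.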
/** A companion object for wrapped strings.
*
* @since 2.8
*/
object WrappedString extends SpecificIterableFactory[Char, WrappedString] {
def fromSpecific(it: IterableOnce[Char]): WrappedString = {
val b = newBuilder()
val s = it.knownSize
    if (s >= 0) b.sizeHint(s)
b ++= it
b.result()
}
val empty: WrappedString = new WrappedString("")
def newBuilder(): Builder[Char, WrappedString] =
new StringBuilder().mapResult(x => new WrappedString(x))
}
| rorygraves/perf_tester | corpus/scala-library/src/main/scala/collection/immutable/WrappedString.scala | Scala | apache-2.0 | 1,907 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark
/**
* Spark's broadcast variables, used to broadcast immutable datasets to all nodes.
*/
package object broadcast {
// For package docs only
}
| yelshater/hadoop-2.3.0 | spark-core_2.10-1.0.0-cdh5.1.0/src/main/scala/org/apache/spark/broadcast/package.scala | Scala | apache-2.0 | 975 |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest.wordspec
import org.scalatest._
import org.scalatest.SharedHelpers.{EventRecordingReporter, thisLineNumber}
import scala.concurrent.{Promise, ExecutionContext, Future}
import org.scalatest.concurrent.SleepHelper
import org.scalatest.events.{InfoProvided, MarkupProvided}
import org.scalatest.exceptions.{NotAllowedException, DuplicateTestNameException}
import org.scalactic.Prettifier
import scala.util.Success
import org.scalatest.funspec.AnyFunSpec
import org.scalatest.wordspec.AsyncWordSpec
class AsyncWordSpecSpec extends AnyFunSpec {
private val prettifier = Prettifier.default
describe("AsyncWordSpec") {
it("can be used for tests that return Future under parallel async test execution") {
class ExampleSpec extends AsyncWordSpec with ParallelTestExecution {
//SCALATESTJS-ONLY implicit override def executionContext = org.scalatest.concurrent.TestExecutionContext.runNow
val a = 1
"test 1" in {
Future {
assert(a == 1)
}
}
"test 2" in {
Future {
assert(a == 2)
}
}
"test 3" in {
Future {
pending
}
}
"test 4" in {
Future {
cancel()
}
}
"test 5" ignore {
Future {
cancel()
}
}
override def newInstance = new ExampleSpec
}
val rep = new EventRecordingReporter
val spec = new ExampleSpec
val status = spec.run(None, Args(reporter = rep))
// SKIP-SCALATESTJS,NATIVE-START
status.waitUntilCompleted()
// SKIP-SCALATESTJS,NATIVE-END
assert(rep.testStartingEventsReceived.length == 4)
assert(rep.testSucceededEventsReceived.length == 1)
assert(rep.testSucceededEventsReceived(0).testName == "test 1")
assert(rep.testFailedEventsReceived.length == 1)
assert(rep.testFailedEventsReceived(0).testName == "test 2")
assert(rep.testPendingEventsReceived.length == 1)
assert(rep.testPendingEventsReceived(0).testName == "test 3")
assert(rep.testCanceledEventsReceived.length == 1)
assert(rep.testCanceledEventsReceived(0).testName == "test 4")
assert(rep.testIgnoredEventsReceived.length == 1)
assert(rep.testIgnoredEventsReceived(0).testName == "test 5")
}
it("can be used for tests that did not return Future under parallel async test execution") {
class ExampleSpec extends AsyncWordSpec with ParallelTestExecution {
//SCALATESTJS-ONLY implicit override def executionContext = org.scalatest.concurrent.TestExecutionContext.runNow
val a = 1
"test 1" in {
assert(a == 1)
}
"test 2" in {
assert(a == 2)
}
"test 3" in {
pending
}
"test 4" in {
cancel()
}
"test 5" ignore {
cancel()
}
override def newInstance = new ExampleSpec
}
val rep = new EventRecordingReporter
val spec = new ExampleSpec
val status = spec.run(None, Args(reporter = rep))
// SKIP-SCALATESTJS,NATIVE-START
status.waitUntilCompleted()
// SKIP-SCALATESTJS,NATIVE-END
assert(rep.testStartingEventsReceived.length == 4)
assert(rep.testSucceededEventsReceived.length == 1)
assert(rep.testSucceededEventsReceived(0).testName == "test 1")
assert(rep.testFailedEventsReceived.length == 1)
assert(rep.testFailedEventsReceived(0).testName == "test 2")
assert(rep.testPendingEventsReceived.length == 1)
assert(rep.testPendingEventsReceived(0).testName == "test 3")
assert(rep.testCanceledEventsReceived.length == 1)
assert(rep.testCanceledEventsReceived(0).testName == "test 4")
assert(rep.testIgnoredEventsReceived.length == 1)
assert(rep.testIgnoredEventsReceived(0).testName == "test 5")
}
it("can be used with is for pending tests that don't return a Future") {
class ExampleSpec extends AsyncWordSpec {
//SCALATESTJS-ONLY implicit override def executionContext = org.scalatest.concurrent.TestExecutionContext.runNow
val a = 1
"test 1" is {
pending
}
"test 2" ignore {
pending
}
}
val rep = new EventRecordingReporter
val spec = new ExampleSpec
val status = spec.run(None, Args(reporter = rep))
// SKIP-SCALATESTJS,NATIVE-START
status.waitUntilCompleted()
// SKIP-SCALATESTJS,NATIVE-END
assert(rep.testStartingEventsReceived.length == 1)
assert(rep.testSucceededEventsReceived.length == 0)
assert(rep.testFailedEventsReceived.length == 0)
assert(rep.testPendingEventsReceived.length == 1)
assert(rep.testPendingEventsReceived(0).testName == "test 1")
assert(rep.testCanceledEventsReceived.length == 0)
assert(rep.testIgnoredEventsReceived.length == 1)
assert(rep.testIgnoredEventsReceived(0).testName == "test 2")
}
it("should run tests that return Future in serial by default") {
@volatile var count = 0
class ExampleSpec extends AsyncWordSpec {
//SCALATESTJS-ONLY implicit override def executionContext = org.scalatest.concurrent.TestExecutionContext.runNow
"test 1" in {
Future {
SleepHelper.sleep(30)
assert(count == 0)
count = 1
succeed
}
}
"test 2" in {
Future {
assert(count == 1)
SleepHelper.sleep(50)
count = 2
succeed
}
}
"test 3" in {
Future {
assert(count == 2)
}
}
}
val rep = new EventRecordingReporter
val suite = new ExampleSpec
val status = suite.run(None, Args(reporter = rep))
// SKIP-SCALATESTJS,NATIVE-START
status.waitUntilCompleted()
// SKIP-SCALATESTJS,NATIVE-END
assert(rep.testStartingEventsReceived.length == 3)
assert(rep.testSucceededEventsReceived.length == 3)
}
it("should run tests that does not return Future in serial by default") {
@volatile var count = 0
class ExampleSpec extends AsyncWordSpec {
//SCALATESTJS-ONLY implicit override def executionContext = org.scalatest.concurrent.TestExecutionContext.runNow
"test 1" in {
SleepHelper.sleep(3000)
assert(count == 0)
count = 1
succeed
}
"test 2" in {
assert(count == 1)
SleepHelper.sleep(5000)
count = 2
succeed
}
"test 3" in {
assert(count == 2)
}
}
val rep = new EventRecordingReporter
val suite = new ExampleSpec
val status = suite.run(None, Args(reporter = rep))
// SKIP-SCALATESTJS,NATIVE-START
status.waitUntilCompleted()
// SKIP-SCALATESTJS,NATIVE-END
assert(rep.testStartingEventsReceived.length == 3)
assert(rep.testSucceededEventsReceived.length == 3)
}
// SKIP-SCALATESTJS,NATIVE-START
it("should run tests and its future in same main thread when use SerialExecutionContext") {
      val mainThread = Thread.currentThread
var test1Thread: Option[Thread] = None
var test2Thread: Option[Thread] = None
var onCompleteThread: Option[Thread] = None
class ExampleSpec extends AsyncWordSpec {
"test 1" in {
Future {
test1Thread = Some(Thread.currentThread)
succeed
}
}
"test 2" in {
Future {
test2Thread = Some(Thread.currentThread)
succeed
}
}
}
val rep = new EventRecordingReporter
val suite = new ExampleSpec
val status = suite.run(None, Args(reporter = rep))
status.whenCompleted { s =>
onCompleteThread = Some(Thread.currentThread)
}
status.waitUntilCompleted()
assert(test1Thread.isDefined)
assert(test1Thread.get == mainThread)
assert(test2Thread.isDefined)
assert(test2Thread.get == mainThread)
assert(onCompleteThread.isDefined)
assert(onCompleteThread.get == mainThread)
}
it("should run tests and its true async future in the same thread when use SerialExecutionContext") {
      val mainThread = Thread.currentThread
@volatile var test1Thread: Option[Thread] = None
@volatile var test2Thread: Option[Thread] = None
var onCompleteThread: Option[Thread] = None
class ExampleSpec extends AsyncWordSpec {
"test 1" in {
val promise = Promise[Assertion]
val timer = new java.util.Timer
timer.schedule(
new java.util.TimerTask {
def run(): Unit = {
promise.complete(Success(succeed))
}
},
1000
)
promise.future.map { s =>
test1Thread = Some(Thread.currentThread)
s
}
}
"test 2" in {
val promise = Promise[Assertion]
val timer = new java.util.Timer
timer.schedule(
new java.util.TimerTask {
def run(): Unit = {
promise.complete(Success(succeed))
}
},
500
)
promise.future.map { s =>
test2Thread = Some(Thread.currentThread)
s
}
}
}
val rep = new EventRecordingReporter
val suite = new ExampleSpec
val status = suite.run(None, Args(reporter = rep))
status.whenCompleted { s =>
onCompleteThread = Some(Thread.currentThread)
}
status.waitUntilCompleted()
assert(test1Thread.isDefined)
assert(test1Thread.get == mainThread)
assert(test2Thread.isDefined)
assert(test2Thread.get == mainThread)
assert(onCompleteThread.isDefined)
assert(onCompleteThread.get == mainThread)
}
it("should not run out of stack space with nested futures when using SerialExecutionContext") {
class ExampleSpec extends AsyncWordSpec {
// Note we get a StackOverflowError with the following execution
// context.
// override implicit def executionContext: ExecutionContext = new ExecutionContext { def execute(runnable: Runnable) = runnable.run; def reportFailure(cause: Throwable) = () }
def sum(xs: List[Int]): Future[Int] =
xs match {
case Nil => Future.successful(0)
case x :: xs => Future(x).flatMap(xx => sum(xs).map(xxx => xx + xxx))
}
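        // SerialExecutionContext queues tasks and executes them in a loop on
        // the calling thread instead of recursively, so deeply nested flatMaps
        // do not grow the call stack (unlike the run-now context mentioned above).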
"test 1" in {
val fut: Future[Int] = sum((1 to 50000).toList)
fut.map(total => assert(total == 1250025000))
}
}
val rep = new EventRecordingReporter
val suite = new ExampleSpec
val status = suite.run(None, Args(reporter = rep))
status.waitUntilCompleted()
assert(!rep.testSucceededEventsReceived.isEmpty)
}
// SKIP-SCALATESTJS,NATIVE-END
it("should run tests that returns Future and report their result in serial") {
class ExampleSpec extends AsyncWordSpec {
//SCALATESTJS-ONLY implicit override def executionContext = org.scalatest.concurrent.TestExecutionContext.runNow
"test 1" in {
Future {
SleepHelper.sleep(60)
succeed
}
}
"test 2" in {
Future {
SleepHelper.sleep(30)
succeed
}
}
"test 3" in {
Future {
succeed
}
}
}
val rep = new EventRecordingReporter
val suite = new ExampleSpec
val status = suite.run(None, Args(reporter = rep))
// SKIP-SCALATESTJS,NATIVE-START
status.waitUntilCompleted()
// SKIP-SCALATESTJS,NATIVE-END
assert(rep.testStartingEventsReceived.length == 3)
assert(rep.testStartingEventsReceived(0).testName == "test 1")
assert(rep.testStartingEventsReceived(1).testName == "test 2")
assert(rep.testStartingEventsReceived(2).testName == "test 3")
assert(rep.testSucceededEventsReceived.length == 3)
assert(rep.testSucceededEventsReceived(0).testName == "test 1")
assert(rep.testSucceededEventsReceived(1).testName == "test 2")
assert(rep.testSucceededEventsReceived(2).testName == "test 3")
}
it("should run tests that does not return Future and report their result in serial") {
class ExampleSpec extends AsyncWordSpec {
//SCALATESTJS-ONLY implicit override def executionContext = org.scalatest.concurrent.TestExecutionContext.runNow
"test 1" in {
SleepHelper.sleep(60)
succeed
}
"test 2" in {
SleepHelper.sleep(30)
succeed
}
"test 3" in {
succeed
}
}
val rep = new EventRecordingReporter
val suite = new ExampleSpec
val status = suite.run(None, Args(reporter = rep))
// SKIP-SCALATESTJS,NATIVE-START
status.waitUntilCompleted()
// SKIP-SCALATESTJS,NATIVE-END
assert(rep.testStartingEventsReceived.length == 3)
assert(rep.testStartingEventsReceived(0).testName == "test 1")
assert(rep.testStartingEventsReceived(1).testName == "test 2")
assert(rep.testStartingEventsReceived(2).testName == "test 3")
assert(rep.testSucceededEventsReceived.length == 3)
assert(rep.testSucceededEventsReceived(0).testName == "test 1")
assert(rep.testSucceededEventsReceived(1).testName == "test 2")
assert(rep.testSucceededEventsReceived(2).testName == "test 3")
}
it("should send an InfoProvided event for an info in main spec body") {
class MySuite extends AsyncWordSpec {
info(
"hi there"
)
}
val suite = new MySuite
val reporter = new EventRecordingReporter
val status = suite.run(None, Args(reporter))
// SKIP-SCALATESTJS,NATIVE-START
status.waitUntilCompleted()
// SKIP-SCALATESTJS,NATIVE-END
val infoList = reporter.infoProvidedEventsReceived
assert(infoList.size == 1)
assert(infoList(0).message == "hi there")
}
it("should send an InfoProvided event for an info in scope body") {
class MySuite extends AsyncWordSpec {
//SCALATESTJS-ONLY implicit override def executionContext = org.scalatest.concurrent.TestExecutionContext.runNow
"test feature" should {
info(
"hi there"
)
"test 1" in { succeed }
}
}
val suite = new MySuite
val reporter = new EventRecordingReporter
val status = suite.run(None, Args(reporter))
// SKIP-SCALATESTJS,NATIVE-START
status.waitUntilCompleted()
// SKIP-SCALATESTJS,NATIVE-END
val infoList = reporter.infoProvidedEventsReceived
assert(infoList.size == 1)
assert(infoList(0).message == "hi there")
}
it("should send an InfoProvided event for an info in test body") {
class MySuite extends AsyncWordSpec {
//SCALATESTJS-ONLY implicit override def executionContext = org.scalatest.concurrent.TestExecutionContext.runNow
"test feature" should {
"test 1" in {
info("hi there")
succeed
}
}
}
val suite = new MySuite
val reporter = new EventRecordingReporter
val status = suite.run(None, Args(reporter))
// SKIP-SCALATESTJS,NATIVE-START
status.waitUntilCompleted()
// SKIP-SCALATESTJS,NATIVE-END
val infoList = reporter.infoProvidedEventsReceived
assert(infoList.size == 0)
val testSucceededList = reporter.testSucceededEventsReceived
assert(testSucceededList.size == 1)
assert(testSucceededList(0).recordedEvents.size == 1)
val recordedEvent = testSucceededList(0).recordedEvents(0)
assert(recordedEvent.isInstanceOf[InfoProvided])
val infoProvided = recordedEvent.asInstanceOf[InfoProvided]
assert(infoProvided.message == "hi there")
}
it("should send an InfoProvided event for an info in Future returned by test body") {
class MySuite extends AsyncWordSpec {
//SCALATESTJS-ONLY implicit override def executionContext = org.scalatest.concurrent.TestExecutionContext.runNow
"test feature" should {
"test 1" in {
Future {
info("hi there")
succeed
}
}
}
}
val suite = new MySuite
val reporter = new EventRecordingReporter
val status = suite.run(None, Args(reporter))
// SKIP-SCALATESTJS,NATIVE-START
status.waitUntilCompleted()
// SKIP-SCALATESTJS,NATIVE-END
val infoList = reporter.infoProvidedEventsReceived
assert(infoList.size == 0)
val testSucceededList = reporter.testSucceededEventsReceived
assert(testSucceededList.size == 1)
assert(testSucceededList(0).recordedEvents.size == 1)
val recordedEvent = testSucceededList(0).recordedEvents(0)
assert(recordedEvent.isInstanceOf[InfoProvided])
val infoProvided = recordedEvent.asInstanceOf[InfoProvided]
assert(infoProvided.message == "hi there")
}
it("should send a NoteProvided event for a note in main spec body") {
class MySuite extends AsyncWordSpec {
note(
"hi there"
)
}
val suite = new MySuite
val reporter = new EventRecordingReporter
val status = suite.run(None, Args(reporter))
// SKIP-SCALATESTJS,NATIVE-START
status.waitUntilCompleted()
// SKIP-SCALATESTJS,NATIVE-END
val noteList = reporter.noteProvidedEventsReceived
assert(noteList.size == 1)
assert(noteList(0).message == "hi there")
}
it("should send a NoteProvided event for a note in scope body") {
class MySuite extends AsyncWordSpec {
//SCALATESTJS-ONLY implicit override def executionContext = org.scalatest.concurrent.TestExecutionContext.runNow
"test feature" should {
note(
"hi there"
)
"test 1" in { succeed }
}
}
val suite = new MySuite
val reporter = new EventRecordingReporter
val status = suite.run(None, Args(reporter))
// SKIP-SCALATESTJS,NATIVE-START
status.waitUntilCompleted()
// SKIP-SCALATESTJS,NATIVE-END
val noteList = reporter.noteProvidedEventsReceived
assert(noteList.size == 1)
assert(noteList(0).message == "hi there")
}
it("should send a NoteProvided event for a note in test body") {
class MySuite extends AsyncWordSpec {
//SCALATESTJS-ONLY implicit override def executionContext = org.scalatest.concurrent.TestExecutionContext.runNow
"test feature" should {
"test 1" in {
note("hi there")
succeed
}
}
}
val suite = new MySuite
val reporter = new EventRecordingReporter
val status = suite.run(None, Args(reporter))
// SKIP-SCALATESTJS,NATIVE-START
status.waitUntilCompleted()
// SKIP-SCALATESTJS,NATIVE-END
val noteList = reporter.noteProvidedEventsReceived
assert(noteList.size == 1)
assert(noteList(0).message == "hi there")
}
it("should send a NoteProvided event for a note in Future returned by test body") {
class MySuite extends AsyncWordSpec {
//SCALATESTJS-ONLY implicit override def executionContext = org.scalatest.concurrent.TestExecutionContext.runNow
"test feature" should {
"test 1" in {
Future {
note("hi there")
succeed
}
}
}
}
val suite = new MySuite
val reporter = new EventRecordingReporter
val status = suite.run(None, Args(reporter))
// SKIP-SCALATESTJS,NATIVE-START
status.waitUntilCompleted()
// SKIP-SCALATESTJS,NATIVE-END
val noteList = reporter.noteProvidedEventsReceived
assert(noteList.size == 1)
assert(noteList(0).message == "hi there")
}
it("should send an AlertProvided event for an alert in main spec body") {
class MySuite extends AsyncWordSpec {
alert(
"hi there"
)
}
val suite = new MySuite
val reporter = new EventRecordingReporter
val status = suite.run(None, Args(reporter))
// SKIP-SCALATESTJS,NATIVE-START
status.waitUntilCompleted()
// SKIP-SCALATESTJS,NATIVE-END
val alertList = reporter.alertProvidedEventsReceived
assert(alertList.size == 1)
assert(alertList(0).message == "hi there")
}
it("should send an AlertProvided event for an alert in scope body") {
class MySuite extends AsyncWordSpec {
//SCALATESTJS-ONLY implicit override def executionContext = org.scalatest.concurrent.TestExecutionContext.runNow
"test feature" should {
alert(
"hi there"
)
"test 1" in { succeed }
}
}
val suite = new MySuite
val reporter = new EventRecordingReporter
val status = suite.run(None, Args(reporter))
// SKIP-SCALATESTJS,NATIVE-START
status.waitUntilCompleted()
// SKIP-SCALATESTJS,NATIVE-END
val alertList = reporter.alertProvidedEventsReceived
assert(alertList.size == 1)
assert(alertList(0).message == "hi there")
}
it("should send an AlertProvided event for an alert in test body") {
class MySuite extends AsyncWordSpec {
//SCALATESTJS-ONLY implicit override def executionContext = org.scalatest.concurrent.TestExecutionContext.runNow
"test feature" should {
"test 1" in {
alert("hi there")
succeed
}
}
}
val suite = new MySuite
val reporter = new EventRecordingReporter
val status = suite.run(None, Args(reporter))
// SKIP-SCALATESTJS,NATIVE-START
status.waitUntilCompleted()
// SKIP-SCALATESTJS,NATIVE-END
val alertList = reporter.alertProvidedEventsReceived
assert(alertList.size == 1)
assert(alertList(0).message == "hi there")
}
it("should send an AlertProvided event for an alert in Future returned by test body") {
class MySuite extends AsyncWordSpec {
//SCALATESTJS-ONLY implicit override def executionContext = org.scalatest.concurrent.TestExecutionContext.runNow
"test feature" should {
"test 1" in {
Future {
alert("hi there")
succeed
}
}
}
}
val suite = new MySuite
val reporter = new EventRecordingReporter
val status = suite.run(None, Args(reporter))
// SKIP-SCALATESTJS,NATIVE-START
status.waitUntilCompleted()
// SKIP-SCALATESTJS,NATIVE-END
val alertList = reporter.alertProvidedEventsReceived
assert(alertList.size == 1)
assert(alertList(0).message == "hi there")
}
it("should send a MarkupProvided event for a markup in main spec body") {
class MySuite extends AsyncWordSpec {
markup(
"hi there"
)
}
val suite = new MySuite
val reporter = new EventRecordingReporter
val status = suite.run(None, Args(reporter))
// SKIP-SCALATESTJS,NATIVE-START
status.waitUntilCompleted()
// SKIP-SCALATESTJS,NATIVE-END
val markupList = reporter.markupProvidedEventsReceived
assert(markupList.size == 1)
assert(markupList(0).text == "hi there")
}
it("should send a MarkupProvided event for a markup in scope body") {
class MySuite extends AsyncWordSpec {
//SCALATESTJS-ONLY implicit override def executionContext = org.scalatest.concurrent.TestExecutionContext.runNow
"test feature" should {
markup(
"hi there"
)
"test 1" in { succeed }
}
}
val suite = new MySuite
val reporter = new EventRecordingReporter
val status = suite.run(None, Args(reporter))
// SKIP-SCALATESTJS,NATIVE-START
status.waitUntilCompleted()
// SKIP-SCALATESTJS,NATIVE-END
val markupList = reporter.markupProvidedEventsReceived
assert(markupList.size == 1)
assert(markupList(0).text == "hi there")
}
it("should send a MarkupProvided event for a markup in test body") {
class MySuite extends AsyncWordSpec {
//SCALATESTJS-ONLY implicit override def executionContext = org.scalatest.concurrent.TestExecutionContext.runNow
"test feature" should {
"test 1" in {
markup("hi there")
succeed
}
}
}
val suite = new MySuite
val reporter = new EventRecordingReporter
val status = suite.run(None, Args(reporter))
// SKIP-SCALATESTJS,NATIVE-START
status.waitUntilCompleted()
// SKIP-SCALATESTJS,NATIVE-END
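        // markup() called inside a test body is not fired as a standalone MarkupProvided event; it is recorded and attached to the TestSucceeded event instead, which the assertions below verify.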
val markupList = reporter.markupProvidedEventsReceived
assert(markupList.size == 0)
val testSucceededList = reporter.testSucceededEventsReceived
assert(testSucceededList.size == 1)
assert(testSucceededList(0).recordedEvents.size == 1)
val recordedEvent = testSucceededList(0).recordedEvents(0)
assert(recordedEvent.isInstanceOf[MarkupProvided])
val markupProvided = recordedEvent.asInstanceOf[MarkupProvided]
assert(markupProvided.text == "hi there")
}
it("should send a MarkupProvided event for a markup in Future returned by test body") {
class MySuite extends AsyncWordSpec {
//SCALATESTJS-ONLY implicit override def executionContext = org.scalatest.concurrent.TestExecutionContext.runNow
"test feature" should {
"test 1" in {
Future {
markup("hi there")
succeed
}
}
}
}
val suite = new MySuite
val reporter = new EventRecordingReporter
val status = suite.run(None, Args(reporter))
// SKIP-SCALATESTJS,NATIVE-START
status.waitUntilCompleted()
// SKIP-SCALATESTJS,NATIVE-END
val markupList = reporter.markupProvidedEventsReceived
assert(markupList.size == 0)
val testSucceededList = reporter.testSucceededEventsReceived
assert(testSucceededList.size == 1)
assert(testSucceededList(0).recordedEvents.size == 1)
val recordedEvent = testSucceededList(0).recordedEvents(0)
assert(recordedEvent.isInstanceOf[MarkupProvided])
val markupProvided = recordedEvent.asInstanceOf[MarkupProvided]
assert(markupProvided.text == "hi there")
}
it("should throw NotAllowedException wrapping a DuplicateTestNameException when duplicate test name is detected inside when") {
class TestSpec extends AsyncWordSpec {
"a feature" when {
"test 1" in { succeed }
"test 1" in { succeed }
}
}
val e = intercept[NotAllowedException] {
new TestSpec
}
assert("AsyncWordSpecSpec.scala" == e.failedCodeFileName.get)
      assert(e.failedCodeLineNumber.get == thisLineNumber - 7) // should point back at the second, duplicate "test 1" registration above
assert(e.cause.isDefined)
val causeThrowable = e.cause.get
assert(e.message == Some(FailureMessages.exceptionWasThrownInWhenClause(prettifier, UnquotedString(causeThrowable.getClass.getName), "a feature", FailureMessages.duplicateTestName(prettifier, UnquotedString("a feature when test 1")))))
assert(causeThrowable.isInstanceOf[DuplicateTestNameException])
val cause = causeThrowable.asInstanceOf[DuplicateTestNameException]
assert(cause.getMessage == FailureMessages.duplicateTestName(prettifier, UnquotedString("a feature when test 1")))
}
it("should throw NotAllowedException wrapping a DuplicateTestNameException when duplicate test name is detected inside shorthand when") {
class TestSpec extends AsyncWordSpec {
"a feature" when {
//DOTTY-ONLY ()
}
it when {
"test 1" in { succeed }
"test 1" in { succeed }
}
}
val e = intercept[NotAllowedException] {
new TestSpec
}
assert("AsyncWordSpecSpec.scala" == e.failedCodeFileName.get)
assert(e.failedCodeLineNumber.get == thisLineNumber - 7)
assert(e.cause.isDefined)
val causeThrowable = e.cause.get
assert(e.message == Some(FailureMessages.exceptionWasThrownInWhenClause(prettifier, UnquotedString(causeThrowable.getClass.getName), "a feature", FailureMessages.duplicateTestName(prettifier, UnquotedString("a feature when test 1")))))
assert(causeThrowable.isInstanceOf[DuplicateTestNameException])
val cause = causeThrowable.asInstanceOf[DuplicateTestNameException]
assert(cause.getMessage == FailureMessages.duplicateTestName(prettifier, UnquotedString("a feature when test 1")))
}
it("should throw NotAllowedException wrapping a DuplicateTestNameException when duplicate test name is detected inside should") {
class TestSpec extends AsyncWordSpec {
"a feature" should {
"test 1" in { succeed }
"test 1" in { succeed }
}
}
val e = intercept[NotAllowedException] {
new TestSpec
}
assert("AsyncWordSpecSpec.scala" == e.failedCodeFileName.get)
assert(e.failedCodeLineNumber.get == thisLineNumber - 7)
assert(e.cause.isDefined)
val causeThrowable = e.cause.get
assert(e.message == Some(FailureMessages.exceptionWasThrownInShouldClause(prettifier, UnquotedString(causeThrowable.getClass.getName), "a feature", FailureMessages.duplicateTestName(prettifier, UnquotedString("a feature should test 1")))))
assert(causeThrowable.isInstanceOf[DuplicateTestNameException])
val cause = causeThrowable.asInstanceOf[DuplicateTestNameException]
assert(cause.getMessage == FailureMessages.duplicateTestName(prettifier, UnquotedString("a feature should test 1")))
}
it("should throw NotAllowedException wrapping a DuplicateTestNameException when duplicate test name is detected inside shorthand should") {
class TestSpec extends AsyncWordSpec {
"a feature" should {
//DOTTY-ONLY ()
}
it should {
"test 1" in { succeed }
"test 1" in { succeed }
}
}
val e = intercept[NotAllowedException] {
new TestSpec
}
assert("AsyncWordSpecSpec.scala" == e.failedCodeFileName.get)
assert(e.failedCodeLineNumber.get == thisLineNumber - 7)
assert(e.cause.isDefined)
val causeThrowable = e.cause.get
assert(e.message == Some(FailureMessages.exceptionWasThrownInShouldClause(prettifier, UnquotedString(causeThrowable.getClass.getName), "a feature", FailureMessages.duplicateTestName(prettifier, UnquotedString("a feature should test 1")))))
assert(causeThrowable.isInstanceOf[DuplicateTestNameException])
val cause = causeThrowable.asInstanceOf[DuplicateTestNameException]
assert(cause.getMessage == FailureMessages.duplicateTestName(prettifier, UnquotedString("a feature should test 1")))
}
it("should throw NotAllowedException wrapping a DuplicateTestNameException when duplicate test name is detected inside must") {
class TestSpec extends AsyncWordSpec {
"a feature" must {
"test 1" in { succeed }
"test 1" in { succeed }
}
}
val e = intercept[NotAllowedException] {
new TestSpec
}
assert("AsyncWordSpecSpec.scala" == e.failedCodeFileName.get)
assert(e.failedCodeLineNumber.get == thisLineNumber - 7)
assert(e.cause.isDefined)
val causeThrowable = e.cause.get
assert(e.message == Some(FailureMessages.exceptionWasThrownInMustClause(prettifier, UnquotedString(causeThrowable.getClass.getName), "a feature", FailureMessages.duplicateTestName(prettifier, UnquotedString("a feature must test 1")))))
assert(causeThrowable.isInstanceOf[DuplicateTestNameException])
val cause = causeThrowable.asInstanceOf[DuplicateTestNameException]
assert(cause.getMessage == FailureMessages.duplicateTestName(prettifier, UnquotedString("a feature must test 1")))
}
it("should throw NotAllowedException wrapping a DuplicateTestNameException when duplicate test name is detected inside shorthand must") {
class TestSpec extends AsyncWordSpec {
"a feature" must {
//DOTTY-ONLY ()
}
it must {
"test 1" in { succeed }
"test 1" in { succeed }
}
}
val e = intercept[NotAllowedException] {
new TestSpec
}
assert("AsyncWordSpecSpec.scala" == e.failedCodeFileName.get)
assert(e.failedCodeLineNumber.get == thisLineNumber - 7)
assert(e.cause.isDefined)
val causeThrowable = e.cause.get
assert(e.message == Some(FailureMessages.exceptionWasThrownInMustClause(prettifier, UnquotedString(causeThrowable.getClass.getName), "a feature", FailureMessages.duplicateTestName(prettifier, UnquotedString("a feature must test 1")))))
assert(causeThrowable.isInstanceOf[DuplicateTestNameException])
val cause = causeThrowable.asInstanceOf[DuplicateTestNameException]
assert(cause.getMessage == FailureMessages.duplicateTestName(prettifier, UnquotedString("a feature must test 1")))
}
it("should throw NotAllowedException wrapping a DuplicateTestNameException when duplicate test name is detected inside that") {
class TestSpec extends AsyncWordSpec {
"a feature" that {
"test 1" in { succeed }
"test 1" in { succeed }
}
}
val e = intercept[NotAllowedException] {
new TestSpec
}
assert("AsyncWordSpecSpec.scala" == e.failedCodeFileName.get)
assert(e.failedCodeLineNumber.get == thisLineNumber - 7)
assert(e.cause.isDefined)
val causeThrowable = e.cause.get
assert(e.message == Some(FailureMessages.exceptionWasThrownInThatClause(prettifier, UnquotedString(causeThrowable.getClass.getName), "a feature that", FailureMessages.duplicateTestName(prettifier, UnquotedString("a feature that test 1")))))
assert(causeThrowable.isInstanceOf[DuplicateTestNameException])
val cause = causeThrowable.asInstanceOf[DuplicateTestNameException]
assert(cause.getMessage == FailureMessages.duplicateTestName(prettifier, UnquotedString("a feature that test 1")))
}
it("should throw NotAllowedException wrapping a DuplicateTestNameException when duplicate test name is detected inside which") {
class TestSpec extends AsyncWordSpec {
"a feature" which {
"test 1" in { succeed }
"test 1" in { succeed }
}
}
val e = intercept[NotAllowedException] {
new TestSpec
}
assert("AsyncWordSpecSpec.scala" == e.failedCodeFileName.get)
assert(e.failedCodeLineNumber.get == thisLineNumber - 7)
assert(e.cause.isDefined)
val causeThrowable = e.cause.get
assert(e.message == Some(FailureMessages.exceptionWasThrownInWhichClause(prettifier, UnquotedString(causeThrowable.getClass.getName), "a feature which", FailureMessages.duplicateTestName(prettifier, UnquotedString("a feature which test 1")))))
assert(causeThrowable.isInstanceOf[DuplicateTestNameException])
val cause = causeThrowable.asInstanceOf[DuplicateTestNameException]
assert(cause.getMessage == FailureMessages.duplicateTestName(prettifier, UnquotedString("a feature which test 1")))
}
it("should throw NotAllowedException wrapping a DuplicateTestNameException when duplicate test name is detected inside can") {
class TestSpec extends AsyncWordSpec {
"a feature" can {
"test 1" in { succeed }
"test 1" in { succeed }
}
}
val e = intercept[NotAllowedException] {
new TestSpec
}
assert("AsyncWordSpecSpec.scala" == e.failedCodeFileName.get)
assert(e.failedCodeLineNumber.get == thisLineNumber - 7)
assert(e.cause.isDefined)
val causeThrowable = e.cause.get
assert(e.message == Some(FailureMessages.exceptionWasThrownInCanClause(prettifier, UnquotedString(causeThrowable.getClass.getName), "a feature", FailureMessages.duplicateTestName(prettifier, UnquotedString("a feature can test 1")))))
assert(causeThrowable.isInstanceOf[DuplicateTestNameException])
val cause = causeThrowable.asInstanceOf[DuplicateTestNameException]
assert(cause.getMessage == FailureMessages.duplicateTestName(prettifier, UnquotedString("a feature can test 1")))
}
it("should throw NotAllowedException wrapping a DuplicateTestNameException when duplicate test name is detected inside shorthand can") {
class TestSpec extends AsyncWordSpec {
"a feature" can {
//DOTTY-ONLY ()
}
it can {
"test 1" in { succeed }
"test 1" in { succeed }
}
}
val e = intercept[NotAllowedException] {
new TestSpec
}
assert("AsyncWordSpecSpec.scala" == e.failedCodeFileName.get)
assert(e.failedCodeLineNumber.get == thisLineNumber - 7)
assert(e.cause.isDefined)
val causeThrowable = e.cause.get
assert(e.message == Some(FailureMessages.exceptionWasThrownInCanClause(prettifier, UnquotedString(causeThrowable.getClass.getName), "a feature", FailureMessages.duplicateTestName(prettifier, UnquotedString("a feature can test 1")))))
assert(causeThrowable.isInstanceOf[DuplicateTestNameException])
val cause = causeThrowable.asInstanceOf[DuplicateTestNameException]
assert(cause.getMessage == FailureMessages.duplicateTestName(prettifier, UnquotedString("a feature can test 1")))
}
it("should allow other execution context to be used") {
class TestSpec extends AsyncWordSpec {
// SKIP-SCALATESTJS,NATIVE-START
override implicit val executionContext = scala.concurrent.ExecutionContext.Implicits.global
// SKIP-SCALATESTJS,NATIVE-END
// SCALATESTJS-ONLY override implicit val executionContext = scala.scalajs.concurrent.JSExecutionContext.runNow
val a = 1
"feature 1" should {
"test A" in {
Future { assert(a == 1) }
}
}
"feature 2" should {
"test B" in {
Future { assert(a == 1) }
}
}
"feature 3" should {
"test C" in {
Future { assert(a == 1) }
}
}
}
val suite = new TestSpec
val reporter = new EventRecordingReporter
val status = suite.run(None, Args(reporter))
// SKIP-SCALATESTJS,NATIVE-START
status.waitUntilCompleted()
// SKIP-SCALATESTJS,NATIVE-END
assert(reporter.scopeOpenedEventsReceived.length == 3)
assert(reporter.scopeClosedEventsReceived.length == 3)
assert(reporter.testStartingEventsReceived.length == 3)
assert(reporter.testSucceededEventsReceived.length == 3)
}
}
}
| scalatest/scalatest | jvm/wordspec-test/src/test/scala/org/scalatest/wordspec/AsyncWordSpecSpec.scala | Scala | apache-2.0 | 39,929 |
package root
import sbt._
import Keys._
import com.typesafe.sbt.osgi.OsgiKeys
import OsgiKeys._
import root.libraries._
import org.openmole.buildsystem.OMKeys._
import org.scalajs.sbtplugin.ScalaJSPlugin
import org.scalajs.sbtplugin.ScalaJSPlugin.autoImport._
/**
* Created with IntelliJ IDEA.
* User: luft
* Date: 3/17/13
* Time: 6:50 PM
* To change this template use File | Settings | File Templates.
*/
object OSGi extends Defaults(Apache) {
val dir = file("target/libraries")
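  // Each OsgiProject below re-wraps a plain third-party jar as an OSGi bundle, pinning its version and spelling out its export/private package rules.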
lazy val jetty = OsgiProject(
"org.eclipse.jetty",
exports = Seq("org.eclipse.jetty.*", "javax.*")) settings(
libraryDependencies ++= Seq("org.eclipse.jetty" % "jetty-webapp" % "8.1.8.v20121106", "org.eclipse.jetty.orbit" % "javax.servlet" % "3.0.0.v201112011016"),
version := "8.1.8.v20121106"
)
lazy val scalatraVersion = "2.3.0"
lazy val scalatra = OsgiProject("org.scalatra",
dynamicImports = Seq("*"),
exports = Seq("org.scalatra.*, org.fusesource.*"),
privatePackages = Seq("!scala.*", "!org.slf4j.*", "!org.json4s", "*")) settings
(libraryDependencies ++= Seq("org.scalatra" %% "scalatra" % scalatraVersion,
"org.scalatra" %% "scalatra-json" % scalatraVersion), version := scalatraVersion) dependsOn (slf4j)
lazy val scalate = OsgiProject("scalate", exports = Seq("org.scalatra.*")) settings
(libraryDependencies += "org.scalatra" %% "scalatra-scalate" % scalatraVersion, version := scalatraVersion)
lazy val jacksonJson = OsgiProject("org.json4s") settings(
libraryDependencies += "org.json4s" %% "json4s-jackson" % "3.2.9",
exportPackage += "com.fasterxml.*",
version := "3.2.9"
)
lazy val logback = OsgiProject("ch.qos.logback", exports = Seq("ch.qos.logback.*", "org.slf4j.impl")) settings
(libraryDependencies += "ch.qos.logback" % "logback-classic" % "1.0.9", version := "1.0.9")
lazy val h2Version = "1.3.176"
lazy val h2 = OsgiProject("org.h2", dynamicImports = Seq("*"), privatePackages = Seq("META-INF.*")) settings
(libraryDependencies += "com.h2database" % "h2" % h2Version, version := h2Version)
lazy val bonecp = OsgiProject("com.jolbox.bonecp", dynamicImports = Seq("*")) settings
(libraryDependencies += "com.jolbox" % "bonecp" % "0.8.0-rc1", version := "0.8.0-rc1")
lazy val slickVersion = "2.1.0"
lazy val slick = OsgiProject("com.typesafe.slick", exports = Seq("scala.slick.*")) settings
(libraryDependencies += "com.typesafe.slick" %% "slick" % slickVersion, version := slickVersion)
lazy val slf4j = OsgiProject("org.slf4j") settings(
libraryDependencies += "org.slf4j" % "slf4j-api" % "1.7.10",
version := "1.7.10"
)
lazy val xstream = OsgiProject(
"com.thoughtworks.xstream",
dynamicImports = Seq("*"),
privatePackages = Seq("!scala.*", "*")) settings(
libraryDependencies ++= Seq("com.thoughtworks.xstream" % "xstream" % "1.4.8", "net.sf.kxml" % "kxml2" % "2.3.0"),
version := "1.4.8",
bundleType += "dbserver")
lazy val groovy = OsgiProject(
"org.codehaus.groovy",
dynamicImports = Seq("*"),
exports = Seq("groovy.*", "org.codehaus.*"),
privatePackages = Seq("!scala.*,*")) settings(
libraryDependencies ++= Seq("org.codehaus.groovy" % "groovy-all" % "2.4.1", "org.fusesource.jansi" % "jansi" % "1.11"),
version := "2.4.1"
)
lazy val scalaLang = OsgiProject("org.scala-lang.scala-library", exports = Seq("akka.*", "com.typesafe.*", "scala.*", "scalax.*", "jline.*"),
privatePackages = Seq("*"), dynamicImports = Seq("*")) settings
(libraryDependencies <++= (scalaVersion) { sV ⇒
Seq("org.scala-lang" % "scala-library" % sV,
"org.scala-lang" % "scala-reflect" % sV,
"jline" % "jline" % "2.12.1",
"com.typesafe.akka" %% "akka-actor" % "2.3.9",
"com.typesafe.akka" %% "akka-transactor" % "2.3.9",
"com.typesafe" % "config" % "1.2.1",
"com.github.scala-incubator.io" %% "scala-io-core" % "0.4.3",
"org.scala-lang" % "scala-compiler" % sV
)
}, version := scalaVersion.value)
lazy val jodaTime = OsgiProject("org.joda.time") settings(
libraryDependencies += "joda-time" % "joda-time" % "1.6",
version := "1.6"
)
lazy val jasyptVersion = "1.9.2"
lazy val jasypt = OsgiProject("org.jasypt.encryption", exports = Seq("org.jasypt.*")) settings(
libraryDependencies += "org.jasypt" % "jasypt" % jasyptVersion,
version := jasyptVersion
)
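  // The "noscala" NetLogo bundles ship without the Scala standard library (autoScalaLibrary := false): NetLogo 4 and 5 were built against older Scala versions (2.8.0 and 2.9.2) that must not leak into the rest of the bundle set.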
lazy val netlogo4_noscala = OsgiProject("ccl.northwestern.edu.netlogo4.noscala", exports = Seq("org.nlogo.*"),
privatePackages = Seq("!scala.*", "*")) settings
(libraryDependencies ++=
Seq("ccl.northwestern.edu" % "netlogo" % "4.1.3",
"org.picocontainer" % "picocontainer" % "2.8",
"org.objectweb" % "asm" % "3.1",
"org.objectweb" % "asm-commons" % "3.1"), version := "4.1.3", autoScalaLibrary := false, bundleType := Set("all"), scalaVersion := "2.8.0", crossPaths := false,
ivyScala ~= { (is: Option[IvyScala]) ⇒ //should disable the binary compat warnings this causes
for (i ← is) yield i.copy(checkExplicit = false)
})
lazy val netlogo4 = OsgiProject("ccl.northwestern.edu.netlogo4", exports = Seq("org.nlogo.*"),
privatePackages = Seq("*")) settings
(libraryDependencies ++=
Seq("ccl.northwestern.edu" % "netlogo" % "4.1.3",
"org.picocontainer" % "picocontainer" % "2.8",
"org.objectweb" % "asm" % "3.1",
"org.objectweb" % "asm-commons" % "3.1"), version := "4.1.3", scalaVersion := "2.8.0", crossPaths := false, bundleType := Set("plugin"))
lazy val netLogo5Version = "5.1.0"
lazy val netlogo5_noscala = OsgiProject("ccl.northwestern.edu.netlogo5.noscala", exports = Seq("org.nlogo.*"),
privatePackages = Seq("!scala.*", "*")) settings
(libraryDependencies ++=
Seq("ccl.northwestern.edu" % "netlogo" % netLogo5Version,
"org.objectweb" % "asm-all" % "3.3.1",
"org.picocontainer" % "picocontainer" % "2.13.6"), version := netLogo5Version, autoScalaLibrary := false, bundleType := Set("all"), scalaVersion := "2.9.2", crossPaths := false,
ivyScala ~= { (is: Option[IvyScala]) ⇒ //See netlogo4_noscala
for (i ← is) yield i.copy(checkExplicit = false)
})
lazy val netlogo5 = OsgiProject("ccl.northwestern.edu.netlogo5", exports = Seq("org.nlogo.*"),
privatePackages = Seq("*")) settings
(libraryDependencies ++= Seq("ccl.northwestern.edu" % "netlogo" % netLogo5Version,
"org.scala-lang" % "scala-library" % "2.9.2",
"org.objectweb" % "asm-all" % "3.3.1",
"org.picocontainer" % "picocontainer" % "2.13.6"), version := netLogo5Version, scalaVersion := "2.9.2", crossPaths := false, bundleType := Set("plugin"))
lazy val guava = OsgiProject("com.google.guava",
exports = Seq("com.google.common.*"), privatePackages = Seq("!scala.*", "*")) settings(libraryDependencies ++=
Seq("com.google.guava" % "guava" % "18.0", "com.google.code.findbugs" % "jsr305" % "1.3.9"),
version := "18.0"
)
lazy val scalaTagsVersion = "0.4.6"
lazy val scalaRxVersion = "0.2.8"
lazy val scalaDomVersion = "0.8.0"
lazy val scalaJQueryVersion = "0.8.0"
lazy val scalaUpickleVersion = "0.2.7"
lazy val scalaAutowireVersion = "0.2.5"
lazy val scalajsVersion = "0.6.1"
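  // Scala.js 0.6 artifact names carry a "_sjs0.6" suffix; the definitions below splice it in when resolving the JS counterparts of shared libraries.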
lazy val jsSuffix = "_sjs0.6"
lazy val scalajsDom = OsgiProject("scalajs-dom", exports = Seq("org.scalajs.dom.*")) settings(
libraryDependencies += "org.scala-js" %%% ("scalajs-dom" + jsSuffix) % scalaDomVersion, version := scalaDomVersion)
lazy val scalajsQuery = OsgiProject("scalajs-jquery", exports = Seq("org.scalajs.jquery.*")) settings(
libraryDependencies += "be.doeraene" %%% ("scalajs-jquery" + jsSuffix) % scalaJQueryVersion, version := scalaJQueryVersion)
lazy val scalajsTools = OsgiProject("scalajs-tools", exports = Seq("org.scalajs.core.tools.*", "org.scalajs.core.ir.*", "com.google.javascript.*", "com.google.common.*", "rhino_ast.java.com.google.javascript.rhino.*", "org.json.*")) settings(
libraryDependencies += "org.scala-js" %% "scalajs-tools" % scalajsVersion, version := scalajsVersion)
lazy val scalajsLibrary = OsgiProject("scalajs-library", exports = Seq("scala.scalajs.*","*.sjsir")) settings(
libraryDependencies += "org.scala-js" %% "scalajs-library" % scalajsVersion, version := scalajsVersion)
lazy val scalaTags = OsgiProject("com.scalatags", exports = Seq("scalatags.*", "*.sjsir")) settings(
libraryDependencies ++= Seq("com.lihaoyi" %% "scalatags" % scalaTagsVersion,
"com.lihaoyi" %%% ("scalatags" + jsSuffix) % scalaTagsVersion),
version := scalaTagsVersion
)
lazy val rx = OsgiProject("rx", exports = Seq("rx.*", "*.sjsir")) settings(
libraryDependencies ++= Seq("com.lihaoyi" %% "scalarx" % scalaRxVersion,
"com.lihaoyi" %%% ("scalarx" + jsSuffix) % scalaRxVersion),
version := scalaRxVersion
)
lazy val upickle = OsgiProject("upickle", exports = Seq("upickle.*", "jawn.*", "*.sjsir")) settings(
libraryDependencies ++= Seq("com.lihaoyi" %% "upickle" % scalaUpickleVersion,
"com.lihaoyi" %%% ("upickle" + jsSuffix) % scalaUpickleVersion),
version := scalaUpickleVersion
)
lazy val autowire = OsgiProject("autowire", exports = Seq("autowire.*", "*.sjsir")) settings(
libraryDependencies ++= Seq("com.lihaoyi" %% "autowire" % scalaAutowireVersion,
"com.lihaoyi" %%% ("autowire" + jsSuffix) % scalaAutowireVersion),
version := scalaAutowireVersion
)
lazy val jawnVersion = "0.6.0"
lazy val jawn = OsgiProject("jawn", exports = Seq("jawn.*", "utf8.json")) settings(
libraryDependencies += "org.spire-math" %% "jawn-parser" % jawnVersion, version := jawnVersion)
lazy val scaladgetVersion = "0.3.0"
lazy val scaladget = OsgiProject("scaladget", exports = Seq("fr.iscpif.scaladget.*", "*.sjsir")) settings(
libraryDependencies += "fr.iscpif" %%% ("scaladget" + jsSuffix) % scaladgetVersion, version := scaladgetVersion)
lazy val jsonSimpleVersion = "1.1.1"
lazy val jsonSimple = OsgiProject("json-simple", exports = Seq("org.json.simple.*")) settings(
libraryDependencies += "com.googlecode.json-simple" % "json-simple" % jsonSimpleVersion, version := jsonSimpleVersion)
lazy val closureCompilerVersion = "v20130603"
lazy val closureCompiler = OsgiProject("closure-compiler", exports = Seq("com.google.javascript.*")) settings(
libraryDependencies += "com.google.javascript" % "closure-compiler" % closureCompilerVersion, version := closureCompilerVersion)
lazy val mgoVersion = "1.79"
lazy val mgo = OsgiProject("fr.iscpif.mgo") settings(
libraryDependencies += "fr.iscpif" %% "mgo" % mgoVersion,
version := mgoVersion
)
val monocleVersion = "1.0.1"
lazy val monocle = OsgiProject("monocle", privatePackages = Seq("!scala.*", "*")) settings(
libraryDependencies += "com.github.julien-truffaut" %% "monocle-core" % monocleVersion,
libraryDependencies += "com.github.julien-truffaut" %% "monocle-generic" % monocleVersion,
libraryDependencies += "com.github.julien-truffaut" %% "monocle-macro" % monocleVersion,
version := monocleVersion
)
lazy val familyVersion = "1.0"
lazy val family = OsgiProject("fr.iscpif.family") settings(
libraryDependencies += "fr.iscpif" %% "family" % familyVersion,
version := familyVersion
)
lazy val opencsv = OsgiProject("au.com.bytecode.opencsv") settings(
libraryDependencies += "net.sf.opencsv" % "opencsv" % "2.3",
version := "2.3"
)
lazy val arm = OsgiProject("com.jsuereth.scala-arm") settings(
libraryDependencies += "com.jsuereth" %% "scala-arm" % "1.4",
version := "1.4",
exportPackage := Seq("resource.*"))
lazy val scalajHttp = OsgiProject("org.scalaj.scalaj-http") settings(
libraryDependencies += "org.scalaj" %% "scalaj-http" % "0.3.15",
version := "0.3.15",
exportPackage := Seq("scalaj.http.*")
)
lazy val scopt = OsgiProject("com.github.scopt", exports = Seq("scopt.*")) settings(
libraryDependencies += "com.github.scopt" %% "scopt" % "3.2.0",
version := "3.2.0"
)
lazy val scalabc = OsgiProject("fr.iscpif.scalabc", privatePackages = Seq("!scala.*", "!junit.*", "*")) settings(
libraryDependencies += "fr.iscpif" %% "scalabc" % "0.4",
version := "0.4"
)
lazy val scalatexSite =
OsgiProject("com.lihaoyi.scalatex-site", exports = Seq("scalatex.*", "ammonite.*", "scalatags.*"), privatePackages = Seq("!scala.*", "*")) settings (
libraryDependencies += "com.lihaoyi" %% "scalatex-site" % "0.1.1",
version := "0.1.1"
)
}
| ISCPIF/PSEExperiments | openmole-src/libraries/project/src/main/scala/OSGi.scala | Scala | agpl-3.0 | 12,644 |
package eu.brosbit.opos.snippet.view
import _root_.net.liftweb.util._
import Helpers._
import net.liftweb.json.JsonDSL._
import eu.brosbit.opos.lib.Formater
import java.util.Date
import eu.brosbit.opos.model.edu._
import net.liftweb.http.{SHtml, S}
import scala.xml.Unparsed
import net.liftweb.json.DefaultFormats
import net.liftweb.json.JsonParser._
class PerformExamSn extends BaseSnippet {
var nr = 0
val id = S.param("id").getOrElse("")
val exam = Exam.find(id).getOrElse(Exam.create)
val multi = exam.multi
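  // Reuse this student's existing answer sheet for the exam if one exists, otherwise start a fresh one.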
val exAnsList = ExamAnswer.findAll(("exam" -> exam._id.toString) ~ ("authorId" -> user.id.get))
val exAns = if(exAnsList.isEmpty) ExamAnswer.create
else exAnsList.head
def oneExam():CssSel = {
var enterCode = ""
def saveCode() {
if(checkCode(enterCode)) {
exAns.code = enterCode
exAns.exam = exam._id
exAns.authorId = user.id.get
exAns.authorName = user.getFullName
exAns.save
}
else {
S.notice("Nieprawidłowy kod")
S.redirectTo("/view/showquiz/" + exam._id.toString)
}
}
if(exam.quizzes.length > 1 && exAns.code.isEmpty) {
"#code" #> SHtml.text(enterCode, enterCode = _) &
"#attachFrame" #> <span style="display:none;"></span> &
"#descript" #> <span style="display:none;"></span> &
"#subject" #> <span style="display:none;"></span> &
"#endTime *" #> Formater.formatDate(new Date(exam.end)) &
"#test" #> <span style="display:none;"></span> &
"#saveCode" #> SHtml.submit("Zatwierdź", saveCode)
}
else {
val idQuiz = if(exam.quizzes.isEmpty) "0" else exam.quizzes(getGroupInt).toString
val quiz = Quiz.find(idQuiz).getOrElse(Quiz.create)
if(quiz.questions.length < 1) S.redirectTo("/view/exams?Error")
//println("========= quiz: " + quiz.title + " ; length: " + quiz.questions.length.toString)
val questMap = quiz.questions.map(qi => ( qi.q.toString -> qi.p)).toMap
val questions = QuizQuestion.findAll("_id" -> ("$in" -> questMap.keySet.toList))
val questionsItems = questions.map(qu => (qu, questMap(qu._id.toString)))
//println("========= questions: " + questions.length.toString)
"form" #> <span style="display:none;"></span> &
"#descript *" #> exam.description &
"#subject *" #> exam.subjectName &
"#endTime *" #> Formater.formatDate(new Date(exam.end)) &
"#attachLink" #> (if(exam.attach) <input id="attachLink" type="text" readonly="readonly" value={exAns.attach}/>
else <span id=""></span>) &
"#test" #> questionsItems.map( q => "div" #> mkQuestHTML(q._1, q._2))
}
}
def getAnswers() = {
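    // The attachment link and the JSON answer list travel in a single ajax text field, joined by the "##;;@@!!" marker and split apart again on receipt.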
val answers = exAns.attach + "##;;@@!![" + exAns.answers.map(_.json).mkString(",") + "]"
//println("========= answers: " + answers)
"#answers" #> SHtml.ajaxText(answers, (data) => {
//println("========== get Answers acctions: " + data)
val arr = data.split("##;;@@!!")
val link = arr(0)
val json = arr(1)
exAns.answers = createFromJsonList(json)
exAns.exam = exam._id
exAns.attach = link
exAns.authorId = user.id.get
exAns.authorName = user.getFullName
if( (exam.end + 30000L) >= new Date().getTime) exAns.save
})
}
def setTime() = {
val now = new Date().getTime
val end = (exam.end - now) / 1000L
"#secondsToEnd *" #> end.toString
}
protected def mkQuestHTML(question:QuizQuestion, pkt:Int) = {
nr += 1
<section class="question" id={"_" + question._id.toString } name={"zad_" + nr.toString}>
<div class="panel panel-info">
<div class="panel-heading">
<span class="quizNr">{nr.toString}</span>
Zadanie <span class="badge" title="Punkty">{pkt.toString} pkt.</span></div>
<div class="panel-body">
<div class="questionText">
{Unparsed(question.question)}
</div>
<div class="form-group">{createAnswers(question.answers, question.fake)}</div>
</div>
</div>
</section>
}
protected def createAnswers(good:List[String], fake:List[String]) = {
val name = "quest_" + nr.toString
if(fake.isEmpty) {
if(good.nonEmpty) <input type="text" class="form-control" value="" name={name} />
else <textarea class="form-control" name={name} rows="10"></textarea>
}
else {
val aType = if(good.length > 1 || multi) "checkbox" else "radio"
      val all = (fake ++ good).sortWith(_ < _).map(s => <div class={aType}>
<label><input type={aType} value={s} name={name}/>
{Unparsed(s)} </label> </div>)
<div class="answerBox">{all}</div>
}
}
private def createFromJsonList(jsonStr: String) = {
var data:List[AnswerItem] = Nil
implicit val formats = DefaultFormats
try {
val json = parse(jsonStr)
data = json.extract[List[AnswerItem]]
    } catch { case _: Throwable => println("ERROR: failed to parse answer JSON in PerformExamSn") }
data
}
private def checkCode(c: String):Boolean = {
!findIfCodeExists(c) && exam.keys.exists(k => k == c)
}
//if code was already used
private def findIfCodeExists(c:String) =
ExamAnswer.findAll(("code" -> c)~("exam" -> exam._id.toString)).nonEmpty
private def getGroupInt = {
if(exAns.code.isEmpty) 0
else {
exAns.code.charAt(0).toInt -'A'.toInt
}
}
}
| mikolajs/osp | src/main/scala/eu/brosbit/opos/snippet/view/PerformExamSn.scala | Scala | agpl-3.0 | 5,417 |
package org.nexbook.tools.fixordergenerator.app
import java.util.concurrent.atomic.AtomicLong
import akka.actor.{ActorSystem, Props}
import com.typesafe.config.Config
import org.joda.time.{DateTime, DateTimeZone}
import org.nexbook.tools.fixordergenerator.fix.FixMessageSender.FixMessageWithSession
import org.nexbook.tools.fixordergenerator.fix.{FixConnector, FixMessageRouter}
import org.nexbook.tools.fixordergenerator.generator.{OrderCancelExecutor, OrderGenerator, PriceGenerator, SymbolGenerator}
import org.nexbook.tools.fixordergenerator.repository.{PriceRepository, PricesLoader}
import org.slf4j.{Logger, LoggerFactory}
import quickfix.field.{MsgType, TransactTime}
import quickfix.fix44.{NewOrderSingle, OrderCancelRequest}
import quickfix.{DataDictionary, Message, Session, SessionID}
import scala.io.Source
/**
* Created by milczu on 13.12.15
*/
trait RunningStrategy {
val actorSystem = ActorSystem("FixMessageSenderSystem")
val fixMessageSenderActor = actorSystem.actorOf(Props[FixMessageRouter](new FixMessageRouter(fixSessions) with FixConnector))
def startWork(): Unit
def logger: Logger
def fixSessions: List[Session]
def appConfig: AppConfig
}
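/** Caps order generation: when limit.limited is enabled, no further orders are produced once orderCounter reaches limit.maxOrderCount. */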
trait OrderCountingGenerator {
val isLimitRestriction = generatorConfig.getBoolean("limit.limited")
val orderLimit = generatorConfig.getInt("limit.maxOrderCount")
def generatorConfig: Config
def orderCounter: AtomicLong
def canBeGeneratedNextOrder = !isLimitRestriction || orderCounter.get < orderLimit
}
trait OrderGeneratingRunningStrategy extends RunningStrategy with OrderCountingGenerator {
val priceGenerator = new PriceGenerator(new PriceRepository(new PricesLoader(appConfig.supportedSymbols).loadCurrentPrices))
val orderGenerator = new OrderGenerator(new SymbolGenerator(appConfig.supportedSymbols), priceGenerator)
val orderCounter: AtomicLong = new AtomicLong
val orderCancelExecutor = actorSystem.actorOf(Props(new OrderCancelExecutor(actorSystem, fixMessageSenderActor, appConfig.generatorConfig, orderCounter)), "orderCancelExecutor")
def appConfig: AppConfig
def generateAndPublishOrder(session: Session) = {
val order = orderGenerator.generate()
orderCounter.incrementAndGet
val msg = FixMessageWithSession(order, session)
fixMessageSenderActor ! msg
orderCancelExecutor ! msg
}
}
class AkkaNewOrderGeneratingStrategy(val fixSessions: List[Session], val appConfig: AppConfig) extends OrderGeneratingRunningStrategy {
this: FixConnector =>
override val logger: Logger = LoggerFactory.getLogger(classOf[AkkaNewOrderGeneratingStrategy])
def generatorConfig = appConfig.generatorConfig
override def startWork() = {
waitForLogon()
while (canBeGeneratedNextOrder) {
for (session <- loggedSessions) {
if (canBeGeneratedNextOrder) {
generateAndPublishOrder(session)
}
}
}
if (canBeGeneratedNextOrder) {
startWork()
} else {
logger.debug("Waiting for send messages over FIX")
waitForSendMessagesOverFix()
logger.debug("Waiting for send messages over FIX - FINISHED")
}
}
}
class FileBasedPublisherStrategy(val fixSessions: List[Session], val appConfig: AppConfig) extends RunningStrategy {
this: FixConnector =>
override val logger: Logger = LoggerFactory.getLogger(classOf[FileBasedPublisherStrategy])
val fileName = appConfig.fileBasedStrategyConfig.getString("msgFileName")
val dataDictionary = new DataDictionary("config/FIX44.xml")
override def startWork() = {
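    // Pipeline: read raw FIX strings from the file, parse them against the FIX44 dictionary, specialize to concrete message types, refresh TransactTime, then route each message to the session named in its header.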
def toFixMessage(line: String): Message = new Message(line, dataDictionary, false)
def fixMsgToSpecializedMsg(msg: Message): Message = {
msg.getHeader.getField(new MsgType()).getValue match {
case NewOrderSingle.MSGTYPE =>
val newOrderSingle = new NewOrderSingle
newOrderSingle.fromString(msg.toString, dataDictionary, false)
newOrderSingle
case OrderCancelRequest.MSGTYPE =>
val orderCancelRequest = new OrderCancelRequest
orderCancelRequest.fromString(msg.toString, dataDictionary, false)
orderCancelRequest
case _ => msg
}
}
def withUpdatedFields(msg: Message): Message = msg.getHeader.getField(new MsgType()).getValue match {
case NewOrderSingle.MSGTYPE =>
val newOrderSingle: NewOrderSingle = msg.asInstanceOf[NewOrderSingle]
newOrderSingle.set(new TransactTime(DateTime.now(DateTimeZone.UTC).toDate))
newOrderSingle
case OrderCancelRequest.MSGTYPE =>
val orderCancelRequest: OrderCancelRequest = msg.asInstanceOf[OrderCancelRequest]
orderCancelRequest.set(new TransactTime(DateTime.now(DateTimeZone.UTC).toDate))
orderCancelRequest
        case _ => msg // pass any other message type through unchanged instead of failing with a MatchError
      }
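      // Rebuilds the SessionID from the standard FIX header tags: 8 = BeginString, 49 = SenderCompID, 56 = TargetCompID.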
def resolveSession(message: Message): Session = {
def sessionID: SessionID = {
val header = message.getHeader
val beginString = header.getString(8)
val senderCompID = header.getString(49)
val targetCompID = header.getString(56)
val qualifier = ""
new SessionID(beginString, senderCompID, targetCompID, qualifier)
}
Session.lookupSession(sessionID)
}
waitForLogon()
logger.info("All sessions logged. Reading FIX msgs from file")
val lines: List[String] = Source.fromFile(fileName).getLines.toList
logger.info(s"All sessions logged. Readed msgs: ${lines.size}")
val fixMsgs: List[FixMessageWithSession] = lines.map(toFixMessage).map(fixMsgToSpecializedMsg).map(withUpdatedFields).map(msg => FixMessageWithSession(msg, resolveSession(msg)))
for (fixMsg <- fixMsgs) {
fixMessageSenderActor ! fixMsg
}
waitForSendMessagesOverFix()
actorSystem.shutdown()
}
} | milczarekIT/fix-order-generator | src/main/scala/org/nexbook/tools/fixordergenerator/app/RunningStrategy.scala | Scala | apache-2.0 | 5,456 |