code (string, lengths 5-1M) | repo_name (string, lengths 5-109) | path (string, lengths 6-208) | language (string, 1 class) | license (string, 15 classes) | size (int64, 5-1M) |
---|---|---|---|---|---|
// scalac: -opt:l:inline -opt-inline-from:**
//
/*
* filter: optimizer warnings;
*/
import java.lang.Thread.holdsLock
import scala.collection.mutable.StringBuilder
object Util {
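// checkLocks prints a compact trace of lock ownership ('.' = as expected, '!' = unexpected)
// and returns true only if the current thread holds every lock in `held` and none in `notHeld`.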
def checkLocks(held: AnyRef*)(notHeld: AnyRef*) = {
val sb = new StringBuilder
for (lock <- held) {
sb.append(if (holdsLock(lock)) '.' else '!')
}
print("%5s|" format sb)
sb.clear()
for (lock <- notHeld) {
sb.append(if (holdsLock(lock)) '!' else '.')
}
print("%-15s " format sb)
(held forall holdsLock) && !(notHeld exists holdsLock)
}
}
class C1 {
import Util._
val lock = new AnyRef
def f1 = synchronized { checkLocks(this)(this.getClass) }
@inline final def fi = synchronized { checkLocks(this)(this.getClass) }
val fv: () => Boolean = () => synchronized { checkLocks(this)(this.getClass, fv, fv.getClass) }
def ff = {
lazy val ffv: AnyRef => Boolean = lock => synchronized { checkLocks(lock)(ffv, ffv.getClass, lock.getClass) }
ffv(this)
}
def fl = {
lazy val flv = synchronized { checkLocks(this)(this.getClass) }
flv
}
def fo = lock.synchronized { checkLocks(lock)(lock.getClass, this, this.getClass) }
def fc = {
def fcf(f0: => Boolean) = synchronized { f0 }
fcf(checkLocks(this)(this.getClass))
}
def g1 = checkLocks()(this, this.getClass)
@inline final def gi = checkLocks()(this, this.getClass)
val gv: () => Boolean = () => checkLocks()(this, this.getClass, gv, gv.getClass)
def gf = {
lazy val gfv: AnyRef => Boolean = lock => checkLocks()(C1.this, gfv, gfv.getClass, lock, lock.getClass)
gfv(this)
}
def gl = {
lazy val glv = checkLocks()(this, this.getClass)
glv
}
class C {
def f1 = synchronized { checkLocks(this)(this.getClass, C1.this, C1.this.getClass) }
@inline final def fi = synchronized { checkLocks(this)(this.getClass, C1.this, C1.this.getClass) }
val fv: () => Boolean = () => synchronized { checkLocks(this)(this.getClass, C1.this, C1.this.getClass, fv, fv.getClass) }
def ff = {
lazy val ffv: AnyRef => Boolean = lock => synchronized { checkLocks(lock)(ffv, ffv.getClass, lock.getClass, C1.this, C1.this.getClass) }
ffv(this)
}
def fl = {
lazy val flv = synchronized { checkLocks(this)(this.getClass, C1.this, C1.this.getClass) }
flv
}
def fo = lock.synchronized { checkLocks(lock)(lock.getClass, this, this.getClass, C1.this, C1.this.getClass) }
def fn = C1.this.synchronized { checkLocks(C1.this)(C1.this.getClass, this, this.getClass) }
def fc = {
def fcf(f0: => Boolean) = synchronized { f0 }
fcf(checkLocks(this)(this.getClass, C1.this, C1.this.getClass))
}
def g1 = checkLocks()(this, this.getClass, C1.this, C1.this.getClass)
@inline final def gi = checkLocks()(this, this.getClass, C1.this, C1.this.getClass)
val gv: () => Boolean = () => checkLocks()(this, this.getClass, C1.this, C1.this.getClass, gv, gv.getClass)
def gf = {
lazy val gfv: AnyRef => Boolean = lock => checkLocks()(gfv, gfv.getClass, lock, lock.getClass, C1.this, C1.this.getClass)
gfv(this)
}
def gl = {
lazy val glv = checkLocks()(this, this.getClass, C1.this, C1.this.getClass)
glv
}
}
val c = new C
object O {
def f1 = synchronized { checkLocks(this)(this.getClass, C1.this, C1.this.getClass) }
@inline final def fi = synchronized { checkLocks(this)(this.getClass, C1.this, C1.this.getClass) }
val fv: () => Boolean = () => synchronized { checkLocks(this)(this.getClass, fv, fv.getClass, C1.this, C1.this.getClass) }
def ff = {
lazy val ffv: AnyRef => Boolean = lock => synchronized { checkLocks(lock)(lock.getClass, ffv, ffv.getClass, C1.this, C1.this.getClass) }
ffv(this)
}
def fl = {
lazy val flv = synchronized { checkLocks(this)(this.getClass, C1.this, C1.this.getClass) }
flv
}
def fo = lock.synchronized { checkLocks(lock)(lock.getClass, this, this.getClass, C1.this, C1.this.getClass) }
def fn = C1.this.synchronized { checkLocks(C1.this)(C1.this.getClass, this, this.getClass) }
def fc = {
def fcf(f0: => Boolean) = synchronized { f0 }
fcf(checkLocks(this)(this.getClass, C1.this, C1.this.getClass))
}
def g1 = checkLocks()(this, this.getClass, C1.this, C1.this.getClass)
@inline final def gi = checkLocks()(this, this.getClass, C1.this, C1.this.getClass)
val gv: () => Boolean = () => checkLocks()(this, this.getClass, gv, gv.getClass, C1.this, C1.this.getClass)
def gf = {
lazy val gfv: AnyRef => Boolean = lock => checkLocks()(lock, lock.getClass, gfv, gfv.getClass, C1.this, C1.this.getClass)
gfv(this)
}
def gl = {
lazy val glv = checkLocks()(this, this.getClass, C1.this, C1.this.getClass)
glv
}
}
}
object O1 {
import Util._
val lock = new AnyRef
def f1 = synchronized { checkLocks(this)(this.getClass) }
@inline final def fi = synchronized { checkLocks(this)(this.getClass) }
val fv: () => Boolean = () => synchronized { checkLocks(this)(this.getClass, fv, fv.getClass) }
def ff = {
lazy val ffv: AnyRef => Boolean = lock => synchronized { checkLocks(lock)(ffv, ffv.getClass, lock.getClass) }
ffv(this)
}
def fl = {
lazy val flv = synchronized { checkLocks(this)(this.getClass) }
flv
}
def fo = lock.synchronized { checkLocks(lock)(lock.getClass, this, this.getClass) }
def fc = {
def fcf(f0: => Boolean) = synchronized { f0 }
fcf(checkLocks(this)(this.getClass))
}
def g1 = checkLocks()(this, this.getClass)
@inline final def gi = checkLocks()(this, this.getClass)
val gv: () => Boolean = () => checkLocks()(this, this.getClass, gv, gv.getClass)
def gf = {
lazy val gfv: AnyRef => Boolean = lock => checkLocks()(gfv, gfv.getClass, lock, lock.getClass)
gfv(this)
}
def gl = {
lazy val glv = checkLocks()(this, this.getClass)
glv
}
class C {
def f1 = synchronized { checkLocks(this)(this.getClass, O1, O1.getClass) }
@inline final def fi = synchronized { checkLocks(this)(this.getClass, O1, O1.getClass) }
val fv: () => Boolean = () => synchronized { checkLocks(this)(this.getClass, O1, O1.getClass, fv, fv.getClass) }
def ff = {
lazy val ffv: AnyRef => Boolean = lock => synchronized { checkLocks(lock)(ffv, ffv.getClass, lock.getClass, O1, O1.getClass) }
ffv(this)
}
def fl = {
lazy val flv = synchronized { checkLocks(this)(this.getClass, O1, O1.getClass) }
flv
}
def fo = lock.synchronized { checkLocks(lock)(lock.getClass, this, this.getClass, O1, O1.getClass) }
def fn = O1.synchronized { checkLocks(O1)(O1.getClass, this, this.getClass) }
def fc = {
def fcf(f0: => Boolean) = synchronized { f0 }
fcf(checkLocks(this)(this.getClass, O1, O1.getClass))
}
def g1 = checkLocks()(this, this.getClass, O1, O1.getClass)
@inline final def gi = checkLocks()(this, this.getClass, O1, O1.getClass)
val gv: () => Boolean = () => checkLocks()(this, this.getClass, O1, O1.getClass, gv, gv.getClass)
def gf = {
lazy val gfv: AnyRef => Boolean = lock => checkLocks()(gfv, gfv.getClass, lock, lock.getClass, O1, O1.getClass)
gfv(this)
}
def gl = {
lazy val glv = checkLocks()(this, this.getClass, O1, O1.getClass)
glv
}
}
val c = new C
object O {
def f1 = synchronized { checkLocks(this)(this.getClass, O1, O1.getClass) }
@inline final def fi = synchronized { checkLocks(this)(this.getClass, O1, O1.getClass) }
val fv: () => Boolean = () => synchronized { checkLocks(this)(this.getClass, fv, fv.getClass, O1, O1.getClass) }
def ff = {
lazy val ffv: AnyRef => Boolean = lock => synchronized { checkLocks(lock)(lock.getClass, ffv, ffv.getClass, O1, O1.getClass) }
ffv(this)
}
def fl = {
lazy val flv = synchronized { checkLocks(this)(this.getClass, O1, O1.getClass) }
flv
}
def fo = lock.synchronized { checkLocks(lock)(lock.getClass, this, this.getClass, O1, O1.getClass) }
def fn = O1.synchronized { checkLocks(O1)(O1.getClass, this, this.getClass) }
def fc = {
def fcf(f0: => Boolean) = synchronized { f0 }
fcf(checkLocks(this)(this.getClass, O1, O1.getClass))
}
def g1 = checkLocks()(this, this.getClass, O1, O1.getClass)
@inline final def gi = checkLocks()(this, this.getClass, O1, O1.getClass)
val gv: () => Boolean = () => checkLocks()(this, this.getClass, gv, gv.getClass, O1, O1.getClass)
def gf = {
lazy val gfv: AnyRef => Boolean = lock => checkLocks()(lock, lock.getClass, gfv, gfv.getClass, O1, O1.getClass)
gfv(this)
}
def gl = {
lazy val glv = checkLocks()(this, this.getClass, O1, O1.getClass)
glv
}
}
}
trait T {
import Util._
val lock = new AnyRef
def f1 = synchronized { checkLocks(this)(this.getClass, classOf[T], classOf[C2], O2.getClass) }
@inline final def fi = synchronized { checkLocks(this)(this.getClass, classOf[T], classOf[C2], O2.getClass) }
val fv: () => Boolean = () => synchronized { checkLocks(this)(this.getClass, fv, fv.getClass, classOf[T], classOf[C2], O2.getClass) }
def ff = {
lazy val ffv: AnyRef => Boolean = lock => synchronized { checkLocks(lock)(ffv, ffv.getClass, lock.getClass, classOf[T], classOf[C2], O2.getClass) }
ffv(this)
}
def fl = {
lazy val flv = synchronized { checkLocks(this)(this.getClass, classOf[T], classOf[C2], O2.getClass) }
flv
}
def fo = lock.synchronized { checkLocks(lock)(lock.getClass, this, this.getClass, classOf[T], classOf[C2], O2.getClass) }
def fc = {
def fcf(f0: => Boolean) = synchronized { f0 }
fcf(checkLocks(this)(this.getClass, classOf[T], classOf[C2], O2.getClass))
}
def g1 = checkLocks()(this, this.getClass, classOf[T], classOf[C2], O2, O2.getClass)
@inline final def gi = checkLocks()(this, this.getClass, classOf[T], classOf[C2], O2, O2.getClass)
val gv: () => Boolean = () => checkLocks()(this, this.getClass, gv, gv.getClass, classOf[T], classOf[C2], O2, O2.getClass)
def gf = {
lazy val gfv: AnyRef => Boolean = lock => checkLocks()(gfv, gfv.getClass, lock, lock.getClass, classOf[T], classOf[C2], O2, O2.getClass)
gfv(this)
}
def gl = {
lazy val glv = checkLocks()(this, this.getClass, classOf[T], classOf[C2], O2.getClass)
glv
}
class C {
def f1 = synchronized { checkLocks(this)(this.getClass, T.this, T.this.getClass, classOf[T], classOf[C2], O2, O2.getClass) }
@inline final def fi = synchronized { checkLocks(this)(this.getClass, T.this, T.this.getClass, classOf[T], classOf[C2], O2, O2.getClass) }
val fv: () => Boolean = () => synchronized { checkLocks(this)(this.getClass, T.this, T.this.getClass, fv, fv.getClass, classOf[T], classOf[C2], O2, O2.getClass) }
def ff = {
lazy val ffv: AnyRef => Boolean = lock => synchronized { checkLocks(lock)(ffv, ffv.getClass, lock.getClass, T.this, T.this.getClass, classOf[T], classOf[C2], O2, O2.getClass) }
ffv(this)
}
def fl = {
lazy val flv = synchronized { checkLocks(this)(this.getClass, T.this, T.this.getClass, classOf[T], classOf[C2], O2, O2.getClass) }
flv
}
def fo = lock.synchronized { checkLocks(lock)(lock.getClass, this, this.getClass, T.this, T.this.getClass, classOf[T], classOf[C2], O2, O2.getClass) }
def fn = T.this.synchronized { checkLocks(T.this)(T.this.getClass, this, this.getClass, classOf[T], classOf[C2], O2.getClass) }
def fc = {
def fcf(f0: => Boolean) = synchronized { f0 }
fcf(checkLocks(this)(this.getClass, T.this, T.this.getClass, classOf[T], classOf[C2], O2, O2.getClass))
}
def g1 = checkLocks()(this, this.getClass, T.this, T.this.getClass, classOf[T], classOf[C2], O2, O2.getClass)
@inline final def gi = checkLocks()(this, this.getClass, T.this, T.this.getClass, classOf[T], classOf[C2], O2, O2.getClass)
val gv: () => Boolean = () => checkLocks()(this, this.getClass, T.this, T.this.getClass, gv, gv.getClass, classOf[T], classOf[C2], O2, O2.getClass)
def gf = {
lazy val gfv: AnyRef => Boolean = lock => checkLocks()(gfv, gfv.getClass, lock, lock.getClass, T.this, T.this.getClass, classOf[T], classOf[C2], O2, O2.getClass)
gfv(this)
}
def gl = {
lazy val glv = checkLocks()(this, this.getClass, T.this, T.this.getClass, classOf[T], classOf[C2], O2, O2.getClass)
glv
}
}
val c = new C
object O {
def f1 = synchronized { checkLocks(this)(this.getClass, T.this, T.this.getClass, classOf[T], classOf[C2], O2, O2.getClass) }
@inline final def fi = synchronized { checkLocks(this)(this.getClass, T.this, T.this.getClass, classOf[T], classOf[C2], O2, O2.getClass) }
val fv: () => Boolean = () => synchronized { checkLocks(this)(this.getClass, fv, fv.getClass, T.this, T.this.getClass, classOf[T], classOf[C2], O2, O2.getClass) }
def ff = {
lazy val ffv: AnyRef => Boolean = lock => synchronized { checkLocks(lock)(lock.getClass, ffv, ffv.getClass, T.this, T.this.getClass, classOf[T], classOf[C2], O2, O2.getClass) }
ffv(this)
}
def fl = {
lazy val flv = synchronized { checkLocks(this)(this.getClass, T.this, T.this.getClass, classOf[T], classOf[C2], O2, O2.getClass) }
flv
}
def fo = lock.synchronized { checkLocks(lock)(lock.getClass, this, this.getClass, T.this, T.this.getClass, classOf[T], classOf[C2], O2, O2.getClass) }
def fn = T.this.synchronized { checkLocks(T.this)(T.this.getClass, this, this.getClass, classOf[T], classOf[C2], O2.getClass) }
def fc = {
def fcf(f0: => Boolean) = synchronized { f0 }
fcf(checkLocks(this)(this.getClass, T.this, T.this.getClass, classOf[T], classOf[C2], O2, O2.getClass))
}
def g1 = checkLocks()(this, this.getClass, T.this, T.this.getClass, classOf[T], classOf[C2], O2, O2.getClass)
@inline final def gi = checkLocks()(this, this.getClass, T.this, T.this.getClass, classOf[T], classOf[C2], O2, O2.getClass)
val gv: () => Boolean = () => checkLocks()(this, this.getClass, gv, gv.getClass, T.this, T.this.getClass, classOf[T], classOf[C2], O2, O2.getClass)
def gf = {
lazy val gfv: AnyRef => Boolean = lock => checkLocks()(lock, lock.getClass, gfv, gfv.getClass, T.this, T.this.getClass, classOf[T], classOf[C2], O2, O2.getClass)
gfv(this)
}
def gl = {
lazy val glv = checkLocks()(this, this.getClass, T.this, T.this.getClass, classOf[T], classOf[C2], O2, O2.getClass)
glv
}
}
}
class C2 extends T
object O2 extends T
object Test extends App {
def check(name: String, result: Boolean): Unit = println("%-10s %s".format(name +":", if (result) "OK" else "FAILED"))
val c1 = new C1
check("c1.f1", c1.f1)
check("c1.fi", c1.fi)
check("c1.fv", c1.fv())
check("c1.ff", c1.ff)
check("c1.fl", c1.fl)
check("c1.fo", c1.fo)
check("c1.fc", c1.fc)
check("c1.g1", c1.g1)
check("c1.gi", c1.gi)
check("c1.gv", c1.gv())
check("c1.gf", c1.gf)
// check("c1.gl", c1.gl) // FIXME *.gl are failing because of the issue described in SUGGEST-11
check("c1.c.f1", c1.c.f1)
check("c1.c.fi", c1.c.fi)
check("c1.c.fv", c1.c.fv())
check("c1.c.ff", c1.c.ff)
check("c1.c.fl", c1.c.fl)
check("c1.c.fo", c1.c.fo)
check("c1.c.fn", c1.c.fn)
check("c1.c.fc", c1.c.fc)
check("c1.c.g1", c1.c.g1)
check("c1.c.gi", c1.c.gi)
check("c1.c.gv", c1.c.gv())
check("c1.c.gf", c1.c.gf)
// check("c1.c.gl", c1.c.gl)
check("c1.O.f1", c1.O.f1)
check("c1.O.fi", c1.O.fi)
check("c1.O.fv", c1.O.fv())
check("c1.O.ff", c1.O.ff)
check("c1.O.fl", c1.O.fl)
check("c1.O.fo", c1.O.fo)
check("c1.O.fn", c1.O.fn)
check("c1.O.fc", c1.O.fc)
check("c1.O.g1", c1.O.g1)
check("c1.O.gi", c1.O.gi)
check("c1.O.gv", c1.O.gv())
check("c1.O.gf", c1.O.gf)
// check("c1.O.gl", c1.O.gl)
check("O1.f1", O1.f1)
check("O1.fi", O1.fi)
check("O1.fv", O1.fv())
check("O1.ff", O1.ff)
check("O1.fl", O1.fl)
check("O1.fo", O1.fo)
check("O1.fc", O1.fc)
check("O1.g1", O1.g1)
check("O1.gi", O1.gi)
check("O1.gv", O1.gv())
check("O1.gf", O1.gf)
// check("O1.gl", O1.gl)
check("O1.c.f1", O1.c.f1)
check("O1.c.fi", O1.c.fi)
check("O1.c.fv", O1.c.fv())
check("O1.c.ff", O1.c.ff)
check("O1.c.fl", O1.c.fl)
check("O1.c.fo", O1.c.fo)
check("O1.c.fn", O1.c.fn)
check("O1.c.fc", O1.c.fc)
check("O1.c.g1", O1.c.g1)
check("O1.c.gi", O1.c.gi)
check("O1.c.gv", O1.c.gv())
check("O1.c.gf", O1.c.gf)
// check("O1.c.gl", O1.c.gl)
check("O1.O.f1", O1.O.f1)
check("O1.O.fi", O1.O.fi)
check("O1.O.fv", O1.O.fv())
check("O1.O.ff", O1.O.ff)
check("O1.O.fl", O1.O.fl)
check("O1.O.fo", O1.O.fo)
check("O1.O.fn", O1.O.fn)
check("O1.O.fc", O1.O.fc)
check("O1.O.g1", O1.O.g1)
check("O1.O.gi", O1.O.gi)
check("O1.O.gv", O1.O.gv())
check("O1.O.gf", O1.O.gf)
// check("O1.O.gl", O1.O.gl)
val c2 = new C2
check("c2.f1", c2.f1)
check("c2.fi", c2.fi)
check("c2.fv", c2.fv())
check("c2.ff", c2.ff)
check("c2.fl", c2.fl)
check("c2.fo", c2.fo)
check("c2.fc", c2.fc)
check("c2.g1", c2.g1)
check("c2.gi", c2.gi)
check("c2.gv", c2.gv())
check("c2.gf", c2.gf)
// check("c2.gl", c2.gl)
check("c2.c.f1", c2.c.f1)
check("c2.c.fi", c2.c.fi)
check("c2.c.fv", c2.c.fv())
check("c2.c.ff", c2.c.ff)
check("c2.c.fl", c2.c.fl)
check("c2.c.fo", c2.c.fo)
check("c2.c.fn", c2.c.fn)
check("c2.c.fc", c2.c.fc)
check("c2.c.g1", c2.c.g1)
check("c2.c.gi", c2.c.gi)
check("c2.c.gv", c2.c.gv())
check("c2.c.gf", c2.c.gf)
// check("c2.c.gl", c2.c.gl)
check("c2.O.f1", c2.O.f1)
check("c2.O.fi", c2.O.fi)
check("c2.O.fv", c2.O.fv())
check("c2.O.ff", c2.O.ff)
check("c2.O.fl", c2.O.fl)
check("c2.O.fo", c2.O.fo)
check("c2.O.fn", c2.O.fn)
check("c2.O.fc", c2.O.fc)
check("c2.O.g1", c2.O.g1)
check("c2.O.gi", c2.O.gi)
check("c2.O.gv", c2.O.gv())
check("c2.O.gf", c2.O.gf)
// check("c2.O.gl", c2.O.gl)
check("O2.f1", O2.f1)
check("O2.fi", O2.fi)
check("O2.fv", O2.fv())
check("O2.ff", O2.ff)
check("O2.fl", O2.fl)
check("O2.fo", O2.fo)
check("O2.fc", O2.fc)
check("O2.g1", O2.g1)
check("O2.gi", O2.gi)
check("O2.gv", O2.gv())
check("O2.gf", O2.gf)
// check("O2.gl", O2.gl)
check("O2.c.f1", O2.c.f1)
check("O2.c.fi", O2.c.fi)
check("O2.c.fv", O2.c.fv())
check("O2.c.ff", O2.c.ff)
check("O2.c.fl", O2.c.fl)
check("O2.c.fo", O2.c.fo)
check("O2.c.fn", O2.c.fn)
check("O2.c.fc", O2.c.fc)
check("O2.c.g1", O2.c.g1)
check("O2.c.gi", O2.c.gi)
check("O2.c.gv", O2.c.gv())
check("O2.c.gf", O2.c.gf)
// check("O2.c.gl", O2.c.gl)
check("O2.O.f1", O2.O.f1)
check("O2.O.fi", O2.O.fi)
check("O2.O.fv", O2.O.fv())
check("O2.O.ff", O2.O.ff)
check("O2.O.fl", O2.O.fl)
check("O2.O.fo", O2.O.fo)
check("O2.O.fn", O2.O.fn)
check("O2.O.fc", O2.O.fc)
check("O2.O.g1", O2.O.g1)
check("O2.O.gi", O2.O.gi)
check("O2.O.gv", O2.O.gv())
check("O2.O.gf", O2.O.gf)
// check("O2.O.gl", O2.O.gl)
}
| lrytz/scala | test/files/run/synchronized.scala | Scala | apache-2.0 | 19,276 |
package source
import akka.NotUsed
import akka.stream.scaladsl.Source
import logic._
import logic.game._
object InitialGameStateSource {
def getInitialGameStateSource: Source[GameState, NotUsed] = Source.single(
Running(
foodPosition = initialFoodPosition,
snake = initialSnake,
seed = initialSeed,
mapSize = mapSize,
snakeMovementTimer = SnakeMovementTimer())
)
//TODO: Generate an initial "random" state within the GameStateEngine?
private val initialFoodPosition = Position(25, 25)
private val initialSnake = Snake(Seq(Position(40, 40), Position(40, 39)), Up)
private val initialSeed = System.currentTimeMillis()
private val mapSize = 50
} | margorczynski/stateless-snake | src/main/scala/source/InitialGameStateSource.scala | Scala | apache-2.0 | 709 |
package org.f100ded.play.fakews
import akka.stream.Materializer
import akka.stream.scaladsl.Source
import akka.util.ByteString
import play.api.libs.ws.{EmptyBody, InMemoryBody, SourceBody, WSBody}
import scala.concurrent.Await
import scala.concurrent.duration._
private[fakews] object BodyUtils {
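// Collects a WSBody into its raw bytes: EmptyBody and InMemoryBody are returned directly,
// while a streaming SourceBody is drained synchronously (blocking for up to 100 ms).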
def bodyAsBytes(body: WSBody)(implicit mat: Materializer): ByteString = body match {
case EmptyBody => ByteString.empty
case InMemoryBody(bytes) => bytes
case SourceBody(source) => blockingSourceToBytes(source)
}
private def blockingSourceToBytes(source: Source[ByteString, _])
(implicit mat: Materializer): ByteString = {
import scala.concurrent.ExecutionContext.Implicits.global
val result = source.runFold(ByteString.createBuilder) {
case (acc, bytes) => acc.append(bytes)
}.map(_.result())
Await.result(result, 100.milliseconds)
}
}
| f100ded/play-fake-ws-standalone | src/main/scala/org/f100ded/play/fakews/BodyUtils.scala | Scala | apache-2.0 | 918 |
package views.forms
import views.html.helper.FieldElements
object FormHelpers {
import views.html.helper.FieldConstructor
implicit val bootstrapVerticalFormFieldConstructor =
FieldConstructor(views.html.forms.bootstrapVerticalFormFieldConstrutor.f)
} | Bhashit/play-commerce | app/views/forms/FormHelpers.scala | Scala | mit | 261 |
package com.celexus.conniption
import org.scalatest.junit.AssertionsForJUnit
import org.junit.Test
import org.junit.Assert._
class TradeKingAPITest extends AssertionsForJUnit {
@Test
def params = {
val tk = new TradeKingAPI
assertEquals(tk.getAccessTokenEndpoint, "https://developers.tradeking.com/oauth/access_token")
assertEquals(tk.getAuthorizationUrl(null), "https://developers.tradeking.com/oauth/authorize?oauth_token=%s")
assertEquals(tk.getRequestTokenEndpoint, "https://developers.tradeking.com/oauth/request_token")
}
}
| Ccook/conniption4s | src/test/scala/com/celexus/conniption/TradeKingAPITest.scala | Scala | apache-2.0 | 555 |
package controllers
import ornicar.scalalib.Zero
import play.api.data._
import play.api.data.Forms._
import play.api.mvc._
import scala.annotation.nowarn
import views._
import lila.api.{ BodyContext, Context }
import lila.app._
import lila.chat.Chat
import lila.common.{ EmailAddress, HTTPRequest, IpAddress }
import lila.mod.UserSearch
import lila.report.{ Suspect, Mod => AsMod }
import lila.security.{ FingerHash, Granter, Permission }
import lila.user.{ User => UserModel, Title, Holder }
final class Mod(
env: Env,
reportC: => Report,
userC: => User
) extends LilaController(env) {
private def modApi = env.mod.api
private def modLogApi = env.mod.logApi
private def assessApi = env.mod.assessApi
implicit private def asMod(holder: Holder) = AsMod(holder.user)
def alt(username: String, v: Boolean) =
OAuthModBody(_.CloseAccount) { me =>
withSuspect(username) { sus =>
for {
inquiry <- env.report.api.inquiries ofModId me.id
_ <- modApi.setAlt(me, sus, v)
_ <- (v && sus.user.enabled) ?? env.closeAccount(sus.user, me)
} yield (inquiry, sus).some
}
}(ctx =>
me => { case (inquiry, suspect) =>
reportC.onInquiryClose(inquiry, me, suspect.some)(ctx)
}
)
def engine(username: String, v: Boolean) =
OAuthModBody(_.MarkEngine) { me =>
withSuspect(username) { sus =>
for {
inquiry <- env.report.api.inquiries ofModId me.id
_ <- modApi.setEngine(me, sus, v)
} yield (inquiry, sus).some
}
}(ctx =>
me => { case (inquiry, suspect) =>
reportC.onInquiryClose(inquiry, me, suspect.some)(ctx)
}
)
def publicChat =
Secure(_.PublicChatView) { implicit ctx => _ =>
env.mod.publicChat.all map { case (tournamentsAndChats, swissesAndChats) =>
Ok(html.mod.publicChat(tournamentsAndChats, swissesAndChats))
}
}
def publicChatTimeout =
SecureBody(_.ChatTimeout) { implicit ctx => me =>
FormResult(lila.chat.ChatTimeout.form) { data =>
env.chat.api.userChat.publicTimeout(data, me)
}(ctx.body)
}
def booster(username: String, v: Boolean) =
OAuthModBody(_.MarkBooster) { me =>
withSuspect(username) { prev =>
for {
inquiry <- env.report.api.inquiries ofModId me.id
suspect <- modApi.setBoost(me, prev, v)
} yield (inquiry, suspect).some
}
}(ctx =>
me => { case (inquiry, suspect) =>
reportC.onInquiryClose(inquiry, me, suspect.some)(ctx)
}
)
def troll(username: String, v: Boolean) =
OAuthModBody(_.Shadowban) { me =>
withSuspect(username) { prev =>
for {
inquiry <- env.report.api.inquiries ofModId me.id
suspect <- modApi.setTroll(me, prev, v)
} yield (inquiry, suspect).some
}
}(ctx =>
me => { case (inquiry, suspect) =>
reportC.onInquiryClose(inquiry, me, suspect.some)(ctx)
}
)
def warn(username: String, subject: String) =
OAuthModBody(_.ModMessage) { me =>
env.mod.presets.getPmPresets(me.user).named(subject) ?? { preset =>
withSuspect(username) { prev =>
for {
inquiry <- env.report.api.inquiries ofModId me.id
suspect <- modApi.setTroll(me, prev, prev.user.marks.troll)
_ <- env.msg.api.systemPost(suspect.user.id, preset.text)
_ <- env.mod.logApi.modMessage(me.id, suspect.user.id, preset.name)
} yield (inquiry, suspect).some
}
}
}(ctx =>
me => { case (inquiry, suspect) =>
reportC.onInquiryClose(inquiry, me, suspect.some)(ctx)
}
)
def kid(username: String) =
OAuthMod(_.SetKidMode) { _ => me =>
modApi.setKid(me.id, username) map some
}(actionResult(username))
def deletePmsAndChats(username: String) =
OAuthMod(_.Shadowban) { _ => _ =>
withSuspect(username) { sus =>
env.mod.publicChat.deleteAll(sus) >>
env.msg.api.deleteAllBy(sus.user) map some
}
}(actionResult(username))
def disableTwoFactor(username: String) =
OAuthMod(_.DisableTwoFactor) { _ => me =>
modApi.disableTwoFactor(me.id, username) map some
}(actionResult(username))
def closeAccount(username: String) =
OAuthMod(_.CloseAccount) { _ => me =>
env.user.repo named username flatMap {
_ ?? { user =>
env.closeAccount(user, me) map some
}
}
}(actionResult(username))
def reopenAccount(username: String) =
OAuthMod(_.CloseAccount) { _ => me =>
modApi.reopenAccount(me.id, username) map some
}(actionResult(username))
def reportban(username: String, v: Boolean) =
OAuthMod(_.ReportBan) { _ => me =>
withSuspect(username) { sus =>
modApi.setReportban(me, sus, v) map some
}
}(actionResult(username))
def rankban(username: String, v: Boolean) =
OAuthMod(_.RemoveRanking) { _ => me =>
withSuspect(username) { sus =>
modApi.setRankban(me, sus, v) map some
}
}(actionResult(username))
def impersonate(username: String) =
Auth { implicit ctx => me =>
if (username == "-" && env.mod.impersonate.isImpersonated(me)) fuccess {
env.mod.impersonate.stop(me)
Redirect(routes.User.show(me.username))
}
else if (isGranted(_.Impersonate) || (isGranted(_.Admin) && username.toLowerCase == "lichess"))
OptionFuRedirect(env.user.repo named username) { user =>
env.mod.impersonate.start(me, user)
fuccess(routes.User.show(user.username))
}
else notFound
}
def setTitle(username: String) =
SecureBody(_.SetTitle) { implicit ctx => me =>
implicit def req = ctx.body
lila.user.UserForm.title
.bindFromRequest()
.fold(
_ => fuccess(redirect(username, mod = true)),
title =>
modApi.setTitle(me.id, username, title map Title.apply) >>
env.mailer.automaticEmail.onTitleSet(username) >>-
env.user.lightUserApi.invalidate(UserModel normalize username) inject
redirect(username, mod = false)
)
}
def setEmail(username: String) =
SecureBody(_.SetEmail) { implicit ctx => me =>
implicit def req = ctx.body
OptionFuResult(env.user.repo named username) { user =>
env.security.forms
.modEmail(user)
.bindFromRequest()
.fold(
err => BadRequest(err.toString).fuccess,
rawEmail => {
val email = env.security.emailAddressValidator
.validate(EmailAddress(rawEmail)) err s"Invalid email $rawEmail"
modApi.setEmail(me.id, user.id, email.acceptable) inject redirect(user.username, mod = true)
}
)
}
}
def inquiryToZulip =
Secure(_.SendToZulip) { _ => me =>
env.report.api.inquiries ofModId me.id flatMap {
case None => Redirect(routes.Report.list).fuccess
case Some(report) =>
env.user.repo named report.user flatMap {
_ ?? { user =>
import lila.report.Room
import lila.irc.IrcApi.ModDomain
env.irc.api.inquiry(
user = user,
mod = me,
domain = report.room match {
case Room.Cheat | Room.Boost => ModDomain.Hunt
case Room.Comm => ModDomain.Comm
// spontaneous inquiry
case _ if Granter(_.Admin)(me.user) => ModDomain.Admin
case _ if Granter(_.Hunter)(me.user) => ModDomain.Hunt // heuristic
case _ if Granter(_.Shusher)(me.user) => ModDomain.Comm
case _ => ModDomain.Admin
},
room = if (report.isSpontaneous) "Spontaneous inquiry" else report.room.name
) inject NoContent
}
}
}
}
def table =
Secure(_.ModLog) { implicit ctx => _ =>
modApi.allMods map { html.mod.table(_) }
}
private def communications(username: String, priv: Boolean) =
Secure { perms =>
if (priv) perms.ViewPrivateComms else perms.Shadowban
} { implicit ctx => me =>
OptionFuOk(env.user.repo named username) { user =>
implicit val renderIp = env.mod.ipRender(me)
env.game.gameRepo
.recentPovsByUserFromSecondary(user, 80)
.mon(_.mod.comm.segment("recentPovs"))
.flatMap { povs =>
priv.?? {
env.chat.api.playerChat
.optionsByOrderedIds(povs.map(_.gameId).map(Chat.Id.apply))
.mon(_.mod.comm.segment("playerChats"))
} zip
priv.?? {
env.msg.api
.recentByForMod(user, 30)
.mon(_.mod.comm.segment("pms"))
} zip
(env.shutup.api getPublicLines user.id)
.mon(_.mod.comm.segment("publicChats")) zip
env.user.noteApi
.byUserForMod(user.id)
.mon(_.mod.comm.segment("notes")) zip
env.mod.logApi
.userHistory(user.id)
.mon(_.mod.comm.segment("history")) zip
env.report.api.inquiries
.ofModId(me.id)
.mon(_.mod.comm.segment("inquiries")) zip
env.security.userLogins(user, 100).flatMap {
userC.loginsTableData(user, _, 100)
} flatMap { case ((((((chats, convos), publicLines), notes), history), inquiry), logins) =>
if (priv) {
if (!inquiry.??(_.isRecentCommOf(Suspect(user)))) {
env.irc.api.commlog(mod = me, user = user, inquiry.map(_.oldestAtom.by.value))
if (isGranted(_.MonitoredMod))
env.irc.api.monitorMod(
me.id,
"eyes",
s"spontaneously checked out @${user.username}'s private comms",
lila.irc.IrcApi.ModDomain.Comm
)
}
}
env.appeal.api.byUserIds(user.id :: logins.userLogins.otherUserIds) map { appeals =>
html.mod.communication(
me,
user,
(povs zip chats) collect {
case (p, Some(c)) if c.nonEmpty => p -> c
} take 15,
convos,
publicLines,
notes.filter(_.from != "irwin"),
history,
logins,
appeals,
priv
)
}
}
}
}
}
def communicationPublic(username: String) = communications(username, priv = false)
def communicationPrivate(username: String) = communications(username, priv = true)
protected[controllers] def redirect(username: String, mod: Boolean = true) =
Redirect(userUrl(username, mod))
protected[controllers] def userUrl(username: String, mod: Boolean = true) =
s"${routes.User.show(username).url}${mod ?? "?mod"}"
def refreshUserAssess(username: String) =
Secure(_.MarkEngine) { implicit ctx => me =>
OptionFuResult(env.user.repo named username) { user =>
assessApi.refreshAssessOf(user) >>
env.irwin.api.requests.fromMod(Suspect(user), me) >>
userC.renderModZoneActions(username)
}
}
def spontaneousInquiry(username: String) =
Secure(_.SeeReport) { implicit ctx => me =>
OptionFuResult(env.user.repo named username) { user =>
(isGranted(_.Appeals) ?? env.appeal.api.exists(user)) flatMap { isAppeal =>
isAppeal.??(env.report.api.inquiries.ongoingAppealOf(user.id)) flatMap {
case Some(ongoing) if ongoing.mod != me.id =>
env.user.lightUserApi.asyncFallback(ongoing.mod) map { mod =>
Redirect(routes.Appeal.show(user.username))
.flashFailure(s"Currently processed by ${mod.name}")
}
case _ =>
val f =
if (isAppeal) env.report.api.inquiries.appeal _
else env.report.api.inquiries.spontaneous _
f(me, Suspect(user)) inject {
if (isAppeal) Redirect(s"${routes.Appeal.show(user.username)}#appeal-actions")
else redirect(user.username, mod = true)
}
}
}
}
}
def gamify =
Secure(_.GamifyView) { implicit ctx => _ =>
env.mod.gamify.leaderboards zip
env.mod.gamify.history(orCompute = true) map { case (leaderboards, history) =>
Ok(html.mod.gamify.index(leaderboards, history))
}
}
def gamifyPeriod(periodStr: String) =
Secure(_.GamifyView) { implicit ctx => _ =>
lila.mod.Gamify.Period(periodStr).fold(notFound) { period =>
env.mod.gamify.leaderboards map { leaderboards =>
Ok(html.mod.gamify.period(leaderboards, period))
}
}
}
def activity = activityOf("team", "month")
def activityOf(who: String, period: String) =
Secure(_.GamifyView) { implicit ctx => me =>
env.mod.activity(who, period)(me.user) map { activity =>
Ok(html.mod.activity(activity))
}
}
def queues(period: String) =
Secure(_.GamifyView) { implicit ctx => me =>
env.mod.queueStats(period) map { stats =>
Ok(html.mod.queueStats(stats))
}
}
def search =
SecureBody(_.UserSearch) { implicit ctx => me =>
implicit def req = ctx.body
val f = UserSearch.form
f.bindFromRequest()
.fold(
err => BadRequest(html.mod.search(me, err, Nil)).fuccess,
query => env.mod.search(query) map { html.mod.search(me, f.fill(query), _) }
)
}
protected[controllers] def searchTerm(me: Holder, q: String)(implicit ctx: Context) = {
env.mod.search(q) map { users =>
Ok(html.mod.search(me, UserSearch.form fill q, users))
}
}
def print(fh: String) =
SecureBody(_.ViewPrintNoIP) { implicit ctx => me =>
val hash = FingerHash(fh)
for {
uids <- env.security.api recentUserIdsByFingerHash hash
users <- env.user.repo usersFromSecondary uids.reverse
withEmails <- env.user.repo withEmailsU users
uas <- env.security.api.printUas(hash)
} yield Ok(html.mod.search.print(me, hash, withEmails, uas, env.security.printBan blocks hash))
}
def printBan(v: Boolean, fh: String) =
Secure(_.PrintBan) { _ => _ =>
env.security.printBan.toggle(FingerHash(fh), v) inject
Redirect(routes.Mod.print(fh))
}
def singleIp(ip: String) =
SecureBody(_.ViewPrintNoIP) { implicit ctx => me =>
implicit val renderIp = env.mod.ipRender(me)
env.mod.ipRender.decrypt(ip) ?? { address =>
for {
uids <- env.security.api recentUserIdsByIp address
users <- env.user.repo usersFromSecondary uids.reverse
withEmails <- env.user.repo withEmailsU users
uas <- env.security.api.ipUas(address)
} yield Ok(html.mod.search.ip(me, address, withEmails, uas, env.security.firewall blocksIp address))
}
}
def singleIpBan(v: Boolean, ip: String) =
Secure(_.IpBan) { ctx => _ =>
val op =
if (v) env.security.firewall.blockIps _
else env.security.firewall.unblockIps _
op(IpAddress from ip) inject {
if (HTTPRequest isXhr ctx.req) jsonOkResult
else Redirect(routes.Mod.singleIp(ip))
}
}
def chatUser(username: String) =
Secure(_.ChatTimeout) { _ => _ =>
implicit val lightUser = env.user.lightUserSync
JsonOptionOk {
env.chat.api.userChat userModInfo username map2 lila.chat.JsonView.userModInfo
}
}
def permissions(username: String) =
Secure(_.ChangePermission) { implicit ctx => me =>
OptionOk(env.user.repo named username) { user =>
html.mod.permissions(user, me)
}
}
def savePermissions(username: String) =
SecureBody(_.ChangePermission) { implicit ctx => me =>
implicit def req = ctx.body
import lila.security.Permission
OptionFuResult(env.user.repo named username) { user =>
Form(
single("permissions" -> list(text.verifying(Permission.allByDbKey.contains _)))
).bindFromRequest()
.fold(
_ => BadRequest(html.mod.permissions(user, me)).fuccess,
permissions => {
val newPermissions = Permission(permissions) diff Permission(user.roles)
modApi.setPermissions(me, user.username, Permission(permissions)) >> {
newPermissions(Permission.Coach) ?? env.mailer.automaticEmail.onBecomeCoach(user)
} >> {
Permission(permissions)
.exists(_ is Permission.SeeReport) ?? env.plan.api.setLifetime(user)
} inject Redirect(routes.Mod.permissions(username)).flashSuccess
}
)
}
}
def emailConfirm =
SecureBody(_.SetEmail) { implicit ctx => me =>
get("q") match {
case None => Ok(html.mod.emailConfirm("", none, none)).fuccess
case Some(rawQuery) =>
val query = rawQuery.trim.split(' ').toList
val email = query.headOption
.map(EmailAddress.apply) flatMap env.security.emailAddressValidator.validate
val username = query lift 1
def tryWith(setEmail: EmailAddress, q: String): Fu[Option[Result]] =
env.mod.search(q) flatMap {
case List(UserModel.WithEmails(user, _)) =>
(!user.everLoggedIn).?? {
lila.mon.user.register.modConfirmEmail.increment()
modApi.setEmail(me.id, user.id, setEmail)
} >>
env.user.repo.email(user.id) map { email =>
Ok(html.mod.emailConfirm("", user.some, email)).some
}
case _ => fuccess(none)
}
email.?? { em =>
tryWith(em.acceptable, em.acceptable.value) orElse {
username ?? { tryWith(em.acceptable, _) }
} recover lila.db.recoverDuplicateKey(_ => none)
} getOrElse BadRequest(html.mod.emailConfirm(rawQuery, none, none)).fuccess
}
}
def chatPanic =
Secure(_.Shadowban) { implicit ctx => _ =>
Ok(html.mod.chatPanic(env.chat.panic.get)).fuccess
}
def chatPanicPost =
OAuthMod(_.Shadowban) { req => me =>
val v = getBool("v", req)
env.chat.panic.set(v)
env.irc.api.chatPanic(me, v)
fuccess(().some)
}(_ => _ => _ => Redirect(routes.Mod.chatPanic).fuccess)
def presets(group: String) =
Secure(_.Presets) { implicit ctx => _ =>
env.mod.presets.get(group).fold(notFound) { setting =>
Ok(html.mod.presets(group, setting, setting.form)).fuccess
}
}
def presetsUpdate(group: String) =
SecureBody(_.Presets) { implicit ctx => _ =>
implicit val req = ctx.body
env.mod.presets.get(group).fold(notFound) { setting =>
setting.form
.bindFromRequest()
.fold(
err => BadRequest(html.mod.presets(group, setting, err)).fuccess,
v => setting.setString(v.toString) inject Redirect(routes.Mod.presets(group)).flashSuccess
)
}
}
def eventStream =
Scoped() { req => me =>
IfGranted(_.Admin, req, me) {
noProxyBuffer(Ok.chunked(env.mod.stream())).fuccess
}
}
private def withSuspect[A](username: String)(f: Suspect => Fu[A])(implicit zero: Zero[A]): Fu[A] =
env.report.api getSuspect username flatMap {
_ ?? f
}
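// Shared wrappers: run the mod action `f` under the given permission for both session-authenticated
// and OAuth-scoped requests; the scoped path answers with a plain JSON OK once `f` yields a result.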
private def OAuthMod[A](perm: Permission.Selector)(f: RequestHeader => Holder => Fu[Option[A]])(
secure: Context => Holder => A => Fu[Result]
): Action[Unit] =
SecureOrScoped(perm)(
secure = ctx => me => f(ctx.req)(me) flatMap { _ ?? secure(ctx)(me) },
scoped = req =>
me =>
f(req)(me) flatMap { res =>
res.isDefined ?? fuccess(jsonOkResult)
}
)
private def OAuthModBody[A](perm: Permission.Selector)(f: Holder => Fu[Option[A]])(
secure: BodyContext[_] => Holder => A => Fu[Result]
): Action[AnyContent] =
SecureOrScopedBody(perm)(
secure = ctx => me => f(me) flatMap { _ ?? secure(ctx)(me) },
scoped = _ =>
me =>
f(me) flatMap { res =>
res.isDefined ?? fuccess(jsonOkResult)
}
)
private def actionResult(
username: String
)(ctx: Context)(@nowarn("cat=unused") user: Holder)(@nowarn("cat=unused") res: Any) =
if (HTTPRequest isSynchronousHttp ctx.req) fuccess(redirect(username))
else userC.renderModZoneActions(username)(ctx)
}
| luanlv/lila | app/controllers/Mod.scala | Scala | mit | 21,028 |
package notebook
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpec}
class JobTrackingTests extends WordSpec with Matchers with BeforeAndAfterAll {
"encodes jobGroup" in {
JobTracking.jobGroupId(cellId = "abc") shouldBe "cell-abc"
}
"decodes cellId" in {
JobTracking.toCellId(Option("cell-abc")) shouldBe Some("abc")
}
"encodes jobDescription" in {
// must remove all special chars, especially " and \\n, so job description can be enclosed inside " ".
val code = """val x = sqlContext.sql("select * from users")
|.collect()
|.map { x: Row => s"$x\\"" }""".stripMargin
val expected = "run-1234567: val x = sqlContext.sql('select * from users').collect().map { x: Row = s'x'' }"
JobTracking.jobDescription(
cellCode = code,
runId = 1234567) shouldBe expected
}
"decodes runId" in {
JobTracking.getCellRunId(Option("run-1234567: val abc=rdd map x")) shouldBe Some(1234567L)
}
}
| andypetrella/spark-notebook | test/notebook/JobTrackingTests.scala | Scala | apache-2.0 | 959 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE
* file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file
* to You under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the
* License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package kafka.api
import java.io.File
import kafka.server.KafkaConfig
import org.junit.jupiter.api.{AfterEach, BeforeEach, Test}
import kafka.utils.JaasTestUtils
import org.apache.kafka.common.security.auth.SecurityProtocol
import scala.jdk.CollectionConverters._
class SaslMultiMechanismConsumerTest extends BaseConsumerTest with SaslSetup {
private val kafkaClientSaslMechanism = "PLAIN"
private val kafkaServerSaslMechanisms = List("GSSAPI", "PLAIN")
this.serverConfig.setProperty(KafkaConfig.ZkEnableSecureAclsProp, "true")
override protected def securityProtocol = SecurityProtocol.SASL_SSL
override protected lazy val trustStoreFile = Some(File.createTempFile("truststore", ".jks"))
override protected val serverSaslProperties = Some(kafkaServerSaslProperties(kafkaServerSaslMechanisms, kafkaClientSaslMechanism))
override protected val clientSaslProperties = Some(kafkaClientSaslProperties(kafkaClientSaslMechanism))
@BeforeEach
override def setUp(): Unit = {
startSasl(jaasSections(kafkaServerSaslMechanisms, Some(kafkaClientSaslMechanism), Both,
JaasTestUtils.KafkaServerContextName))
super.setUp()
}
@AfterEach
override def tearDown(): Unit = {
super.tearDown()
closeSasl()
}
@Test
def testMultipleBrokerMechanisms(): Unit = {
val plainSaslProducer = createProducer()
val plainSaslConsumer = createConsumer()
val gssapiSaslProperties = kafkaClientSaslProperties("GSSAPI", dynamicJaasConfig = true)
val gssapiSaslProducer = createProducer(configOverrides = gssapiSaslProperties)
val gssapiSaslConsumer = createConsumer(configOverrides = gssapiSaslProperties)
val numRecords = 1000
var startingOffset = 0
// Test SASL/PLAIN producer and consumer
var startingTimestamp = System.currentTimeMillis()
sendRecords(plainSaslProducer, numRecords, tp, startingTimestamp = startingTimestamp)
plainSaslConsumer.assign(List(tp).asJava)
plainSaslConsumer.seek(tp, 0)
consumeAndVerifyRecords(consumer = plainSaslConsumer, numRecords = numRecords, startingOffset = startingOffset,
startingTimestamp = startingTimestamp)
sendAndAwaitAsyncCommit(plainSaslConsumer)
startingOffset += numRecords
// Test SASL/GSSAPI producer and consumer
startingTimestamp = System.currentTimeMillis()
sendRecords(gssapiSaslProducer, numRecords, tp, startingTimestamp = startingTimestamp)
gssapiSaslConsumer.assign(List(tp).asJava)
gssapiSaslConsumer.seek(tp, startingOffset)
consumeAndVerifyRecords(consumer = gssapiSaslConsumer, numRecords = numRecords, startingOffset = startingOffset,
startingTimestamp = startingTimestamp)
sendAndAwaitAsyncCommit(gssapiSaslConsumer)
startingOffset += numRecords
// Test SASL/PLAIN producer and SASL/GSSAPI consumer
startingTimestamp = System.currentTimeMillis()
sendRecords(plainSaslProducer, numRecords, tp, startingTimestamp = startingTimestamp)
gssapiSaslConsumer.assign(List(tp).asJava)
gssapiSaslConsumer.seek(tp, startingOffset)
consumeAndVerifyRecords(consumer = gssapiSaslConsumer, numRecords = numRecords, startingOffset = startingOffset,
startingTimestamp = startingTimestamp)
startingOffset += numRecords
// Test SASL/GSSAPI producer and SASL/PLAIN consumer
startingTimestamp = System.currentTimeMillis()
sendRecords(gssapiSaslProducer, numRecords, tp, startingTimestamp = startingTimestamp)
plainSaslConsumer.assign(List(tp).asJava)
plainSaslConsumer.seek(tp, startingOffset)
consumeAndVerifyRecords(consumer = plainSaslConsumer, numRecords = numRecords, startingOffset = startingOffset,
startingTimestamp = startingTimestamp)
}
}
| guozhangwang/kafka | core/src/test/scala/integration/kafka/api/SaslMultiMechanismConsumerTest.scala | Scala | apache-2.0 | 4,486 |
/*
* Copyright (C) 2009-2018 Lightbend Inc. <https://www.lightbend.com>
*/
package play.sbt.routes
import play.routes.compiler.RoutesCompiler.GeneratedSource
import sbt._
import xsbti.Position
import java.util.Optional
import scala.collection.mutable
import scala.language.implicitConversions
/**
* Fix compatibility issues for RoutesCompiler. This is the version compatible with sbt 1.0.
*/
private[routes] trait RoutesCompilerCompat {
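// Maps a compiler position inside a generated routes source back to the corresponding
// line of the original .routes file, so errors are reported against the user's sources.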
val routesPositionMapper: Position => Option[Position] = position => {
position.sourceFile.asScala.collect {
case GeneratedSource(generatedSource) => {
new xsbti.Position {
override lazy val line: Optional[Integer] = {
position.line.asScala
.flatMap(l => generatedSource.mapLine(l.asInstanceOf[Int]))
.map(l => l.asInstanceOf[java.lang.Integer])
.asJava
}
override lazy val lineContent: String = {
line.asScala.flatMap { lineNumber =>
sourceFile.asScala.flatMap { file =>
IO.read(file).split('\\n').lift(lineNumber - 1)
}
}.getOrElse("")
}
override val offset: Optional[Integer] = Optional.empty[java.lang.Integer]
override val pointer: Optional[Integer] = Optional.empty[java.lang.Integer]
override val pointerSpace: Optional[String] = Optional.empty[String]
override val sourceFile: Optional[File] = Optional.ofNullable(generatedSource.source.get)
override val sourcePath: Optional[String] = Optional.ofNullable(sourceFile.get.getCanonicalPath)
override lazy val toString: String = {
val sb = new mutable.StringBuilder()
if (sourcePath.isPresent) sb.append(sourcePath.get)
if (line.isPresent) sb.append(":").append(line.get)
if (lineContent.nonEmpty) sb.append("\\n").append(lineContent)
sb.toString()
}
}
}
}
}
}
| Shenker93/playframework | framework/src/sbt-plugin/src/main/scala-sbt-1.0/play/sbt/routes/RoutesCompilerCompat.scala | Scala | apache-2.0 | 1,995 |
package services
import play.api.mvc.{Security, Action, Controller}
import play.modules.reactivemongo.MongoController
import play.api.libs.concurrent.Execution.Implicits._
import play.api.Logger
import play.modules.reactivemongo.json.BSONFormats
import play.api.libs.json._
import play.api.libs.json.Json._
import scala.concurrent.Future
import reactivemongo.bson.BSONObjectID
import play.modules.reactivemongo.json.collection.JSONCollection
import reactivemongo.core.commands.{Update, FindAndModify}
import support.mongo.Implicits._
import play.api.libs.json.Reads._
import play.api.libs.functional.syntax._
import play.api.libs.json
object Statistics
extends Controller
with MongoController {
private val logger = Logger(getClass)
private implicit val objectIdFormat = BSONFormats.BSONObjectIDFormat
private implicit val documentFormat = BSONFormats.BSONDocumentFormat
protected def collection = db.collection[JSONCollection]("statistics0")
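// Records a single statistics event: keeps the optional "action" field, stamps a "reported"
// timestamp, tags the event with the session user's group id when present, and stores it
// with a findAndModify insert, answering with the stored document or NotFound.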
def createOne = Action.async(parse.json) {
implicit request =>
val transforms = {
(__ \\ 'action).json.pick.flatMap(v => (__ \\ 'action).json.put(v)).orElse(Reads.pure(Json.obj())) and
(__ \\ 'reported).json.put(JsNumber(System.currentTimeMillis))
}
def withGroup(updates: JsObject) = {
updates ++ request
.session
.get(Security.username)
.map(BSONObjectID(_))
.map(toJson(_))
.map(id => obj("group" -> obj("_id" -> id)))
.getOrElse(obj())
}
request.body.transform(transforms.reduce).map(withGroup) map {
updates =>
import support.mongo.FindAndModify
val command = FindAndModify
.collection(collection)
.insert(updates)
db.command(command).map(_.map(toJson(_)).map(Ok(_)).getOrElse(NotFound))
} recoverTotal {
error =>
Future(BadRequest(JsError.toFlatJson(error)))
}
}
}
| michaelahlers/team-awesome-wedding | app/services/Statistics.scala | Scala | mit | 1,958 |
package org.openurp.edu.eams.teach.lesson.service.limit.impl
import org.openurp.edu.teach.lesson.LessonLimitMeta
import org.openurp.edu.eams.teach.lesson.service.limit.LessonLimitMetaFilter
import org.openurp.edu.teach.lesson.LessonLimitMeta.LimitMeta
import org.openurp.edu.eams.teach.lesson.service.limit.LessonLimitMetaProvider
import org.beangle.commons.collection.Collections
class DefaultLessonLimitMetaProvider extends LessonLimitMetaProvider {
var filters = Collections.newBuffer[LessonLimitMetaFilter]
def getLessonLimitMetas(): Seq[LimitMeta] = {
val results = Collections.newBuffer[LimitMeta]
val iter = LessonLimitMeta.values.iterator
while (iter.hasNext) {
val meta = iter.next()
val append = !filters.exists { f => !f.accept(meta) }
if (append) results += meta
}
results
}
def getLessonLimitMetaIds(): Seq[Int] = {
val results = Collections.newBuffer[Int]
val iter = LessonLimitMeta.values.iterator
while (iter.hasNext) {
val meta = iter.next()
val append = !filters.exists { f => !f.accept(meta) }
if (append) results += meta.id
}
results
}
def getLessonLimitMetaPairs(): Pair[Seq[Int], Seq[LimitMeta]] = {
val ids = Collections.newBuffer[Int]
val metas = Collections.newBuffer[LimitMeta]
val iter = LessonLimitMeta.values.iterator
while (iter.hasNext) {
val meta = iter.next()
val append = !filters.exists { f => !f.accept(meta) }
if (append) {
ids += meta.id
metas += meta
}
}
new Pair[Seq[Int], Seq[LimitMeta]](ids, metas)
}
}
| openurp/edu-eams-webapp | core/src/main/scala/org/openurp/edu/eams/teach/lesson/service/limit/impl/DefaultCourseLimitMetaEnumProvider.scala | Scala | gpl-3.0 | 1,657 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.jdbc
import java.sql.Types
import org.apache.spark.sql.types._
private case object TeradataDialect extends JdbcDialect {
override def canHandle(url: String): Boolean = { url.startsWith("jdbc:teradata") }
override def getJDBCType(dt: DataType): Option[JdbcType] = dt match {
case StringType => Some(JdbcType("VARCHAR(255)", java.sql.Types.VARCHAR))
case BooleanType => Option(JdbcType("CHAR(1)", java.sql.Types.CHAR))
case _ => None
}
// Teradata does not support cascading a truncation
override def isCascadingTruncateTable(): Option[Boolean] = Some(false)
/**
* The SQL query used to truncate a table. Teradata does not support the 'TRUNCATE' syntax that
* other dialects use. Instead, we need to use a 'DELETE FROM' statement.
* @param table The table to truncate.
* @param cascade Whether or not to cascade the truncation. Default value is the
* value of isCascadingTruncateTable(). Teradata does not support cascading a
* 'DELETE FROM' statement (and as mentioned, does not support 'TRUNCATE' syntax)
* @return The SQL query to use for truncating a table
*/
override def getTruncateQuery(
table: String,
cascade: Option[Boolean] = isCascadingTruncateTable): String = {
s"DELETE FROM $table ALL"
}
}
| pgandhi999/spark | sql/core/src/main/scala/org/apache/spark/sql/jdbc/TeradataDialect.scala | Scala | apache-2.0 | 2,140 |
package org.jetbrains.plugins.scala
package lang
package psi
package api
package expr
import com.intellij.psi.{PsiAnnotationMemberValue, PsiElement}
import org.jetbrains.plugins.scala.lang.parser.ScalaElementTypes
import org.jetbrains.plugins.scala.lang.psi.api.base.ScConstructor
import org.jetbrains.plugins.scala.lang.psi.impl.expr.ScNameValuePairImpl
/**
* @author Alexander Podkhalyuzin
* Date: 07.03.2008
*/
trait ScAnnotationExpr extends ScalaPsiElement {
def constr = findChildByClassScala(classOf[ScConstructor])
def getAttributes: Seq[ScNameValuePair] = findArgExprs.map(_.findChildrenByType(ScalaElementTypes.ASSIGN_STMT)).getOrElse(Seq.empty).map {
case stmt: ScAssignStmt => new ScNameValueAssignment(stmt)
}
def getAnnotationParameters = findArgExprs.map(_.exprs).getOrElse(Seq.empty)
private def findArgExprs: Option[ScArgumentExprList] = {
val constr = findChildByClassScala(classOf[ScConstructor])
if (constr == null) return None
val args = constr.findFirstChildByType(ScalaElementTypes.ARG_EXPRS)
args match {
case scArgExpr: ScArgumentExprList => Some(scArgExpr)
case _ => None
}
}
private class ScNameValueAssignment(assign: ScAssignStmt) extends ScNameValuePairImpl(assign.getNode) {
override def nameId: PsiElement = assign.getLExpression
override def getValue: PsiAnnotationMemberValue = (assign.getRExpression map {
case annotationMember: PsiAnnotationMemberValue => annotationMember
case _ => null
}).orNull
}
} | LPTK/intellij-scala | src/org/jetbrains/plugins/scala/lang/psi/api/expr/ScAnnotationExpr.scala | Scala | apache-2.0 | 1,525 |
package examples
import io.gatling.core.Predef._
import io.gatling.http.Predef._
import scala.concurrent.duration._
class ConstantUsersSimulation extends Simulation {
val httpProtocol = http.baseURL("https://www.google.com")
val numberOfUsers = 10
val duration = 10
val scn = scenario("Constant Users traffic")
.exec(http("open_page")
.get("/")
.check(status.is(200)))
.inject(constantUsersPerSec(numberOfUsers) during (duration seconds))
setUp(scn).protocols(httpProtocol)
}
| stelmod/gatling-quickstart | src/test/scala/examples/ConstantUsersSimulation.scala | Scala | mit | 512 |
package lore.compiler.semantics.modules
import lore.compiler.core.Position
import lore.compiler.semantics.modules.LocalModule.ImportMap
import lore.compiler.semantics.{NameKind, NamePath}
import lore.compiler.syntax.DeclNode
/**
* A LocalModule is a lexical object that contains the import maps and names of local declarations for a local module
* declaration that surrounds its members in a fragment. It does not necessarily contain all declarations of the
* semantic global module known via the name path.
*
* Precedence in the same module is skewed towards local declarations: local declarations > imports > parent local
* declarations > parent imports > module members > root module members. (Also consult the specification on this.)
* This requires us to carry the local module forward throughout the compilation process, so that DeclNodes don't lose
* their connection to the other local declarations. It also requires us to create a parent/child relationship between
* nested local modules.
*
* Import maps are separated by name kind because some types and their corresponding modules may be placed in
* different namespaces. This is the case with the String type, which as a basic type is placed in the root namespace,
* and the corresponding module `lore.String`.
*
* @param localTypeNames Even though these names can be derived from `members`, we don't want this set and
* `localBindingNames` to have to be recreated every time a LocalModule is copied (for
* successively populating the import maps).
*/
case class LocalModule(
modulePath: NamePath,
parent: Option[LocalModule],
members: Vector[DeclNode],
localTypeNames: Set[String],
localBindingNames: Set[String],
typeImportMap: ImportMap,
bindingImportMap: ImportMap,
position: Position,
)(implicit globalModuleIndex: GlobalModuleIndex) {
/**
* Returns the full NamePath for the given simple name if it occurs in this local module, in one of the local
* module's parents, or globally as a module member of the current module or one of its parents.
*
* To decide global membership, the [[GlobalModuleIndex]] is taken into consideration.
*/
def getPath(memberName: String, nameKind: NameKind): Option[NamePath] = {
if (namesOf(nameKind).contains(memberName)) {
Some(modulePath + memberName)
} else {
importMapOf(nameKind).get(memberName).orElse {
parent match {
case Some(parent) => parent.getPath(memberName, nameKind)
case None => globalModuleIndex.getPath(modulePath, memberName, nameKind)
}
}
}
}
/**
* Turns a relative type path into an absolute type path. This works similar to type path resolution in TypeScopes.
* If the name cannot be found, the function returns None.
*/
def toAbsoluteTypePath(relativePath: NamePath): Option[NamePath] = {
if (!relativePath.isMultiple) {
getPath(relativePath.simpleName, NameKind.Type)
} else {
getPath(relativePath.headName, NameKind.Binding)
.map(_ ++ relativePath.tail)
}
}
private def namesOf(nameKind: NameKind): Set[String] = nameKind match {
case NameKind.Type => localTypeNames
case NameKind.Binding => localBindingNames
}
private def importMapOf(nameKind: NameKind): ImportMap = nameKind match {
case NameKind.Type => typeImportMap
case NameKind.Binding => bindingImportMap
}
}
object LocalModule {
/**
* An import map can immediately resolve the name paths for all imported simple names.
*/
type ImportMap = Map[String, NamePath]
}
| marcopennekamp/lore | compiler/src/lore/compiler/semantics/modules/LocalModule.scala | Scala | mit | 3,628 |
class DelayedInit2 extends DelayedInit {
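// With DelayedInit, the statements of the class body are handed to delayedInit as a thunk;
// this subclass stores the thunk in `initialization` instead of running it right away.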
println("in constructor")
var initialization: () => Unit = _
override def delayedInit(body: => Unit) {
initialization = () => body
}
}
| grzegorzbalcerek/scala-book-examples | examples/DelayedInit2.scala | Scala | mit | 188 |
package com.twitter.finatra.multiserver.CombinedServer
import javax.inject.Singleton
@Singleton
class AdderService {
def add1(number: Int): Int = {
number + 1
}
def add1String(number: String): String = {
(number.toInt + 1).toString
}
}
| syamantm/finatra | inject-thrift-client-http-mapper/src/test/scala/com/twitter/finatra/multiserver/CombinedServer/AdderService.scala | Scala | apache-2.0 | 256 |
package hr.element.beepo.client
import email._
import sms._
import io._
import xml._
sealed trait Action
case object Persist extends Action
case object Send extends Action {
def apply(id: String, otherIDs: String*): Send =
Send(id +: otherIDs)
}
case class Send(val ids: Seq[String]) extends xml.SendXMLConverter with Communicator {
def send(): String =
send(Send)
}
object Task {
def apply(email: Email): Task =
Task(None, Seq(email), Nil)
def apply(sms: Sms): Task =
Task(None, Nil, Seq(sms))
def apply(email: Email, Sms: Sms): Task =
Task(None, Seq(email), Seq(Sms))
def apply(requestID: String, email: Email): Task =
Task(Some(requestID), Seq(email), Nil)
def apply(requestID: String, Sms: Sms): Task =
Task(Some(requestID), Nil, Seq(Sms))
def apply(requestID: String, email: Email, Sms: Sms): Task =
Task(Some(requestID), Seq(email), Seq(Sms))
}
case class Task(
requestID: Option[String]
, emails: Seq[Email]
, smses: Seq[Sms]) extends xml.TaskXMLConverter with Communicator {
def setRequestID(requestID: String) =
copy(requestID = Some(requestID))
def add(email: Email, otherEmails: Email*) =
copy(emails = (emails :+ email) ++ otherEmails)
def add(sms: Sms, otherSmses: Sms*) =
copy(smses = (smses :+ sms) ++ otherSmses)
def persist(): String =
send(Persist)
def send(): String =
send(Send)
}
| element-doo/beepo | code/scala/client/src/main/scala/hr/element/beepo/client/Task.scala | Scala | bsd-3-clause | 1,403 |
package main
import util.control.Exception.allCatch
/**
* @author Tobin Yehle
*/
object Main {
def main(args: Array[String]): Unit =
parseCommandLine(args).extract(println(s"Got ${args.mkString(", ")}, expected a single number"),
n => println(largestPalindrome(n)))
  /** Define extract for options. This is like Haskell's maybe function. */
implicit class FancyOption[+A](val inner: Option[A]) {
def extract[B](default: => B, conversion: A => B): B = inner.map(conversion).getOrElse(default)
}
/**
* Try to parse the command line args
*/
def parseCommandLine(args: Array[String]): Option[Long] = {
args match {
case Array(number) => allCatch.opt(number.toLong)
case _ => None
}
}
def largestPalindrome(n: Long): Long = {
val high = math.pow(10, n).toInt - 1
val low = math.pow(10, n-1).toInt
products(high, low).find(isPalindrome).getOrElse(throw new RuntimeException("No palindrome found"))
}
def isPalindrome(n: Long): Boolean = {
val s = n.toString
s == s.reverse
}
/**
   * Generate a stream of products of pairs, in order of their product
* @param high The largest value in the pair
* @param low The smallest value in the pair
*/
def products(high: Long, low: Long): Stream[Long] = {
case class Pair(a: Long, b: Long) extends Ordered[Pair] {
override def compare(that: Pair): Int = (a*b) compare (that.a * that.b)
}
def genStream(fringe: SkewHeap[Pair]): Stream[Long] = fringe.firstView match {
case None => Stream.empty
case Some((Pair(a, b), rest)) => (a * b) #:: genStream(if(a > b) rest.add(Pair(a-1, b)) else rest)
}
genStream(SkewHeap((low to high).map(Pair(high, _)): _*))
}
}
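// Hypothetical usage sketch (not part of the original file). It assumes the
// project's SkewHeap yields pairs in decreasing order of product, which is what
// largestPalindrome relies on; for two-digit factors the classic answer is
// 9009 = 91 * 99.
object MainExample {
  def demo(): Unit = {
    assert(Main.largestPalindrome(2) == 9009L)
  }
}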
| tyehle/programming-studio | 2017-W27/tobin/src/main/scala/main/Main.scala | Scala | mit | 1,760 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.predictionio.controller
import _root_.org.apache.predictionio.controller.java.SerializableComparator
import org.apache.predictionio.core.BaseEngine
import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD
import org.apache.spark.util.StatCounter
import scala.Numeric.Implicits._
import scala.reflect._
/** Base class of a [[Metric]].
*
* @tparam EI Evaluation information
* @tparam Q Query
* @tparam P Predicted result
* @tparam A Actual result
* @tparam R Metric result
* @group Evaluation
*/
abstract class Metric[EI, Q, P, A, R](implicit rOrder: Ordering[R])
extends Serializable {
/** Java friendly constructor
*
* @param comparator A serializable comparator for sorting the metric results.
*
*/
def this(comparator: SerializableComparator[R]) = {
this()(Ordering.comparatorToOrdering(comparator))
}
/** Class name of this [[Metric]]. */
def header: String = this.getClass.getSimpleName
/** Calculates the result of this [[Metric]]. */
def calculate(sc: SparkContext, evalDataSet: Seq[(EI, RDD[(Q, P, A)])]): R
/** Comparison function for R's ordering. */
def compare(r0: R, r1: R): Int = rOrder.compare(r0, r1)
}
private[predictionio] trait StatsMetricHelper[EI, Q, P, A] {
def calculate(q: Q, p: P, a: A): Double
def calculateStats(sc: SparkContext, evalDataSet: Seq[(EI, RDD[(Q, P, A)])])
: StatCounter = {
val doubleRDD = sc.union(
evalDataSet.map { case (_, qpaRDD) =>
qpaRDD.map { case (q, p, a) => calculate(q, p, a) }
}
)
doubleRDD.stats()
}
}
private[predictionio] trait StatsOptionMetricHelper[EI, Q, P, A] {
def calculate(q: Q, p: P, a: A): Option[Double]
def calculateStats(sc: SparkContext, evalDataSet: Seq[(EI, RDD[(Q, P, A)])])
: StatCounter = {
val doubleRDD = sc.union(
evalDataSet.map { case (_, qpaRDD) =>
qpaRDD.flatMap { case (q, p, a) => calculate(q, p, a) }
}
)
doubleRDD.stats()
}
}
/** Returns the global average of the score returned by the calculate method.
*
* @tparam EI Evaluation information
* @tparam Q Query
* @tparam P Predicted result
* @tparam A Actual result
*
* @group Evaluation
*/
abstract class AverageMetric[EI, Q, P, A]
extends Metric[EI, Q, P, A, Double]
with StatsMetricHelper[EI, Q, P, A]
with QPAMetric[Q, P, A, Double] {
/** Implement this method to return a score that will be used for averaging
* across all QPA tuples.
*/
override def calculate(q: Q, p: P, a: A): Double
override def calculate(sc: SparkContext, evalDataSet: Seq[(EI, RDD[(Q, P, A)])])
: Double = {
calculateStats(sc, evalDataSet).mean
}
}
/** Returns the global average of the non-None score returned by the calculate
* method.
*
* @tparam EI Evaluation information
* @tparam Q Query
* @tparam P Predicted result
* @tparam A Actual result
*
* @group Evaluation
*/
abstract class OptionAverageMetric[EI, Q, P, A]
extends Metric[EI, Q, P, A, Double]
with StatsOptionMetricHelper[EI, Q, P, A]
with QPAMetric[Q, P, A, Option[Double]] {
/** Implement this method to return a score that will be used for averaging
* across all QPA tuples.
*/
override def calculate(q: Q, p: P, a: A): Option[Double]
override def calculate(sc: SparkContext, evalDataSet: Seq[(EI, RDD[(Q, P, A)])])
: Double = {
calculateStats(sc, evalDataSet).mean
}
}
/** Returns the global standard deviation of the score returned by the calculate method
*
 * This method uses the org.apache.spark.util.StatCounter library; a one-pass
 * method is used for the calculation.
*
* @tparam EI Evaluation information
* @tparam Q Query
* @tparam P Predicted result
* @tparam A Actual result
*
* @group Evaluation
*/
abstract class StdevMetric[EI, Q, P, A]
extends Metric[EI, Q, P, A, Double]
with StatsMetricHelper[EI, Q, P, A]
with QPAMetric[Q, P, A, Double] {
/** Implement this method to return a score that will be used for calculating
* the stdev
* across all QPA tuples.
*/
override def calculate(q: Q, p: P, a: A): Double
override def calculate(sc: SparkContext, evalDataSet: Seq[(EI, RDD[(Q, P, A)])])
: Double = {
calculateStats(sc, evalDataSet).stdev
}
}
/** Returns the global standard deviation of the non-None score returned by the calculate method
*
 * This method uses the org.apache.spark.util.StatCounter library; a one-pass
 * method is used for the calculation.
*
* @tparam EI Evaluation information
* @tparam Q Query
* @tparam P Predicted result
* @tparam A Actual result
*
* @group Evaluation
*/
abstract class OptionStdevMetric[EI, Q, P, A]
extends Metric[EI, Q, P, A, Double]
with StatsOptionMetricHelper[EI, Q, P, A]
with QPAMetric[Q, P, A, Option[Double]] {
/** Implement this method to return a score that will be used for calculating
* the stdev
* across all QPA tuples.
*/
override def calculate(q: Q, p: P, a: A): Option[Double]
override def calculate(sc: SparkContext, evalDataSet: Seq[(EI, RDD[(Q, P, A)])])
: Double = {
calculateStats(sc, evalDataSet).stdev
}
}
/** Returns the sum of the score returned by the calculate method.
*
* @tparam EI Evaluation information
* @tparam Q Query
* @tparam P Predicted result
* @tparam A Actual result
* @tparam R Result, output of the function calculate, must be Numeric
*
* @group Evaluation
*/
abstract class SumMetric[EI, Q, P, A, R: ClassTag](implicit num: Numeric[R])
extends Metric[EI, Q, P, A, R]()(num)
with QPAMetric[Q, P, A, R] {
/** Implement this method to return a score that will be used for summing
* across all QPA tuples.
*/
override def calculate(q: Q, p: P, a: A): R
override def calculate(sc: SparkContext, evalDataSet: Seq[(EI, RDD[(Q, P, A)])])
: R = {
val union: RDD[R] = sc.union(
evalDataSet.map { case (_, qpaRDD) =>
qpaRDD.map { case (q, p, a) => calculate(q, p, a) }
}
)
union.aggregate[R](num.zero)(_ + _, _ + _)
}
}
/** Returns zero. Useful as a placeholder during evaluation development when not all components are
* implemented.
* @tparam EI Evaluation information
* @tparam Q Query
* @tparam P Predicted result
* @tparam A Actual result
*
* @group Evaluation
*/
class ZeroMetric[EI, Q, P, A] extends Metric[EI, Q, P, A, Double]() {
override def calculate(sc: SparkContext, evalDataSet: Seq[(EI, RDD[(Q, P, A)])]): Double = 0.0
}
/** Companion object of [[ZeroMetric]]
*
* @group Evaluation
*/
object ZeroMetric {
/** Returns a ZeroMetric instance using Engine's type parameters. */
def apply[EI, Q, P, A](engine: BaseEngine[EI, Q, P, A]): ZeroMetric[EI, Q, P, A] = {
new ZeroMetric[EI, Q, P, A]()
}
}
/** Trait for metric which returns a score based on Query, PredictedResult,
* and ActualResult
*
* @tparam Q Query class
* @tparam P Predicted result class
* @tparam A Actual result class
* @tparam R Metric result class
* @group Evaluation
*/
trait QPAMetric[Q, P, A, R] {
/** Calculate a metric result based on query, predicted result, and actual
* result
*
* @param q Query
* @param p Predicted result
* @param a Actual result
* @return Metric result
*/
def calculate(q: Q, p: P, a: A): R
}
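/** Illustrative sketch (not part of the original file): a minimal concrete
  * [[AverageMetric]] that scores 1.0 for an exact match between the predicted
  * and actual results, i.e. plain accuracy. The String query/result types are
  * placeholders chosen for the example.
  */
class ExactMatchAccuracy[EI, Q] extends AverageMetric[EI, Q, String, String] {
  override def calculate(q: Q, p: String, a: String): Double =
    if (p == a) 1.0 else 0.0
}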
| PredictionIO/PredictionIO | core/src/main/scala/org/apache/predictionio/controller/Metric.scala | Scala | apache-2.0 | 8,129 |
/**
* © 2019 Refinitiv. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the “License”); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an “AS IS” BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cmwell.common.formats
import com.fasterxml.jackson.core.JsonFactory
import com.typesafe.config.ConfigFactory
import org.joda.time.DateTimeZone
import org.joda.time.format.ISODateTimeFormat
/**
* Created by gilad on 2/26/15.
*/
object SettingsHelper {
val config = ConfigFactory.load()
val dataCenter = config.getString("dataCenter.id")
}
class AbstractJsonSerializer {
val jsonFactory = new JsonFactory()
val dateFormatter = ISODateTimeFormat.dateTime().withZone(DateTimeZone.UTC)
}
trait NsSplitter {
def splitNamespaceField(field: String) = field.lastIndexOf(".") match {
case -1 => "nn" -> field
case i => field.substring(i + 1) -> field.substring(0, i).replace('.', '_')
}
def reverseNsTypedField(field: String) = {
if (field == "_all") "allFields"
else if (field.startsWith("system.") || field.startsWith("content.") || field.startsWith("link.")) field
else {
val (ns, typedField) = splitNamespaceField(field)
"fields." + ns + "." + typedField.replace('.', '_')
}
}
}
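// Illustrative sketch (not part of the original file): how the helpers above
// split a namespaced field name and rebuild the reversed form.
object NsSplitterDemo extends NsSplitter {
  def demo(): Unit = {
    assert(splitNamespaceField("title.dc") == ("dc" -> "title"))        // (type suffix, namespace prefix)
    assert(splitNamespaceField("plainField") == ("nn" -> "plainField")) // no namespace falls back to "nn"
    assert(reverseNsTypedField("title.dc") == "fields.dc.title")
  }
}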
| e-orz/CM-Well | server/cmwell-common/src/main/scala/cmwell/common/formats/AbstractJsonSerializer.scala | Scala | apache-2.0 | 1,669 |
/*                     __                                               *\
**     ________ ___   / /  ___      Scala API                           **
**    / __/ __// _ | / / / _ |    (c) 2003-2013, LAMP/EPFL              **
**  __\ \/ /__/ __ |/ /__/ __ |    http://scala-lang.org/               **
** /____/\___/_/ |_/____/_/ | |                                         **
**                          |/                                          **
\*                                                                      */
package scala
import scala.scalajs.js
/** This class provides a simple way to get unique objects for equal strings.
* Since symbols are interned, they can be compared using reference equality.
* Instances of `Symbol` can be created easily with Scala's built-in quote
* mechanism.
*
* For instance, the [[http://scala-lang.org/#_top Scala]] term `'mysym` will
* invoke the constructor of the `Symbol` class in the following way:
* `Symbol("mysym")`.
*
* @author Martin Odersky, Iulian Dragos
* @version 1.8
*/
final class Symbol private (val name: String) extends Serializable {
/** Converts this symbol to a string.
*/
override def toString(): String = "'" + name
@throws(classOf[java.io.ObjectStreamException])
private def readResolve(): Any = Symbol.apply(name)
override def hashCode = name.hashCode()
override def equals(other: Any) = this eq other.asInstanceOf[AnyRef]
}
// Modified to use Scala.js specific cache
object Symbol extends JSUniquenessCache[Symbol] {
override def apply(name: String): Symbol = super.apply(name)
protected def valueFromKey(name: String): Symbol = new Symbol(name)
protected def keyFromValue(sym: Symbol): Option[String] = Some(sym.name)
}
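// Illustrative sketch (not part of the original file): the interning behaviour
// documented above means that equal names always yield the same cached instance,
// so reference equality (`eq`) holds.
private[scala] object SymbolInterningDemo {
  def demo(): Unit = {
    val a = Symbol("mysym")
    val b = Symbol("mysym")
    assert(a eq b) // same instance from the uniqueness cache
    assert(a == b) // equals is defined as reference equality, so this agrees
  }
}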
private[scala] abstract class JSUniquenessCache[V]
{
private val cache = js.Dictionary.empty[V]
protected def valueFromKey(k: String): V
protected def keyFromValue(v: V): Option[String]
def apply(name: String): V =
cache.getOrElseUpdate(name, valueFromKey(name))
def unapply(other: V): Option[String] = keyFromValue(other)
}
/** This is private so it won't appear in the library API, but
* abstracted to offer some hope of reusability. */
/* DELETED for Scala.js
private[scala] abstract class UniquenessCache[K >: js.String, V >: Null]
{
import java.lang.ref.WeakReference
import java.util.WeakHashMap
import java.util.concurrent.locks.ReentrantReadWriteLock
private val rwl = new ReentrantReadWriteLock()
private val rlock = rwl.readLock
private val wlock = rwl.writeLock
private val map = new WeakHashMap[K, WeakReference[V]]
protected def valueFromKey(k: K): V
protected def keyFromValue(v: V): Option[K]
def apply(name: K): V = {
def cached(): V = {
rlock.lock
try {
val reference = map get name
if (reference == null) null
else reference.get // will be null if we were gc-ed
}
finally rlock.unlock
}
def updateCache(): V = {
wlock.lock
try {
val res = cached()
if (res != null) res
else {
// If we don't remove the old String key from the map, we can
// wind up with one String as the key and a different String as
// as the name field in the Symbol, which can lead to surprising
// GC behavior and duplicate Symbols. See SI-6706.
map remove name
val sym = valueFromKey(name)
map.put(name, new WeakReference(sym))
sym
}
}
finally wlock.unlock
}
val res = cached()
if (res == null) updateCache()
else res
}
def unapply(other: V): Option[K] = keyFromValue(other)
}
*/
| mdedetrich/scala-js | scalalib/overrides/scala/Symbol.scala | Scala | bsd-3-clause | 3,678 |
package safe.dsp
import safe.SafeVector
/**
* Functions for calculating spectral shape statistics including:
* $ - Centroid
* $ - Spread
* $ - Skewness
* $ - Kurtosis
*
* Based on spectral shape parameters outlined in:
*
* 1. "Automatic Transcription of Drum Loops"
* O. Gillet and G. Richard
* IEEE International Conference on Acoustics, Speech and Signal Processing (ICASSP), Montreal, Canada, 2004
*/
object SpectralShape {
/** Calculate the spectral centroid from the magnitude spectrum */
def centroid(magSpecData: SafeVector[Double]) =
statistics(magSpecData)(0)
/** Calculate the spectral spread from the magnitude spectrum */
def spread(magSpecData: SafeVector[Double]) =
statistics(magSpecData)(1)
/** Calculate the spectral skewness from the magnitude spectrum */
def skewness(magSpecData: SafeVector[Double]) =
statistics(magSpecData)(2)
/** Calculate the spectral kurtosis from the magnitude spectrum */
def kurtosis(magSpecData: SafeVector[Double]) =
statistics(magSpecData)(3)
/** Calculate the spectral centroid, spread, skewness, and kurtosis
* from the magnitude spectrum
*/
def statistics(magSpecData: SafeVector[Double]) = {
val out = Array(0.0, 0.0, 0.0, 0.0)
val sum = magSpecData.sum
if (sum > 0.0) {
val ms = Array(0.0, 0.0, 0.0, 0.0)
var i = 0
while (i < magSpecData.length) {
var v = magSpecData(i)
v *= i; ms(0) += v
v *= i; ms(1) += v
v *= i; ms(2) += v
v *= i; ms(3) += v
i += 1
}
for (i <- 0 until ms.length) ms(i) /= sum
// Centroid
out(0) = ms(0)
// Spread
out(1) = math.sqrt(ms(1) - math.pow(ms(0), 2))
// Skewness
out(2) = (2 * math.pow(ms(0), 3) - 3 * ms(0) * ms(1) + ms(2)) / math.pow(out(1), 3)
// Kurtosis
out(3) = (-3 * math.pow(ms(0), 4) + 6 * ms(0) * ms(1) - 4 * ms(0) * ms(2) + ms(3)) / math.pow(out(1), 4) - 3
}
SafeVector(out)
}
} | devonbryant/safe | safe-core/src/main/scala/safe/dsp/SpectralShape.scala | Scala | epl-1.0 | 2,061 |
/*
* (c) Copyright 2016 Hewlett Packard Enterprise Development LP
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cogdebugger.ui.fieldvisualizations.symmetrictensor
import libcog._
import scala.swing._
import cogdebugger.ui.fieldvisualizations.{Viewer, ZoomProperty}
import cogdebugger.ui.fieldvisualizations.scalar.ScalarMemoryView
import cogdebugger.ui.components.{PanPane, WrapPanel}
import cogdebugger.{RestorableState, PropertyValueChanged}
import scala.xml.{Node, Elem}
/** A panel which displays a complex field as a modulus panel and an
* argument panel, side by side.
*
* @param fieldType Shape of the complex field being displayed
*
* @author Greg Snider
*/
class SticknessOrientationSubpanel(fieldType: FieldType)
extends BorderPanel
with Viewer
with ZoomProperty
with RestorableState
{
def fieldShape = fieldType.fieldShape
require(fieldShape.dimensions == 2, "Only 2D stick tensor fields supported now")
val rows = fieldShape(0)
val columns = fieldShape(1)
/** Panel displaying stickness part of field. */
private val sticknessPanel = new ScalarMemoryView(fieldType)
/** Panel displaying orientation part of complex field. */
private val orientationPanel = new ScalarMemoryView(fieldType)
/** Zooming? */
var zoomIncrement = 1f
// Initialize
setFloatingMax(sticknessPanel)
setFloatingMax(orientationPanel)
private val prettyPanel = new WrapPanel(
wrapPanel(sticknessPanel, "stickness"),
wrapPanel(orientationPanel, "orientation")
)
add(prettyPanel, BorderPanel.Position.Center)
properties += ZoomProperty
/** Update the display with a new stick tensor field.
*
* @param src Requester of the update (?).
* @param data The new stick tensor field data
* @param time Current simulation time.
*/
def update(src: AnyRef, data: AbstractFieldMemory, time: Long) {
// Stick tensor fields use complex fields
val complexData = data.asInstanceOf[ComplexFieldMemory]
val magnitudeData = Array.ofDim[Float](rows, columns)
val orientationData = Array.ofDim[Float](rows, columns)
for (row <- 0 until rows; col <- 0 until columns) {
val complex: Complex = complexData.read(row, col)
magnitudeData(row)(col) = complex.magnitude
// Orientation is half of phase, and must be in the range (0, Pi]
var orientation = complex.phase / 2
if (orientation <= 0)
orientation += math.Pi.toFloat
orientationData(row)(col) = orientation
}
sticknessPanel.update(Seq(magnitudeData))
orientationPanel.update(Seq(orientationData))
}
// Rig up zoom functionality
listenTo(ZoomProperty)
reactions += {
case PropertyValueChanged(ZoomProperty, oldValue, newValue: Float) =>
sticknessPanel.zoomLevel = newValue
orientationPanel.zoomLevel = newValue
}
def save: Elem =
<SticknessOrientationSubpanel>
{ propertiesTag }
</SticknessOrientationSubpanel>
def restore(tag: Node) {
(tag \\ "SticknessOrientationSubpanel" \\ "properties").headOption.foreach(xmlToProperties)
}
/** Force floating max to be true. */
private def setFloatingMax(panel: ScalarMemoryView) {
//panel.viewerTools.floatMaxButton.selected = true
panel.FloatingMaxProperty.value = true
}
/** Wraps a panel with another panel, removing the wrapped panel's toolbar
* while adding a title and legend.
*
* @param panel The panel being wrapped.
* @param title The title displayed for the wrapped panel.
*/
private def wrapPanel(panel: ScalarMemoryView, title: String): BorderPanel = {
val label = new Label(title)
val legend = new BoxPanel(Orientation.Horizontal)
legend.contents ++= panel.toolbarComponents(panel.legendGroupIdx).components
new BorderPanel() {
border = Swing.EmptyBorder(2, 0, 2, 0)
add(label, BorderPanel.Position.North)
add(panel, BorderPanel.Position.Center)
add(legend, BorderPanel.Position.South)
}
}
} | hpe-cct/cct-core | src/main/scala/cogdebugger/ui/fieldvisualizations/symmetrictensor/SticknessOrientationSubpanel.scala | Scala | apache-2.0 | 4,508 |
////////////////////////////////////////////////////////////////////////////////
// //
// OpenSolid is a generic library for the representation and manipulation //
// of geometric objects such as points, curves, surfaces, and volumes. //
// //
// Copyright 2007-2015 by Ian Mackenzie //
// [email protected] //
// //
// This Source Code Form is subject to the terms of the Mozilla Public //
// License, v. 2.0. If a copy of the MPL was not distributed with this file, //
// you can obtain one at http://mozilla.org/MPL/2.0/. //
// //
////////////////////////////////////////////////////////////////////////////////
package org.opensolid.core
import scala.math
import scala.util.Random
/** Represents a range of real numbers and allows mathematical operations on those ranges.
*
* Intervals support most of the same operations as floating-point numbers (sum, square root,
* sine, logarithm etc.) as well as some specific to intervals (hull, intersection etc.). Mixed
* operations such as the sum of an interval and a floating-point value are also supported and
* result in an interval.
*
* In general, mathematical operations on intervals result in an interval that contains all
* possible floating-point values that could result from applying the corresponding floating-point
 * mathematical operation to any combination of values from the inputs. For instance, the
* expression `Interval(1.0, 2.0) * Interval(-5.0, -3.0)` results in `Interval(-10.0, -3.0)` since
* the lowest possible product of values taken from those two intervals is `2.0 * (-5.0) == -10.0`
* and the highest possible product is `1.0 * (-3.0) == -3.0`.
*
* Note that not only the endpoints are considered - `Interval(0.0, math.Pi).sin` results in
* `Interval(0.0, 1.0)` even though `math.sin(0.0)` and `math.sin(math.Pi)` are both zero, since
* `math.sin(math.Pi / 2.0)` is 1.0 and `math.Pi / 2.0` is a possible value within
* `Interval(0.0, math.Pi)`.
*
* Examples:
* {{{
* scala> val a = Interval(2.0, 3.0)
* a: org.opensolid.core.Interval = Interval(2.0,3.0)
*
* scala> val b = 2.0 * a
* b: org.opensolid.core.Interval = Interval(4.0,6.0)
*
* scala> val c = a - b
* c: org.opensolid.core.Interval = Interval(-4.0,-1.0)
*
* scala> c.contains(-2.0)
* res0: Boolean = true
*
* scala> a.overlaps(b)
* res1: Boolean = false
*
* scala> c.lowerBound
* res2: Double = -4.0
*
* scala> c.upperBound
* res3: Double = -1.0
*
* scala> c.midpoint
* res4: Double = -2.5
*
* scala> c.width
* res5: Double = 3.0
* }}}
*/
final case class Interval(lowerBound: Double, upperBound: Double) extends Bounds[Interval]
with Bounded[Interval] {
/** Returns a tuple containing the lower and upper bounds of this interval. */
def endpoints: (Double, Double) =
(lowerBound, upperBound)
override def equals(other: Any): Boolean = other match {
case that: Interval =>
(this.lowerBound == that.lowerBound && this.upperBound == that.upperBound) ||
(this.isEmpty && that.isEmpty)
case _ => false
}
override def bounds: Interval =
this
override def hashCode: Int =
(lowerBound, upperBound).hashCode
override def toString: String = {
if (isEmpty) {
"Interval.Empty"
} else if (isWhole) {
"Interval.Whole"
} else {
s"Interval($lowerBound, $upperBound)"
}
}
/** Returns true if this is the empty interval (contains no values). Note that a singleton
* interval (one with zero width) is not considered empty since it contains a single value.
*
* Equivalent to `this == Interval.Empty`.
*/
def isEmpty: Boolean =
lowerBound.isNaN && upperBound.isNaN
/** Returns true if this interval represents the entire range of possible real values (from
* negative infinity to positive infinity).
*
* Equivalent to `this == Interval.Whole`.
*/
def isWhole: Boolean =
lowerBound.isNegInfinity && upperBound.isPosInfinity
def isZero(tolerance: Double): Boolean =
lowerBound >= -tolerance && upperBound <= tolerance
def isNonZero(tolerance: Double): Boolean =
lowerBound > tolerance || upperBound < -tolerance
def isFinite: Boolean =
lowerBound > Double.NegativeInfinity && upperBound < Double.PositiveInfinity
/** Returns the width of this interval (the difference between the upper and lower bounds). */
def width: Double =
upperBound - lowerBound
/** Returns a value interpolated between the lower and upper bounds of this interval.
*
* Examples:
* {{{
* scala> val interval = Interval(2.0, 3.0)
* interval: org.opensolid.core.Interval = Interval(2.0,3.0)
*
* scala> interval.interpolated(0.0)
* res0: Double = 2.0
*
* scala> interval.interpolated(1.0)
* res1: Double = 3.0
*
* scala> interval.interpolated(0.5)
* res2: Double = 2.5
*
* scala> interval.interpolated(2.0)
* res3: Double = 4.0
* }}}
*/
def interpolated(value: Double): Double = {
val width = this.width
if (width < Double.PositiveInfinity) {
lowerBound + value * width
} else if (width.isNaN || value.isNaN) {
Double.NaN
} else if (isWhole) {
if (value <= 0.0) {
Double.NegativeInfinity
} else if (value >= 1.0) {
Double.PositiveInfinity
} else {
Double.NaN
}
} else if (lowerBound.isInfinity) {
if (value < 1.0) {
Double.NegativeInfinity
} else if (value > 1.0) {
Double.PositiveInfinity
} else {
upperBound
}
} else {
if (value < 0.0) {
Double.NegativeInfinity
} else if (value > 0.0) {
Double.PositiveInfinity
} else {
lowerBound
}
}
}
/** Returns a value halfway between the lower and upper bounds of this interval. */
def midpoint: Double =
interpolated(0.5)
/** Returns a random value within this interval. */
def randomValue: Double =
randomValue(Random)
/** Returns a random value within this interval, using the provided generator. */
def randomValue(generator: Random): Double =
interpolated(generator.nextDouble)
/** Returns true if this interval consists of a single value (the upper and lower bounds are
* equal).
*/
def isSingleton: Boolean =
lowerBound == upperBound
/** Returns a pair of intervals equal to this interval split into two halves.
*
* If this interval has finite width, then the split point is this interval's midpoint and the
* two returned intervals are `Interval(lowerBound, midpoint)` and
* `Interval(midpoint, upperBound)`. Otherwise, a set of heuristics is used to find a reasonable
* split point.
*
* Examples:
* {{{
* scala> Interval(2.0, 3.0).bisected
* res0: (org.opensolid.core.Interval, org.opensolid.core.Interval) =
* (Interval(2.0, 2.5),Interval(2.5, 3.0))
*
* scala> Interval.Whole.bisected
* res1: (org.opensolid.core.Interval, org.opensolid.core.Interval) =
* (Interval(-Infinity, 0.0),Interval(0.0, Infinity))
*
* scala> Interval(0.0, Double.PositiveInfinity).bisected
* res2: (org.opensolid.core.Interval, org.opensolid.core.Interval) =
* (Interval(0.0, 1.0),Interval(1.0, Infinity))
*
* scala> Interval(Double.NegativeInfinity, -10.0).bisected
* res3: (org.opensolid.core.Interval, org.opensolid.core.Interval) =
* (Interval(-Infinity, -20.0),Interval(-20.0, -10.0))
*
* scala> Interval.Empty.bisected
* res4: (org.opensolid.core.Interval, org.opensolid.core.Interval) =
* (Interval.Empty,Interval.Empty)
* }}}
*/
def bisected: (Interval, Interval) =
if (isEmpty) {
(Interval.Empty, Interval.Empty)
} else {
val mid =
if (isWhole) {
0.0
} else if (lowerBound.isNegInfinity) {
if (upperBound > 0.0) {
0.0
} else if (upperBound <= -0.5) {
2.0 * upperBound
} else {
-1.0
}
} else if (upperBound.isPosInfinity) {
if (lowerBound < 0.0) {
0.0
} else if (lowerBound >= 0.5) {
2.0 * lowerBound
} else {
1.0
}
} else {
this.midpoint
}
(Interval(lowerBound, mid), Interval(mid, upperBound))
}
def bisectedAt(value: Double): (Interval, Interval) =
if (isEmpty || value.isNaN) {
(Interval.Empty, Interval.Empty)
} else if (value < lowerBound) {
(Interval.Empty, this)
} else if (value > upperBound) {
(this, Interval.Empty)
} else {
(Interval(lowerBound, value), Interval(value, upperBound))
}
override def bisected(index: Int): (Interval, Interval) =
bisected
override def expandedBy(value: Double): Interval =
if (isEmpty || value.isNaN || value.isNegInfinity) {
Interval.Empty
} else if (value.isPosInfinity) {
if (lowerBound.isPosInfinity) this else Interval.Whole
} else {
val lowerBound = this.lowerBound - value
val upperBound = this.upperBound + value
if (lowerBound <= upperBound) Interval(lowerBound, upperBound) else Interval.Empty
}
/** Returns a new interval that contains both this interval and the given value. */
def hull(value: Double): Interval =
if (value < lowerBound) {
Interval(value, upperBound)
} else if (value > upperBound) {
Interval(lowerBound, value)
} else if (isEmpty) {
Interval.singleton(value)
} else {
// value is NaN or inside interval
this
}
/** Returns a new interval that contains both this interval and the given interval. */
override def hull(that: Interval): Interval =
if (isEmpty) {
that
} else if (that.isEmpty) {
this
} else {
Interval(lowerBound.min(that.lowerBound), upperBound.max(that.upperBound))
}
/** Returns a new interval that contains all values common to both this interval and the given
* interval. If the two intervals do not overlap at all then the empty interval is returned.
*/
def intersection(that: Interval): Interval = {
val lowerBound = this.lowerBound.max(that.lowerBound)
val upperBound = this.upperBound.min(that.upperBound)
if (lowerBound <= upperBound) Interval(lowerBound, upperBound) else Interval.Empty
}
/** Returns true if the given value is between the upper and lower bounds of this interval. */
def contains(value: Double): Boolean =
value >= lowerBound && value <= upperBound
/** Returns true if this interval fully contains the given interval (that is, this interval
* contains both the upper and lower bounds of the given interval).
*
* Examples:
* {{{
* scala> Interval(5, 10).contains(Interval(7, 8))
* res0: Boolean = true
* scala> Interval(5, 10).contains(Interval(9, 10))
* res1: Boolean = true
* scala> Interval(5, 10).contains(Interval(8, 12))
* res2: Boolean = false
* }}}
*/
override def contains(that: Interval): Boolean =
that.lowerBound >= this.lowerBound && that.upperBound <= this.upperBound
/** Returns true if this interval overlaps the given interval.
*
* Examples:
* {{{
* scala> Interval(2, 4).overlaps(Interval(3, 5))
* res0: Boolean = true
*
* scala> Interval(5, 10).overlaps(Interval(6, 7))
* res1: Boolean = true
*
* scala> Interval(2, 4).overlaps(Interval(6, 8))
* res2: Boolean = false
*
* scala> Interval(0, 1).overlaps(Interval.Whole)
* res3: Boolean = true
*
* scala> Interval(0, 1).overlaps(Interval.Empty)
* res4: Boolean = false
* }}}
*/
override def overlaps(that: Interval): Boolean =
that.lowerBound <= this.upperBound && that.upperBound >= this.lowerBound
override def component(index: Int): Interval =
this
def unary_- : Interval =
Interval(-upperBound, -lowerBound)
def negated: Interval =
-this
def reciprocal: Interval =
if (lowerBound > 0.0 || upperBound < 0.0) {
Interval(1.0 / upperBound, 1.0 / lowerBound)
} else if (lowerBound < 0.0 && upperBound == 0.0) {
Interval(Double.NegativeInfinity, 1.0 / lowerBound)
} else if (lowerBound == 0.0 && upperBound > 0.0) {
Interval(1.0 / upperBound, Double.PositiveInfinity)
} else if (isEmpty) {
Interval.Empty
} else {
Interval.Whole
}
def +(value: Double): Interval =
Interval.nondecreasing(lowerBound + value, upperBound + value)
def plus(value: Double): Interval =
this + value
def +(that: Interval): Interval =
Interval.nondecreasing(this.lowerBound + that.lowerBound, this.upperBound + that.upperBound)
def plus(that: Interval): Interval =
this + that
def -(value: Double): Interval =
Interval.nondecreasing(lowerBound - value, upperBound - value)
def minus(value: Double): Interval =
this - value
def -(that: Interval): Interval =
Interval.nondecreasing(this.lowerBound - that.upperBound, this.upperBound - that.lowerBound)
def minus(that: Interval): Interval =
this - that
def *(value: Double): Interval =
if (value > 0.0) {
Interval.nondecreasing(lowerBound * value, upperBound * value)
} else if (value < 0.0) {
Interval.nondecreasing(upperBound * value, lowerBound * value)
} else if (value == 0.0) {
Interval.Zero
} else {
Interval.Empty
}
def times(value: Double): Interval =
this * value
def *(that: Interval): Interval =
if (this == Interval.Zero && that == Interval.Whole) {
Interval.Zero
} else {
(this * that.lowerBound).hull(this * that.upperBound)
}
def times(that: Interval): Interval =
this * that
def /(value: Double): Interval =
if (isEmpty || value.isNaN) {
Interval.Empty
} else if (value.isInfinity) {
if (lowerBound.isPosInfinity || upperBound.isNegInfinity) Interval.Empty else Interval.Zero
} else if (value > 0.0) {
Interval(lowerBound / value, upperBound / value)
} else if (value < 0.0) {
Interval(upperBound / value, lowerBound / value)
} else if (value == 0.0) {
if (lowerBound == 0.0 && upperBound == 0.0) {
Interval.Empty
} else if (lowerBound >= 0.0) {
Interval.PositiveInfinity
} else if (upperBound <= 0.0) {
Interval.NegativeInfinity
} else {
Interval.Whole
}
} else {
Interval.Empty
}
def dividedBy(value: Double): Interval =
this / value
def /(that: Interval): Interval =
this * that.reciprocal
def dividedBy(that: Interval): Interval =
this / that
def abs: Interval =
if (isEmpty) {
Interval.Empty
} else if (lowerBound >= 0.0) {
this
} else if (upperBound <= 0.0) {
-this
} else if (-lowerBound < upperBound) {
Interval(0.0, upperBound)
} else {
Interval(0.0, -lowerBound)
}
def squared: Interval =
if (isEmpty) {
Interval.Empty
} else if (lowerBound >= 0.0) {
Interval(lowerBound * lowerBound, upperBound * upperBound)
} else if (upperBound <= 0.0) {
Interval(upperBound * upperBound, lowerBound * lowerBound)
} else if (-lowerBound < upperBound) {
Interval(0.0, upperBound * upperBound)
} else {
Interval(0.0, lowerBound * lowerBound)
}
def sqrt: Interval =
if (isEmpty || upperBound < 0.0) {
Interval.Empty
} else {
Interval(math.sqrt(lowerBound.max(0.0)), math.sqrt(upperBound))
}
def sin: Interval =
if (isEmpty) {
Interval.Empty
} else if (isSingleton) {
Interval.singleton(math.sin(lowerBound))
} else {
val (hasMin, hasMax) = (this - math.Pi / 2.0).cosHasMinMax
if (hasMin && hasMax) {
Interval.SinCosFullRange
} else {
val sinLower = math.sin(this.lowerBound)
val sinUpper = math.sin(this.upperBound)
val lowerBound = if (hasMin) -1.0 else sinLower.min(sinUpper)
val upperBound = if (hasMax) 1.0 else sinLower.max(sinUpper)
Interval(lowerBound, upperBound)
}
}
def cos: Interval =
if (isEmpty) {
Interval.Empty
} else if (isSingleton) {
Interval.singleton(math.cos(lowerBound))
} else {
val (hasMin, hasMax) = cosHasMinMax
if (hasMin && hasMax) {
Interval.SinCosFullRange
} else {
val cosLower = math.cos(this.lowerBound)
val cosUpper = math.cos(this.upperBound)
val lowerBound = if (hasMin) -1.0 else cosLower.min(cosUpper)
val upperBound = if (hasMax) 1.0 else cosLower.max(cosUpper)
Interval(lowerBound, upperBound)
}
}
private def cosHasMinMax: (Boolean, Boolean) = {
val abs = this.abs
if (abs.upperBound.isInfinity) {
(true, true)
} else {
val width = abs.width
val hasMin = (abs.upperBound + math.Pi) % (2 * math.Pi) <= width
val hasMax = abs.upperBound % (2 * math.Pi) <= width
(hasMin, hasMax)
}
}
def tan: Interval = {
val abs = this.abs
if (abs.upperBound.isInfinity) {
Interval.Whole
} else {
val hasSingularity = (abs.upperBound + math.Pi / 2.0) % math.Pi <= abs.width
if (hasSingularity) Interval.Whole else Interval(math.tan(lowerBound), math.tan(upperBound))
}
}
def asin: Interval =
if (isEmpty || lowerBound > 1.0 || upperBound < -1.0) {
Interval.Empty
} else {
Interval(math.asin(lowerBound.max(-1.0)), math.asin(upperBound.min(1.0)))
}
def acos: Interval =
if (isEmpty || lowerBound > 1.0 || upperBound < -1.0) {
Interval.Empty
} else {
Interval(math.acos(upperBound.min(1.0)), math.acos(lowerBound.max(-1.0)))
}
def atan: Interval =
Interval(math.atan(lowerBound), math.atan(upperBound))
def exp: Interval =
Interval(math.exp(lowerBound), math.exp(upperBound))
def log: Interval =
if (isEmpty || upperBound < 0.0) {
Interval.Empty
} else {
Interval(math.log(lowerBound.max(0.0)), math.log(upperBound))
}
def ulp: Double =
math.ulp(lowerBound).max(math.ulp(upperBound))
}
object Interval {
def fromEndpoints(endpoints: (Double, Double)): Interval = endpoints match {
case (lowerBound, upperBound) => Interval(lowerBound, upperBound)
}
def singleton(value: Double): Interval =
Interval(value, value)
/** The empty interval (contains no values).
*
* This is returned in situations such as the intersection of two non-overlapping intervals, the
* square root of an interval containing only negative values, or the sum of the empty interval
* and any other interval.
*/
val Empty: Interval = new Interval(Double.NaN, Double.NaN)
val Whole: Interval = new Interval(Double.NegativeInfinity, Double.PositiveInfinity)
val Unit: Interval = new Interval(0.0, 1.0)
val Zero: Interval = new Interval(0.0, 0.0)
val NegativeInfinity: Interval = Interval(Double.NegativeInfinity, Double.NegativeInfinity)
val PositiveInfinity: Interval = Interval(Double.PositiveInfinity, Double.PositiveInfinity)
val PositiveHalf: Interval = Interval(0.0, Double.PositiveInfinity)
val NegativeHalf: Interval = Interval(Double.NegativeInfinity, 0.0)
private[Interval] val SinCosFullRange = Interval(-1.0, 1.0)
private[Interval] def nondecreasing(lowerBound: Double, upperBound: Double): Interval =
if (lowerBound <= upperBound) {
Interval(lowerBound, upperBound)
} else if (!lowerBound.isNaN) {
singleton(lowerBound)
} else if (!upperBound.isNaN) {
singleton(upperBound)
} else {
Empty
}
}
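// Illustrative sketch (not part of the original file): hull and intersection on
// two overlapping intervals, following the definitions documented above.
object IntervalExamples {
  def demo(): Unit = {
    val a = Interval(1.0, 3.0)
    val b = Interval(2.0, 5.0)
    assert(a.hull(b) == Interval(1.0, 5.0))            // smallest interval containing both
    assert(a.intersection(b) == Interval(2.0, 3.0))    // values common to both
    assert(a.intersection(Interval(4.0, 6.0)).isEmpty) // disjoint intervals intersect to Empty
  }
}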
| ianmackenzie/opensolid-core | src/main/scala/org/opensolid/core/Interval.scala | Scala | mpl-2.0 | 20,109 |
package com.mogproject.mogami.core.attack
import com.mogproject.mogami._
/**
*
*/
trait DirectAttack {
def getDirectAttack(piece: Piece, square: Square): BitBoard =
baseAttack.getOrElse(piece.ptype, baseAttack(GOLD)).flipByPlayer(piece.owner).shiftRight(5 - square.file).shiftUp(5 - square.rank)
private[this] val baseAttack: Map[Ptype, BitBoard] = (Seq(PAWN, KNIGHT, SILVER, GOLD, KING) zip BitBoard.seq(
"""
|--------- --------- --------- --------- ---------
|--------- --------- --------- --------- ---------
|--------- ---*-*--- --------- --------- ---------
|----*---- --------- ---***--- ---***--- ---***---
|--------- --------- --------- ---*-*--- ---*-*---
|--------- --------- ---*-*--- ----*---- ---***---
|--------- --------- --------- --------- ---------
|--------- --------- --------- --------- ---------
|--------- --------- --------- --------- ---------
""".stripMargin)).toMap
}
| mogproject/mog-core-scala | shared/src/main/scala/com/mogproject/mogami/core/attack/DirectAttack.scala | Scala | apache-2.0 | 972 |
package fpinscala.gettingstarted
// A comment!
/* Another comment */
/** A documentation comment */
object MyModule {
def abs(n: Int): Int =
if (n < 0) -n
else n
private def formatAbs(x: Int) = {
val msg = "The absolute value of %d is %d"
msg.format(x, abs(x))
}
def main(args: Array[String]): Unit =
println(formatAbs(-42))
// A definition of factorial, using a local, tail recursive function
def factorial(n: Int): Int = {
@annotation.tailrec
def go(n: Int, acc: Int): Int =
if (n <= 0) acc
else go(n-1, n*acc)
go(n, 1)
}
// Another implementation of `factorial`, this time with a `while` loop
def factorial2(n: Int): Int = {
var acc = 1
var i = n
while (i > 0) { acc *= i; i -= 1 }
acc
}
// Exercise 1: Write a function to compute the nth fibonacci number
def fib(n: Int): Int = {
@annotation.tailrec
def go(f1: Int, f2: Int, n: Int): Int = {
if (n == 0) f1 else go(f2, f1 + f2, n - 1)
}
go(0, 1, n)
}
// This definition and `formatAbs` are very similar..
private def formatFactorial(n: Int) = {
val msg = "The absolute value of %d is %d."
msg.format(n, factorial(n))
}
// We can generalize `formatAbs` and `formatFactorial` to
// accept a _function_ as a parameter
def formatResult(name: String, n: Int, f: Int => Int) = {
val msg = "The %s of %d is %d."
msg.format(name, n, f(n))
}
}
object FormatAbsAndFactorial {
import MyModule._
// Now we can use our general `formatResult` function
// with both `abs` and `factorial`
def main(args: Array[String]): Unit = {
println(formatResult("absolute value", -42, abs))
println(formatResult("factorial", 7, factorial))
}
}
// Functions get passed around so often in FP that it's
// convenient to have syntax for constructing a function
// *without* having to give it a name
object AnonymousFunctions {
import MyModule._
// Some examples of anonymous functions:
def main(args: Array[String]): Unit = {
println(formatResult("absolute value", -42, abs))
println(formatResult("factorial", 7, factorial))
println(formatResult("increment", 7, (x: Int) => x + 1))
println(formatResult("increment2", 7, (x) => x + 1))
println(formatResult("increment3", 7, x => x + 1))
println(formatResult("increment4", 7, _ + 1))
println(formatResult("increment5", 7, x => { val r = x + 1; r }))
}
}
object MonomorphicBinarySearch {
// First, a binary search implementation, specialized to `Double`,
// another primitive type in Scala, representing 64-bit floating
// point numbers
// Ideally, we could generalize this to work for any `Array` type,
// so long as we have some way of comparing elements of the `Array`
def binarySearch(ds: Array[Double], key: Double): Int = {
@annotation.tailrec
def go(low: Int, mid: Int, high: Int): Int = {
if (low > high) -mid - 1
else {
val mid2 = (low + high) / 2
val d = ds(mid2) // We index into an array using the same
// syntax as function application
if (d == key) mid2
else if (d > key) go(low, mid2, mid2-1)
else go(mid2 + 1, mid2, high)
}
}
go(0, 0, ds.length - 1)
}
}
object PolymorphicFunctions {
// Here's a polymorphic version of `binarySearch`, parameterized on
// a function for testing whether an `A` is greater than another `A`.
def binarySearch[A](as: Array[A], key: A, gt: (A,A) => Boolean): Int = {
@annotation.tailrec
def go(low: Int, mid: Int, high: Int): Int = {
if (low > high) -mid - 1
else {
val mid2 = (low + high) / 2
val a = as(mid2)
val greater = gt(a, key)
if (!greater && !gt(key,a)) mid2
else if (greater) go(low, mid2, mid2-1)
else go(mid2 + 1, mid2, high)
}
}
go(0, 0, as.length - 1)
}
// Exercise 2: Implement a polymorphic function to check whether
// an `Array[A]` is sorted
def isSorted[A](as: Array[A], gt: (A,A) => Boolean): Boolean = {
def loop(n: Int): Boolean = {
if (n >= as.length) true
else if (gt(as(n - 1), as(n))) false
else loop(n + 1)
}
loop(1)
}
// Polymorphic functions are often so constrained by their type
// that they only have one implementation! Here's an example:
// Exercise 3: Implement `partial1`.
def partial1[A,B,C](a: A, f: (A,B) => C): B => C =
b => f(a, b)
// Exercise 4: Implement `curry`.
// Note that `=>` associates to the right, so we could
// write the return type as `A => B => C`
def curry[A,B,C](f: (A, B) => C): A => (B => C) =
a => b => f(a, b)
// NB: The `Function2` trait has a `curried` method already
// Exercise 5: Implement `uncurry`
def uncurry[A,B,C](f: A => B => C): (A, B) => C =
(a, b) => f(a)(b)
/*
NB: There is a method on the `Function` object in the standard library,
`Function.uncurried` that you can use for uncurrying.
Note that we can go back and forth between the two forms. We can curry
and uncurry and the two forms are in some sense "the same". In FP jargon,
we say that they are _isomorphic_ ("iso" = same; "morphe" = shape, form),
a term we inherit from category theory.
*/
// Exercise 6: Implement `compose`
def compose[A,B,C](f: B => C, g: A => B): A => C =
a => f(g(a))
}
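// Illustrative sketch (not part of the original exercises): exercising the
// polymorphic helpers defined above.
object PolymorphicFunctionsExamples {
  import PolymorphicFunctions._
  def main(args: Array[String]): Unit = {
    println(isSorted(Array(1, 2, 3), (a: Int, b: Int) => a > b))  // true
    val add = (a: Int, b: Int) => a + b
    val addCurried = curry(add)
    println(addCurried(1)(2))                                     // 3
    println(uncurry(addCurried)(1, 2))                            // 3
    println(compose((b: Int) => b * 2, (a: Int) => a + 1)(3))     // (3 + 1) * 2 = 8
  }
}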
| fpinscala-muc/fpinscala-g-fresh | exercises/src/main/scala/fpinscala/gettingstarted/GettingStarted.scala | Scala | mit | 5,366 |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest
import SharedHelpers._
import java.util.concurrent.atomic.AtomicInteger
import Matchers._
class BeforeAndAfterAllConfigMapSpec extends FunSpec {
class ExampleSuite extends FunSuite with BeforeAndAfterAllConfigMap with ParallelTestExecution {
@volatile var beforeAllTime: Long = 0
@volatile var afterAllTime: Long = 0
override protected def beforeAll(configMap: ConfigMap) {
beforeAllTime = System.currentTimeMillis
}
test("test 1") { Thread.sleep(100) }
test("test 2") { Thread.sleep(100) }
test("test 3") { Thread.sleep(100) }
override def newInstance: Suite with ParallelTestExecution = new ExampleSuite
override protected def afterAll(configMap: ConfigMap) {
afterAllTime = System.currentTimeMillis
}
}
class ExampleNestedSuite extends FunSuite with ParallelTestExecution {
test("test 1") { Thread.sleep(100) }
test("test 2") { Thread.sleep(100) }
test("test 3") { Thread.sleep(100) }
override def newInstance: Suite with ParallelTestExecution = new ExampleNestedSuite
}
@Ignore
class ExampleIgnoreNestedSuite extends FunSuite with ParallelTestExecution {
test("test 1") { Thread.sleep(100) }
test("test 2") { Thread.sleep(100) }
test("test 3") { Thread.sleep(100) }
override def newInstance: Suite with ParallelTestExecution = new ExampleNestedSuite
}
class ExampleSuites extends Suites(
new ExampleNestedSuite
) with BeforeAndAfterAllConfigMap {
@volatile var beforeAllTime: Long = 0
@volatile var afterAllTime: Long = 0
override protected def beforeAll(configMap: ConfigMap) {
beforeAllTime = System.currentTimeMillis
}
override protected def afterAll(configMap: ConfigMap) {
afterAllTime = System.currentTimeMillis
}
}
class BeforeAfterAllCounter {
@volatile var beforeAll = new AtomicInteger
@volatile var afterAll = new AtomicInteger
def incrementBeforeAllCount() {
beforeAll.incrementAndGet()
}
def incrementAfterAllCount() {
afterAll.incrementAndGet()
}
def beforeAllCount = beforeAll.get
def afterAllCount = afterAll.get
}
class ExampleBeforeAndAfterAllWithParallelTestExecutionSuite(counter: BeforeAfterAllCounter) extends FunSuite with BeforeAndAfterAllConfigMap
with OneInstancePerTest {
override protected def beforeAll(configMap: ConfigMap) {
counter.incrementBeforeAllCount()
}
override protected def afterAll(configMap: ConfigMap) {
counter.incrementAfterAllCount()
}
test("test 1") { Thread.sleep(100) }
test("test 2") { Thread.sleep(100) }
test("test 3") { Thread.sleep(100) }
override def newInstance: Suite with OneInstancePerTest = new ExampleBeforeAndAfterAllWithParallelTestExecutionSuite(counter)
}
describe("BeforeAndAfterAll") {
it ("should call beforeAll before any test starts, and call afterAll after all tests completed") {
val suite = new ExampleSuite()
val rep = new EventRecordingReporter()
val dist = new TestConcurrentDistributor(2)
suite.run(None, Args(reporter = rep, distributor = Some(dist)))
dist.waitUntilDone()
// should call beforeAll before any test starts
val beforeAllTime = suite.beforeAllTime
val testStartingEvents = rep.testStartingEventsReceived
testStartingEvents should have size 3
testStartingEvents.foreach { testStarting =>
beforeAllTime should be <= testStarting.timeStamp
}
// should call afterAll after all tests completed
val afterAllTime = suite.afterAllTime
val testSucceededEvents = rep.testSucceededEventsReceived
testSucceededEvents should have size 3
testSucceededEvents.foreach { testSucceeded =>
afterAllTime should be >= testSucceeded.timeStamp
}
}
it ("should call beforeAll before any test starts in nested suite, and call afterAll after all tests in nested suites completed") {
val suite = new ExampleSuites
val rep = new EventRecordingReporter
val dist = new TestConcurrentDistributor(2)
suite.run(None, Args(reporter = rep, distributor = Some(dist)))
dist.waitUntilDone()
// should call beforeAll before any test in nested suite starts
val beforeAllTime = suite.beforeAllTime
val testStartingEvents = rep.testStartingEventsReceived
testStartingEvents should have size 3
testStartingEvents.foreach { testStarting =>
beforeAllTime should be <= testStarting.timeStamp
}
// should call afterAll after all tests completed
val afterAllTime = suite.afterAllTime
val testSucceededEvents = rep.testSucceededEventsReceived
testSucceededEvents should have size 3
testSucceededEvents.foreach { testSucceeded =>
afterAllTime should be >= testSucceeded.timeStamp
}
}
it ("should be called once for beforeAll and afterAll when used with OneInstancePerTest") {
val counter = new BeforeAfterAllCounter
val suite = new ExampleBeforeAndAfterAllWithParallelTestExecutionSuite(counter)
val rep = new EventRecordingReporter
suite.run(None, Args(reporter = rep))
counter.beforeAllCount should be (1)
counter.afterAllCount should be (1)
}
it ("should have default invokeBeforeAllAndAfterAllEvenIfNoTestsAreExpected value false") {
class ExampleSpec extends FunSpec with BeforeAndAfterAllConfigMap {}
val spec = new ExampleSpec
spec.invokeBeforeAllAndAfterAllEvenIfNoTestsAreExpected should be (false)
}
it ("should invoke beforeAll and afterAll in Suite with no nested suites and some tests, when invokeBeforeAllAndAfterAllEvenIfNoTestsAreExpected is true") {
class ExampleSpec extends FunSpec with BeforeAndAfterAllConfigMap {
override val invokeBeforeAllAndAfterAllEvenIfNoTestsAreExpected = true
val beforeAllCount = new AtomicInteger
val afterAllCount = new AtomicInteger
override protected def beforeAll(configMap: ConfigMap) {
beforeAllCount.incrementAndGet()
}
it("test 1") {}
it("test 2") {}
it("test 3") {}
override protected def afterAll(configMap: ConfigMap) {
afterAllCount.incrementAndGet()
}
}
val spec = new ExampleSpec
spec.invokeBeforeAllAndAfterAllEvenIfNoTestsAreExpected should be (true)
spec.run(None, Args(reporter = SilentReporter))
spec.beforeAllCount.get should be (1)
spec.afterAllCount.get should be (1)
}
it ("should invoke beforeAll and afterAll in Suite with no nested suites and some tests, when invokeBeforeAllAndAfterAllEvenIfNoTestsAreExpected is false") {
class ExampleSpec extends FunSpec with BeforeAndAfterAllConfigMap {
val beforeAllCount = new AtomicInteger
val afterAllCount = new AtomicInteger
override protected def beforeAll(configMap: ConfigMap) {
beforeAllCount.incrementAndGet()
}
it("test 1") {}
it("test 2") {}
it("test 3") {}
override protected def afterAll(configMap: ConfigMap) {
afterAllCount.incrementAndGet()
}
}
val spec = new ExampleSpec
spec.invokeBeforeAllAndAfterAllEvenIfNoTestsAreExpected should be (false)
spec.run(None, Args(reporter = SilentReporter))
spec.beforeAllCount.get should be (1)
spec.afterAllCount.get should be (1)
}
it ("should invoke beforeAll and afterAll in Suite with nested suites and no test, when invokeBeforeAllAndAfterAllEvenIfNoTestsAreExpected is true") {
class ExampleSpec extends FunSpec with BeforeAndAfterAllConfigMap {
override val invokeBeforeAllAndAfterAllEvenIfNoTestsAreExpected = true
val beforeAllCount = new AtomicInteger
val afterAllCount = new AtomicInteger
override protected def beforeAll(configMap: ConfigMap) {
beforeAllCount.incrementAndGet()
}
override def nestedSuites: collection.immutable.IndexedSeq[Suite] =
Vector(
new ExampleNestedSuite,
new ExampleNestedSuite,
new ExampleNestedSuite
)
override protected def afterAll(configMap: ConfigMap) {
afterAllCount.incrementAndGet()
}
}
val spec = new ExampleSpec
spec.invokeBeforeAllAndAfterAllEvenIfNoTestsAreExpected should be (true)
spec.run(None, Args(reporter = SilentReporter))
spec.beforeAllCount.get should be (1)
spec.afterAllCount.get should be (1)
}
it ("should invoke beforeAll and afterAll in Suite with nested suites and no tests, when invokeBeforeAllAndAfterAllEvenIfNoTestsAreExpected is false") {
class ExampleSpec extends FunSpec with BeforeAndAfterAllConfigMap {
val beforeAllCount = new AtomicInteger
val afterAllCount = new AtomicInteger
override protected def beforeAll(configMap: ConfigMap) {
beforeAllCount.incrementAndGet()
}
override def nestedSuites: collection.immutable.IndexedSeq[Suite] =
Vector(
new ExampleNestedSuite,
new ExampleNestedSuite,
new ExampleNestedSuite
)
override protected def afterAll(configMap: ConfigMap) {
afterAllCount.incrementAndGet()
}
}
val spec = new ExampleSpec
spec.invokeBeforeAllAndAfterAllEvenIfNoTestsAreExpected should be (false)
spec.run(None, Args(reporter = SilentReporter))
spec.beforeAllCount.get should be (1)
spec.afterAllCount.get should be (1)
}
it("should invoke beforeAll and afterAll in Suite annotated with Ignore, has no nested suites and has tests, when invokeBeforeAllAndAfterAllEvenIfNoTestsAreExpected is true") {
@Ignore
class ExampleSpec extends FunSpec with BeforeAndAfterAllConfigMap {
override val invokeBeforeAllAndAfterAllEvenIfNoTestsAreExpected = true
val beforeAllCount = new AtomicInteger
val afterAllCount = new AtomicInteger
override protected def beforeAll(configMap: ConfigMap) {
beforeAllCount.incrementAndGet()
}
it("test 1") {}
it("test 2") {}
it("test 3") {}
override protected def afterAll(configMap: ConfigMap) {
afterAllCount.incrementAndGet()
}
}
val spec = new ExampleSpec
spec.invokeBeforeAllAndAfterAllEvenIfNoTestsAreExpected should be (true)
spec.run(None, Args(reporter = SilentReporter))
spec.beforeAllCount.get should be (1)
spec.afterAllCount.get should be (1)
}
it("should not invoke beforeAll and afterAll in Suite annotated with Ignore, has no nested suites and has tests, when invokeBeforeAllAndAfterAllEvenIfNoTestsAreExpected is false") {
@Ignore
class ExampleSpec extends FunSpec with BeforeAndAfterAllConfigMap {
val beforeAllCount = new AtomicInteger
val afterAllCount = new AtomicInteger
override protected def beforeAll(configMap: ConfigMap) {
beforeAllCount.incrementAndGet()
}
it("test 1") {}
it("test 2") {}
it("test 3") {}
override protected def afterAll(configMap: ConfigMap) {
afterAllCount.incrementAndGet()
}
}
val spec = new ExampleSpec
spec.invokeBeforeAllAndAfterAllEvenIfNoTestsAreExpected should be (false)
spec.run(None, Args(reporter = SilentReporter))
spec.beforeAllCount.get should be (0)
spec.afterAllCount.get should be (0)
}
    it("should invoke beforeAll and afterAll in Suite that has no test but has nested suites annotated with Ignore, when invokeBeforeAllAndAfterAllEvenIfNoTestsAreExpected is true") {
class ExampleSpec extends FunSpec with BeforeAndAfterAllConfigMap {
override val invokeBeforeAllAndAfterAllEvenIfNoTestsAreExpected = true
val beforeAllCount = new AtomicInteger
val afterAllCount = new AtomicInteger
override protected def beforeAll(configMap: ConfigMap) {
beforeAllCount.incrementAndGet()
}
override def nestedSuites: collection.immutable.IndexedSeq[Suite] =
Vector(
new ExampleIgnoreNestedSuite,
new ExampleIgnoreNestedSuite,
new ExampleIgnoreNestedSuite
)
override protected def afterAll(configMap: ConfigMap) {
afterAllCount.incrementAndGet()
}
}
val spec = new ExampleSpec
spec.invokeBeforeAllAndAfterAllEvenIfNoTestsAreExpected should be (true)
spec.run(None, Args(reporter = SilentReporter))
spec.beforeAllCount.get should be (1)
spec.afterAllCount.get should be (1)
}
it("should not invoke beforeAll and afterAll in Suite that has no test but has nested suites annotated with Ignore, when invokeBeforeAllAndAfterAllEvenIfNoTestsAreExpected is false") {
class ExampleSpec extends FunSpec with BeforeAndAfterAllConfigMap {
val beforeAllCount = new AtomicInteger
val afterAllCount = new AtomicInteger
override protected def beforeAll(configMap: ConfigMap) {
beforeAllCount.incrementAndGet()
}
override def nestedSuites: collection.immutable.IndexedSeq[Suite] =
Vector(
new ExampleIgnoreNestedSuite,
new ExampleIgnoreNestedSuite,
new ExampleIgnoreNestedSuite
)
override protected def afterAll(configMap: ConfigMap) {
afterAllCount.incrementAndGet()
}
}
val spec = new ExampleSpec
spec.invokeBeforeAllAndAfterAllEvenIfNoTestsAreExpected should be (false)
spec.run(None, Args(reporter = SilentReporter))
spec.beforeAllCount.get should be (0)
spec.afterAllCount.get should be (0)
}
}
} | travisbrown/scalatest | src/test/scala/org/scalatest/BeforeAndAfterAllConfigMapSpec.scala | Scala | apache-2.0 | 14,531 |
package mojave
import shapeless.Lens
trait Traversal[S, A] {
def modify(s: S)(f: A => A): S
def set(s: S)(x: A) = modify(s)(Function.const(x))
def compose[T](g: Traversal[T, S]) = {
val self = this
new Traversal[T, A] {
override def modify(t: T)(f: (A) => A): T = g.modify(t) { s: S => self.modify(s)(f) }
override def toIterable(s: T): Iterable[A] = g.toIterable(s).flatMap(self.toIterable)
}
}
// Default implementation of toIterable implemented using modify
def toIterable(s: S): Iterable[A] = {
var list: List[A] = Nil
modify(s) { item =>
list ++= List(item)
item
}
list
}
def filter(predicate: A => Boolean) = {
val self = this
new Traversal[S, A] {
override def modify(s: S)(f: (A) => A): S = {
self.modify(s) { a: A => if (predicate(a)) { f(a) } else { a }}
}
override def toIterable(s: S) = self.toIterable(s).filter(predicate)
}
}
def ifInstanceOf[SubTyep <: A](implicit mf: ClassManifest[SubTyep]): Traversal[S, SubTyep] = ClassSelectiveTraversal[A, SubTyep](mf.runtimeClass.asInstanceOf[Class[SubTyep]]).compose(this)
def ++(other: Traversal[S, A]) = ConcatTraversal(List(this, other))
}
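// Hypothetical usage sketch (not part of the original library): a hand-rolled
// Traversal over both halves of a pair, combined with filter; toIterable falls
// back to the default modify-based implementation defined in the trait above.
private object TraversalUsageSketch {
  val bothInts: Traversal[(Int, Int), Int] = new Traversal[(Int, Int), Int] {
    override def modify(s: (Int, Int))(f: Int => Int): (Int, Int) = (f(s._1), f(s._2))
  }
  val doubled: (Int, Int) = bothInts.modify((2, 3))(_ * 2)                   // (4, 6)
  val evens: Iterable[Int] = bothInts.filter(_ % 2 == 0).toIterable((2, 3))  // List(2)
}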
case class IdTraversal[A]() extends Traversal[A, A] {
override def modify(s: A)(f: (A) => A): A = f(s)
override def toIterable(s: A): Iterable[A] = List(s)
}
case class ListTraversal[A, B, C[B] <: Iterable[B]](traversal: Traversal[A, C[B]]) {
def items: Traversal[A, B] = new Traversal[A, B] {
def modify(s: A)(f: (B) => B): A = traversal.modify(s){ items: C[B] => items.map(f).asInstanceOf[C[B]] }
override def toIterable(s: A) = traversal.toIterable(s).flatten
}
}
case class OptionTraversal[A, B](traversal: Traversal[A, Option[B]]) {
def items: Traversal[A, B] = new Traversal[A, B] {
def modify(s: A)(f: (B) => B): A = traversal.modify(s){ items => items.map(f) }
override def toIterable(s: A): Iterable[B] = traversal.toIterable(s).flatten
}
}
case class LensTraversal[A, B](lens: Lens[A, B]) extends Traversal[A, B] {
override def toIterable(s: A): Iterable[B] = List(lens.get(s))
def modify(s: A)(f: (B) => B): A = lens.modify(s)(f)
}
case class ConcatTraversal[A, B](traversals: List[Traversal[A, B]]) extends Traversal[A, B] {
override def modify(s: A)(f: (B) => B): A = traversals.foldLeft(s) { case (state, t) => t.modify(state)(f) }
}
private case class ClassSelectiveTraversal[Tyep, SubTyep <: Tyep](subTypeClass: Class[SubTyep]) extends Traversal[Tyep, SubTyep] {
override def toIterable(s: Tyep) = if (subTypeClass.isInstance(s)) { List(s.asInstanceOf[SubTyep]) } else { Nil }
override def modify(s: Tyep)(f: (SubTyep) => SubTyep): Tyep = if (subTypeClass.isInstance(s)) { f(s.asInstanceOf[SubTyep]) } else { s }
} | raimohanska/mojave | src/main/scala/mojave/Traversal.scala | Scala | mit | 2,795 |
/*
* Copyright (C) 2016-2019 Lightbend Inc. <https://www.lightbend.com>
*/
import sbt._
object Generators {
// Generates a scala file that contains the Lagom version for use at runtime.
def version(lagomVersion: String, dir: File): Seq[File] = {
val file = dir / "com"/ "lightbend" / "lagom" / "core" / "LagomVersion.scala"
val scalaSource =
"""|package com.lightbend.lagom.core
|
|object LagomVersion {
| val current = "%s"
|}
""".stripMargin.format(lagomVersion)
if (!file.exists() || IO.read(file) != scalaSource) {
IO.write(file, scalaSource)
}
Seq(file)
}
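  // For illustration (hypothetical invocation, not from the build): version("1.4.0", file("target/src_managed"))
  // writes target/src_managed/com/lightbend/lagom/core/LagomVersion.scala with current = "1.4.0",
  // and skips the write when the existing file content already matches.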
}
| rstento/lagom | project/Tasks.scala | Scala | apache-2.0 | 665 |
package puck.parser.gen
import puck.parser.{SymId, RuleSemiring, RuleStructure}
import scala.collection.JavaConverters._
import puck.linalg.CLMatrix
import com.nativelibs4java.opencl._
import org.bridj.Pointer
import java.util.zip.{ZipOutputStream, ZipFile}
import puck.util.{PointerFreer, ZipUtil}
import scala.Array
/**
 * Computes, for each chart cell, a bit mask of coarse symbols whose inside+outside
 * score passes a threshold relative to the root score.
*
* @author dlwh
**/
case class CLMaskKernels(maskSize: Int,
blocksPerSentence: Int,
blockSize: Int,
getMasksKernel: CLKernel) {
def write(prefix: String, out: ZipOutputStream) {
ZipUtil.addKernel(out, s"$prefix/computeMasksKernel", getMasksKernel)
ZipUtil.serializedEntry(out, s"$prefix/MasksInts", Array(maskSize, blocksPerSentence, blockSize))
}
def getMasks(masks: CLMatrix[Int],
inside: CLMatrix[Float],
outside: CLMatrix[Float],
chartIndices: Array[Int],
lengths: Array[Int],
root: Int, threshold: Float,
events: CLEvent*)(implicit queue: CLQueue):CLEvent = {
require(masks.rows == maskSize, masks.rows + " " + maskSize)
require(masks.cols == inside.cols)
require(masks.cols == outside.cols)
queue.finish()
val ptrCI = Pointer.pointerToArray[java.lang.Integer](chartIndices)
val intBufferCI = queue.getContext.createIntBuffer(CLMem.Usage.InputOutput, chartIndices.length)
val evCI = intBufferCI.write(queue, 0, chartIndices.length, ptrCI, false, events:_*)
val ptrL = Pointer.pointerToArray[java.lang.Integer](lengths)
val intBufferL = queue.getContext.createIntBuffer(CLMem.Usage.InputOutput, lengths.length)
val evL = intBufferL.write(queue, 0, lengths.length, ptrL, false, events:_*)
getMasksKernel.setArgs(masks.data.safeBuffer,
inside.data.safeBuffer, outside.data.safeBuffer, intBufferCI, intBufferL, LocalSize.ofIntArray(maskSize * blockSize),
Integer.valueOf(chartIndices(chartIndices.length-1)), Integer.valueOf(inside.rows),
Integer.valueOf(root), java.lang.Float.valueOf(threshold))
//, LocalSize.ofIntArray(fieldSize * groupSize * 5))
val ev = getMasksKernel.enqueueNDRange(queue, Array(blocksPerSentence * blockSize, chartIndices.length-1, 1), Array(blockSize, 1, 1), evCI, evL)
// queue.finish()
PointerFreer.enqueue(ptrCI.release(), ev)
PointerFreer.enqueue(intBufferCI.release(), ev)
PointerFreer.enqueue(ptrL.release(), ev)
PointerFreer.enqueue(intBufferL.release(), ev)
ev
}
}
object CLMaskKernels {
def read(prefix: String, zf: ZipFile)(implicit ctxt: CLContext) = {
val ints = ZipUtil.deserializeEntry[Array[Int]](zf.getInputStream(zf.getEntry(s"$prefix/MasksInts")))
CLMaskKernels(ints(0), ints(1), ints(2), ZipUtil.readKernel(zf, s"$prefix/computeMasksKernel"))
}
def make[C, L](structure: RuleStructure[C, L])(implicit context: CLContext, semiring: RuleSemiring) = {
val cellSize = (structure.numNonTerms max structure.numTerms)
val maskSize = puck.roundUpToMultipleOf(structure.numCoarseSyms, 32) / 32
val blocksPerSentence = 4
val blockSize = if (context.getDevices.head.toString.contains("Apple") && context.getDevices.head.toString.contains("Intel Core")) {
1
} else {
val wgSizes = context.getDevices.head.getMaxWorkItemSizes
val x = wgSizes(0) min 32
x.toInt
}
val prog = context.createProgram(programText(cellSize, structure))
CLMaskKernels(maskSize, blocksPerSentence, blockSize, prog.createKernel("computeMasks"))
}
def maskHeader[C, L](numCoarseSyms: Int) = {
val maskSize = maskSizeFor(numCoarseSyms)
"""#define NUM_FIELDS """ + maskSize + """
typedef struct { int fields[NUM_FIELDS]; } mask_t;
inline void set_bit(mask_t* mask, int bit, int shouldSet) {
int field = (bit/32);
int modulus = bit%32;
mask->fields[field] = mask->fields[field] | (shouldSet<<(modulus));
}
#define is_set(mask, bit) ((mask)->fields[(bit)/32] & (1<<((bit)%32)))
inline int maskIntersects(const mask_t* mask1, const mask_t* mask2) {
#pragma unroll
for(int i = 0; i < NUM_FIELDS; ++i) {
if(mask1->fields[i] & mask2->fields[i]) return 1;
}
return 0;
}
inline int maskAny(const mask_t* mask1) {
#pragma unroll
for(int i = 0; i < NUM_FIELDS; ++i) {
if(mask1->fields[i]) return 1;
}
return 0;
}
"""
}
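  // Masks pack one bit per coarse symbol into 32-bit fields, so e.g. 40 coarse symbols
  // need two fields (assuming roundUpToMultipleOf rounds up to the next multiple of 32).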
def maskSizeFor[L, C](numCoarseSyms: Int): Int = {
puck.roundUpToMultipleOf(numCoarseSyms, 32) / 32
}
def genCheckIfMaskIsEmpty[C, L](structure: RuleStructure[C, L],
nameOfMaskVariable: String,
symbols: java.util.Set[SymId[C, L]]): String = {
genCheckIfMaskIsEmpty(structure, nameOfMaskVariable, symbols.asScala.toSet)
}
def genCheckIfMaskIsEmpty[C, L](structure: RuleStructure[C, L],
nameOfMaskVariable: String,
symbols: Set[SymId[C, L]]): String = {
// set up the mask
val maskStrings = for {
(field, parentsInField) <- symbols
.map(s => structure.refinements.labels.project(s.system))
.groupBy(_ / 32)
} yield parentsInField.map(p => s"(1<<($p%32))").mkString(s"$nameOfMaskVariable.fields[$field] & (", "|", ")")
maskStrings.mkString("(!((", ") | (", ")) )")
}
def programText[L, C](cellSize: Int, structure: RuleStructure[C, L]): String = {
maskHeader(structure.numCoarseSyms) ++ """
#define NUM_SYMS """ + cellSize + """
""" + structure.projectedTerminalMap.padTo(cellSize, 0).mkString("__constant int terminalProjections[] = {", ", ", "};") +
"""
""" + structure.projectedNonterminalMap.padTo(cellSize, 0).mkString("__constant int nonterminalProjections[] = {", ", ", "};") +
"""
// each global_id(0) corresponds to a single sentence.
// we have some number of workers for each sentence, global_size(1)
// indices(i) is the first cell in the i'th sentence
// indices(i+1)-1 is the last cell in the i'th sentence
// the last cell has the root score.
//
/** TODO this isn't optimized at all */
__kernel void computeMasks(__global mask_t* masksOut,
__global const float* inside,
__global const float* outside,
__global const int* indices,
__global const int* lengths,
__local mask_t* tempMasks,
const int numIndices,
int numSyms,
int root,
float thresh) {
const int part = get_group_id(0);
const int numParts = get_num_groups(0);
const int threadid = get_local_id(0);
const int numThreads = get_local_size(0);
const int sentence = get_global_id(1);
const int firstCell = indices[sentence];
const int lastCell = indices[sentence + 1];
int length = lengths[sentence];
const float root_score = inside[(lastCell-1) * numSyms + root];
float cutoff = root_score + thresh;
for(int cell = firstCell + part; cell < lastCell; cell += numParts) {
__constant const int* projections = (cell-firstCell >= length) ? nonterminalProjections : terminalProjections;
__global const float* in = inside + (cell * numSyms);
__global const float* out = outside + (cell * numSyms);
mask_t myMask;
#pragma unroll
for(int i = 0; i < NUM_FIELDS; ++i) {
myMask.fields[i] = 0;
}
#pragma unroll
for(int sym = threadid; sym < NUM_SYMS; sym += numThreads) {
float score = (in[sym] + out[sym]);
int keep = score >= cutoff;
int field = projections[sym];
set_bit(&myMask, field, keep);
}
tempMasks[threadid] = myMask;
barrier(CLK_LOCAL_MEM_FENCE);
for(uint offset = numThreads/2; offset > 0; offset >>= 1){
if(threadid < offset) {
#pragma unroll
for(int i = 0; i < NUM_FIELDS; ++i) {
tempMasks[threadid].fields[i] = tempMasks[threadid].fields[i] | tempMasks[threadid + offset].fields[i];
}
}
barrier(CLK_LOCAL_MEM_FENCE);
}
if(threadid == 0)
masksOut[cell] = tempMasks[0];
}
}
"""
}
}
| malcolmgreaves/puck | src/main/scala/puck/parser/gen/CLMaskKernels.scala | Scala | apache-2.0 | 8,333 |
package org.repwatch.alexa.handlers
import com.amazon.speech.speechlet.SpeechletResponse
import com.amazon.speech.ui.PlainTextOutputSpeech
import org.repwatch.alexa.FindRepresentativeIntent
import org.repwatch.models.User
import org.repwatch.repositories.LegislatorRepository
import scala.concurrent.Await
import scala.concurrent.duration._
object FindRepresentativeIntentHandler {
def handle(intent: FindRepresentativeIntent,
legislatorRepository: LegislatorRepository,
user: User) : SpeechletResponse = {
val representativeFuture = legislatorRepository.locateRepresentative(user.zipCode)
val maybeRepresentative = Await.result(representativeFuture, 5.seconds)
// TODO - Add test coverage for error scenarios
val outputText = maybeRepresentative match {
case Some(representative) => s"Your representative in congress is ${representative.toString}"
case None => "I wasn't able to find a representative for that zip code."
}
val response = new SpeechletResponse
val output = new PlainTextOutputSpeech
output.setText(outputText)
response.setOutputSpeech(output)
response
}
}
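// Hypothetical sketch (not part of the original handler): one way error scenarios
// (cf. the TODO above) might be surfaced to the user, by wrapping the blocking lookup
// in a Try so failures produce a spoken message instead of propagating an exception.
// The wording below is invented for illustration.
object FindRepresentativeErrorHandlingSketch {
  import scala.util.{Failure, Success, Try}

  def safeOutputText(lookup: => Option[String]): String =
    Try(lookup) match {
      case Success(Some(name)) => s"Your representative in congress is $name"
      case Success(None)       => "I wasn't able to find a representative for that zip code."
      case Failure(_)          => "Sorry, I had trouble looking that up. Please try again later."
    }
}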
| csunwold/repwatch | alexa/src/main/scala/org/repwatch/alexa/handlers/FindRepresentativeIntentHandler.scala | Scala | gpl-3.0 | 1,162 |
// Generated by the Scala Plugin for the Protocol Buffer Compiler.
// Do not edit!
//
// Protofile syntax: PROTO2
package com.google.protobuf.descriptor
import _root_.scalapb.internal.compat.JavaConverters._
/** Describes a service.
*/
@SerialVersionUID(0L)
final case class ServiceDescriptorProto(
name: _root_.scala.Option[_root_.scala.Predef.String] = _root_.scala.None,
method: _root_.scala.Seq[com.google.protobuf.descriptor.MethodDescriptorProto] = _root_.scala.Seq.empty,
options: _root_.scala.Option[com.google.protobuf.descriptor.ServiceOptions] = _root_.scala.None,
unknownFields: _root_.scalapb.UnknownFieldSet = _root_.scalapb.UnknownFieldSet.empty
) extends scalapb.GeneratedMessage with scalapb.lenses.Updatable[ServiceDescriptorProto] {
@transient
private[this] var __serializedSizeMemoized: _root_.scala.Int = 0
private[this] def __computeSerializedSize(): _root_.scala.Int = {
var __size = 0
if (name.isDefined) {
val __value = name.get
__size += _root_.com.google.protobuf.CodedOutputStream.computeStringSize(1, __value)
};
method.foreach { __item =>
val __value = __item
__size += 1 + _root_.com.google.protobuf.CodedOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
}
if (options.isDefined) {
val __value = options.get
__size += 1 + _root_.com.google.protobuf.CodedOutputStream.computeUInt32SizeNoTag(__value.serializedSize) + __value.serializedSize
};
__size += unknownFields.serializedSize
__size
}
override def serializedSize: _root_.scala.Int = {
var __size = __serializedSizeMemoized
if (__size == 0) {
__size = __computeSerializedSize() + 1
__serializedSizeMemoized = __size
}
__size - 1
}
def writeTo(`_output__`: _root_.com.google.protobuf.CodedOutputStream): _root_.scala.Unit = {
name.foreach { __v =>
val __m = __v
_output__.writeString(1, __m)
};
method.foreach { __v =>
val __m = __v
_output__.writeTag(2, 2)
_output__.writeUInt32NoTag(__m.serializedSize)
__m.writeTo(_output__)
};
options.foreach { __v =>
val __m = __v
_output__.writeTag(3, 2)
_output__.writeUInt32NoTag(__m.serializedSize)
__m.writeTo(_output__)
};
unknownFields.writeTo(_output__)
}
def getName: _root_.scala.Predef.String = name.getOrElse("")
def clearName: ServiceDescriptorProto = copy(name = _root_.scala.None)
def withName(__v: _root_.scala.Predef.String): ServiceDescriptorProto = copy(name = Option(__v))
def clearMethod = copy(method = _root_.scala.Seq.empty)
def addMethod(__vs: com.google.protobuf.descriptor.MethodDescriptorProto *): ServiceDescriptorProto = addAllMethod(__vs)
def addAllMethod(__vs: Iterable[com.google.protobuf.descriptor.MethodDescriptorProto]): ServiceDescriptorProto = copy(method = method ++ __vs)
def withMethod(__v: _root_.scala.Seq[com.google.protobuf.descriptor.MethodDescriptorProto]): ServiceDescriptorProto = copy(method = __v)
def getOptions: com.google.protobuf.descriptor.ServiceOptions = options.getOrElse(com.google.protobuf.descriptor.ServiceOptions.defaultInstance)
def clearOptions: ServiceDescriptorProto = copy(options = _root_.scala.None)
def withOptions(__v: com.google.protobuf.descriptor.ServiceOptions): ServiceDescriptorProto = copy(options = Option(__v))
def withUnknownFields(__v: _root_.scalapb.UnknownFieldSet) = copy(unknownFields = __v)
def discardUnknownFields = copy(unknownFields = _root_.scalapb.UnknownFieldSet.empty)
def getFieldByNumber(__fieldNumber: _root_.scala.Int): _root_.scala.Any = {
(__fieldNumber: @_root_.scala.unchecked) match {
case 1 => name.orNull
case 2 => method
case 3 => options.orNull
}
}
def getField(__field: _root_.scalapb.descriptors.FieldDescriptor): _root_.scalapb.descriptors.PValue = {
_root_.scala.Predef.require(__field.containingMessage eq companion.scalaDescriptor)
(__field.number: @_root_.scala.unchecked) match {
case 1 => name.map(_root_.scalapb.descriptors.PString(_)).getOrElse(_root_.scalapb.descriptors.PEmpty)
case 2 => _root_.scalapb.descriptors.PRepeated(method.iterator.map(_.toPMessage).toVector)
case 3 => options.map(_.toPMessage).getOrElse(_root_.scalapb.descriptors.PEmpty)
}
}
def toProtoString: _root_.scala.Predef.String = _root_.scalapb.TextFormat.printToUnicodeString(this)
def companion: com.google.protobuf.descriptor.ServiceDescriptorProto.type = com.google.protobuf.descriptor.ServiceDescriptorProto
// @@protoc_insertion_point(GeneratedMessage[google.protobuf.ServiceDescriptorProto])
}
object ServiceDescriptorProto extends scalapb.GeneratedMessageCompanion[com.google.protobuf.descriptor.ServiceDescriptorProto] with scalapb.JavaProtoSupport[com.google.protobuf.descriptor.ServiceDescriptorProto, com.google.protobuf.DescriptorProtos.ServiceDescriptorProto] {
implicit def messageCompanion: scalapb.GeneratedMessageCompanion[com.google.protobuf.descriptor.ServiceDescriptorProto] with scalapb.JavaProtoSupport[com.google.protobuf.descriptor.ServiceDescriptorProto, com.google.protobuf.DescriptorProtos.ServiceDescriptorProto] = this
def toJavaProto(scalaPbSource: com.google.protobuf.descriptor.ServiceDescriptorProto): com.google.protobuf.DescriptorProtos.ServiceDescriptorProto = {
val javaPbOut = com.google.protobuf.DescriptorProtos.ServiceDescriptorProto.newBuilder
scalaPbSource.name.foreach(javaPbOut.setName)
javaPbOut.addAllMethod(_root_.scalapb.internal.compat.toIterable(scalaPbSource.method.iterator.map(com.google.protobuf.descriptor.MethodDescriptorProto.toJavaProto(_))).asJava)
scalaPbSource.options.map(com.google.protobuf.descriptor.ServiceOptions.toJavaProto(_)).foreach(javaPbOut.setOptions)
javaPbOut.build
}
def fromJavaProto(javaPbSource: com.google.protobuf.DescriptorProtos.ServiceDescriptorProto): com.google.protobuf.descriptor.ServiceDescriptorProto = com.google.protobuf.descriptor.ServiceDescriptorProto(
name = if (javaPbSource.hasName) Some(javaPbSource.getName) else _root_.scala.None,
method = javaPbSource.getMethodList.asScala.iterator.map(com.google.protobuf.descriptor.MethodDescriptorProto.fromJavaProto(_)).toSeq,
options = if (javaPbSource.hasOptions) Some(com.google.protobuf.descriptor.ServiceOptions.fromJavaProto(javaPbSource.getOptions)) else _root_.scala.None
)
def parseFrom(`_input__`: _root_.com.google.protobuf.CodedInputStream): com.google.protobuf.descriptor.ServiceDescriptorProto = {
var __name: _root_.scala.Option[_root_.scala.Predef.String] = _root_.scala.None
val __method: _root_.scala.collection.immutable.VectorBuilder[com.google.protobuf.descriptor.MethodDescriptorProto] = new _root_.scala.collection.immutable.VectorBuilder[com.google.protobuf.descriptor.MethodDescriptorProto]
var __options: _root_.scala.Option[com.google.protobuf.descriptor.ServiceOptions] = _root_.scala.None
var `_unknownFields__`: _root_.scalapb.UnknownFieldSet.Builder = null
var _done__ = false
while (!_done__) {
val _tag__ = _input__.readTag()
_tag__ match {
case 0 => _done__ = true
case 10 =>
__name = Option(_input__.readStringRequireUtf8())
case 18 =>
__method += _root_.scalapb.LiteParser.readMessage[com.google.protobuf.descriptor.MethodDescriptorProto](_input__)
case 26 =>
__options = Option(__options.fold(_root_.scalapb.LiteParser.readMessage[com.google.protobuf.descriptor.ServiceOptions](_input__))(_root_.scalapb.LiteParser.readMessage(_input__, _)))
case tag =>
if (_unknownFields__ == null) {
_unknownFields__ = new _root_.scalapb.UnknownFieldSet.Builder()
}
_unknownFields__.parseField(tag, _input__)
}
}
com.google.protobuf.descriptor.ServiceDescriptorProto(
name = __name,
method = __method.result(),
options = __options,
unknownFields = if (_unknownFields__ == null) _root_.scalapb.UnknownFieldSet.empty else _unknownFields__.result()
)
}
implicit def messageReads: _root_.scalapb.descriptors.Reads[com.google.protobuf.descriptor.ServiceDescriptorProto] = _root_.scalapb.descriptors.Reads{
case _root_.scalapb.descriptors.PMessage(__fieldsMap) =>
_root_.scala.Predef.require(__fieldsMap.keys.forall(_.containingMessage eq scalaDescriptor), "FieldDescriptor does not match message type.")
com.google.protobuf.descriptor.ServiceDescriptorProto(
name = __fieldsMap.get(scalaDescriptor.findFieldByNumber(1).get).flatMap(_.as[_root_.scala.Option[_root_.scala.Predef.String]]),
method = __fieldsMap.get(scalaDescriptor.findFieldByNumber(2).get).map(_.as[_root_.scala.Seq[com.google.protobuf.descriptor.MethodDescriptorProto]]).getOrElse(_root_.scala.Seq.empty),
options = __fieldsMap.get(scalaDescriptor.findFieldByNumber(3).get).flatMap(_.as[_root_.scala.Option[com.google.protobuf.descriptor.ServiceOptions]])
)
case _ => throw new RuntimeException("Expected PMessage")
}
def javaDescriptor: _root_.com.google.protobuf.Descriptors.Descriptor = DescriptorProtoCompanion.javaDescriptor.getMessageTypes().get(8)
def scalaDescriptor: _root_.scalapb.descriptors.Descriptor = DescriptorProtoCompanion.scalaDescriptor.messages(8)
def messageCompanionForFieldNumber(__number: _root_.scala.Int): _root_.scalapb.GeneratedMessageCompanion[_] = {
var __out: _root_.scalapb.GeneratedMessageCompanion[_] = null
(__number: @_root_.scala.unchecked) match {
case 2 => __out = com.google.protobuf.descriptor.MethodDescriptorProto
case 3 => __out = com.google.protobuf.descriptor.ServiceOptions
}
__out
}
lazy val nestedMessagesCompanions: Seq[_root_.scalapb.GeneratedMessageCompanion[_ <: _root_.scalapb.GeneratedMessage]] = Seq.empty
def enumCompanionForFieldNumber(__fieldNumber: _root_.scala.Int): _root_.scalapb.GeneratedEnumCompanion[_] = throw new MatchError(__fieldNumber)
lazy val defaultInstance = com.google.protobuf.descriptor.ServiceDescriptorProto(
name = _root_.scala.None,
method = _root_.scala.Seq.empty,
options = _root_.scala.None
)
implicit class ServiceDescriptorProtoLens[UpperPB](_l: _root_.scalapb.lenses.Lens[UpperPB, com.google.protobuf.descriptor.ServiceDescriptorProto]) extends _root_.scalapb.lenses.ObjectLens[UpperPB, com.google.protobuf.descriptor.ServiceDescriptorProto](_l) {
def name: _root_.scalapb.lenses.Lens[UpperPB, _root_.scala.Predef.String] = field(_.getName)((c_, f_) => c_.copy(name = Option(f_)))
def optionalName: _root_.scalapb.lenses.Lens[UpperPB, _root_.scala.Option[_root_.scala.Predef.String]] = field(_.name)((c_, f_) => c_.copy(name = f_))
def method: _root_.scalapb.lenses.Lens[UpperPB, _root_.scala.Seq[com.google.protobuf.descriptor.MethodDescriptorProto]] = field(_.method)((c_, f_) => c_.copy(method = f_))
def options: _root_.scalapb.lenses.Lens[UpperPB, com.google.protobuf.descriptor.ServiceOptions] = field(_.getOptions)((c_, f_) => c_.copy(options = Option(f_)))
def optionalOptions: _root_.scalapb.lenses.Lens[UpperPB, _root_.scala.Option[com.google.protobuf.descriptor.ServiceOptions]] = field(_.options)((c_, f_) => c_.copy(options = f_))
}
final val NAME_FIELD_NUMBER = 1
final val METHOD_FIELD_NUMBER = 2
final val OPTIONS_FIELD_NUMBER = 3
def of(
name: _root_.scala.Option[_root_.scala.Predef.String],
method: _root_.scala.Seq[com.google.protobuf.descriptor.MethodDescriptorProto],
options: _root_.scala.Option[com.google.protobuf.descriptor.ServiceOptions]
): _root_.com.google.protobuf.descriptor.ServiceDescriptorProto = _root_.com.google.protobuf.descriptor.ServiceDescriptorProto(
name,
method,
options
)
// @@protoc_insertion_point(GeneratedMessageCompanion[google.protobuf.ServiceDescriptorProto])
}
| scalapb/ScalaPB | scalapb-runtime/src/main/scalajvm/com/google/protobuf/descriptor/ServiceDescriptorProto.scala | Scala | apache-2.0 | 12,083 |
package spire.algebra
import spire.math.{ Rational, NumberTag }
import spire.std.int._
import spire.std.long._
import spire.std.float._
import spire.std.double._
import spire.syntax.euclideanRing._
import spire.syntax.isReal.{ eqOps => _, _ }
import scala.reflect.ClassTag
import org.scalatest.FunSuite
import org.scalatest.prop.Checkers
import org.scalacheck.{Arbitrary, Gen}
import org.scalacheck.Arbitrary._
import org.scalacheck.Prop._
class GCDTest extends FunSuite with Checkers {
implicit def ArbBigDecimal: Arbitrary[BigDecimal] = Arbitrary(for {
value <- arbitrary[Long]
scale <- arbitrary[Short]
} yield BigDecimal(value, scale.toInt))
implicit def ArbRational: Arbitrary[Rational] = Arbitrary(for {
n <- arbitrary[Long]
d <- arbitrary[Long] if d != 0
} yield Rational(n, d))
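  // testGcd checks the normalization property: for nonzero x and y with d = gcd(x, y),
  // x/d and y/d should be whole and coprime (gcd(x/d, y/d) == 1), skipping cases where
  // the intermediate values are not finite.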
def testGcd[A: EuclideanRing: IsReal: NumberTag](x: A, y: A): Boolean = {
(x == Ring[A].zero || y == Ring[A].zero) || {
val den = spire.math.gcd(x, y)
val x0 = x /~ den
val y0 = y /~ den
if (NumberTag[A].isFinite(x0) && NumberTag[A].isFinite(y0)) {
x0.isWhole && y0.isWhole && (spire.math.gcd(x0, y0) == Ring[A].one)
} else {
// Ideally we'd filter this out at the ScalaCheck level.
true
}
}
}
test("GCD of floats with 0 exponent in result is correct") {
val x = -1.586002E-34f
val y = 3.3793717E-7f
val d = spire.math.gcd(x, y)
assert((x / d).isWhole === true)
assert((y / d).isWhole === true)
assert(spire.math.gcd(x / d, y / d) === 1f)
}
test("Int GCD")(check(forAll { (a: Int, b: Int) => testGcd(a, b) }))
test("Long GCD")(check(forAll { (a: Long, b: Long) => testGcd(a, b) }))
test("Float GCD")(check(forAll { (a: Float, b: Float) => testGcd(a, b) }))
test("Double GCD")(check(forAll { (a: Double, b: Double) => testGcd(a, b) }))
// Disabled. Getting unexplainable OOM errors, even with isWhole commented out.
// test("BigDecimal GCD")(check(forAll { (a: BigDecimal, b: BigDecimal) => testGcd(a, b) }))
test("Rational GCD")(check(forAll { (a: Rational, b: Rational) => testGcd(a, b) }))
}
| guersam/spire | tests/src/test/scala/spire/algebra/GCDTest.scala | Scala | mit | 2,127 |
package org.efset.writer
import org.efset.ContextConfig
import org.efset.Model.ModelThing
import org.efset.writer.ElasticsearchDataWriterComponent._
class TestSessionElasticsearchDataWriterComponent extends ElasticsearchDataWriterComponent {
override val dataWriter: DataWriter[ModelThing] = new ElasticsearchDataWriter(createClient(ContextConfig.esHost, ContextConfig.esPort), "test_session")
}
| ef-ice/cassandra-exporter | src/main/scala/org/efset/writer/TestSessionElasticsearchDataWriterComponent.scala | Scala | mit | 403 |
/***********************************************************************
* Copyright (c) 2013-2022 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.tools.export
import java.io._
import java.util.Collections
import com.beust.jcommander.validators.PositiveInteger
import com.beust.jcommander.{Parameter, ParameterException}
import com.typesafe.scalalogging.LazyLogging
import org.apache.commons.io.FilenameUtils
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileContext, Path}
import org.apache.hadoop.mapreduce.Job
import org.geotools.data.{DataStore, FileDataStore, Query}
import org.geotools.filter.text.ecql.ECQL
import org.geotools.util.factory.Hints
import org.locationtech.geomesa.index.conf.QueryHints
import org.locationtech.geomesa.index.geoserver.ViewParams
import org.locationtech.geomesa.index.iterators.BinAggregatingScan
import org.locationtech.geomesa.index.planning.QueryRunner
import org.locationtech.geomesa.jobs.JobResult.{JobFailure, JobSuccess}
import org.locationtech.geomesa.jobs.{GeoMesaConfigurator, JobResult}
import org.locationtech.geomesa.tools.Command.CommandException
import org.locationtech.geomesa.tools.DistributedRunParam.RunModes
import org.locationtech.geomesa.tools._
import org.locationtech.geomesa.tools.`export`.formats.FeatureExporter.LazyExportStream
import org.locationtech.geomesa.tools.export.ExportCommand.{ChunkedExporter, ExportOptions, ExportParams, Exporter}
import org.locationtech.geomesa.tools.export.formats.FileSystemExporter.{OrcFileSystemExporter, ParquetFileSystemExporter}
import org.locationtech.geomesa.tools.export.formats._
import org.locationtech.geomesa.tools.utils.ParameterConverters.{BytesConverter, ExportFormatConverter}
import org.locationtech.geomesa.tools.utils.{JobRunner, NoopParameterSplitter, Prompt, TerminalCallback}
import org.locationtech.geomesa.utils.collection.CloseableIterator
import org.locationtech.geomesa.utils.io.{FileSizeEstimator, IncrementingFileName, PathUtils, WithClose}
import org.locationtech.geomesa.utils.stats.MethodProfiling
import org.opengis.feature.simple.{SimpleFeature, SimpleFeatureType}
import org.opengis.filter.Filter
import org.opengis.filter.sort.SortOrder
import scala.annotation.tailrec
import scala.util.control.NonFatal
trait ExportCommand[DS <: DataStore] extends DataStoreCommand[DS]
with DistributedCommand with InteractiveCommand with MethodProfiling {
import ExportCommand.CountKey
override val name = "export"
override def params: ExportParams
override def libjarsFiles: Seq[String] = Seq("org/locationtech/geomesa/tools/export-libjars.list")
override def execute(): Unit = {
def complete(result: JobResult, time: Long): Unit = {
result match {
case JobSuccess(message, counts) =>
val count = counts.get(CountKey).map(c => s" for $c features").getOrElse("")
Command.user.info(s"$message$count in ${time}ms")
case JobFailure(message) =>
Command.user.info(s"Feature export failed in ${time}ms: $message")
throw new CommandException(message) // propagate out and return an exit code error
}
}
profile(complete _)(withDataStore(export))
}
private def export(ds: DS): JobResult = {
// for file data stores, handle setting the default type name so the user doesn't have to
for {
p <- Option(params).collect { case p: ProvidedTypeNameParam => p }
f <- Option(ds).collect { case f: FileDataStore => f }
} { p.featureName = f.getSchema.getTypeName }
val options = ExportOptions(params)
val remote = options.file.exists(PathUtils.isRemote)
val reducers = Option(params.reducers).map(_.intValue()).getOrElse(-1)
val mode = params.mode.getOrElse(if (reducers == -1) { RunModes.Local } else { RunModes.Distributed })
val chunks = Option(params.chunkSize).map(_.longValue())
// do some validation up front
if (options.file.isEmpty && !options.format.streaming) {
throw new ParameterException(s"Format '${options.format}' requires file-based output, please use --output")
} else if (remote && options.format == ExportFormat.Shp) {
throw new ParameterException("Shape file export is not supported for distributed file systems")
} else if (!remote && mode == RunModes.Distributed) {
throw new ParameterException("Distributed export requires an output file in a distributed file system")
} else if (mode == RunModes.Distributed && params.maxFeatures != null) {
throw new ParameterException("Distributed export does not support --max-features")
}
val sft = ds.getSchema(params.featureName)
if (sft == null) {
throw new ParameterException(s"Schema '${params.featureName}' does not exist in the store")
}
val query = ExportCommand.createQuery(sft, params)
mode match {
case RunModes.Local =>
options.file.map(PathUtils.getHandle).foreach { file =>
if (file.exists) {
if (params.force) {
Command.user.warn(s"Output file '${file.path}' exists - deleting it")
} else if (!Prompt.confirm(s"WARNING Output file '${file.path}' exists, delete it and continue (y/n)? ")) {
throw new ParameterException(s"Output file '${file.path}' exists")
}
file.delete()
}
}
lazy val dictionaries = ArrowExporter.queryDictionaries(ds, query)
val exporter = chunks match {
case None => new Exporter(options, query.getHints, dictionaries)
case Some(c) => new ChunkedExporter(options, query.getHints, dictionaries, c)
}
val count = try { export(ds, query, exporter, !params.suppressEmpty) } finally { exporter.close() }
val outFile = options.file match {
case None => "standard out"
case Some(f) if chunks.isDefined => PathUtils.getBaseNameAndExtension(f).productIterator.mkString("_*")
case Some(f) => f
}
JobSuccess(s"Feature export complete to $outFile", count.map(CountKey -> _).toMap)
case RunModes.Distributed =>
val job = Job.getInstance(new Configuration, "GeoMesa Tools Export")
// for remote jobs, don't push down format transforms, to enable counting and global sorting
val hints = new Hints(query.getHints)
ExportCommand.disableAggregation(sft, query.getHints)
configure(job, ds, query) // note: do this first so that input format is set for the TotalOrderPartitioner
// note: these confs should be set by the input format
val reduce = Seq(GeoMesaConfigurator.Keys.FeatureReducer, GeoMesaConfigurator.Keys.Sorting).filter { key =>
job.getConfiguration.get(key) != null
}
if (reducers < 1 && reduce.nonEmpty) {
if (reduce.contains(GeoMesaConfigurator.Keys.Sorting)) {
throw new ParameterException("Distributed export sorting requires --num-reducers")
} else {
throw new ParameterException(s"Distributed export format '${options.format}' requires --num-reducers")
}
}
// must be some due to our remote check
val file = options.file.getOrElse(throw new IllegalStateException("file should be Some"))
val output = new Path(PathUtils.getUrl(file).toURI).getParent
// file output format doesn't generally let you write to an existing directory
val context = FileContext.getFileContext(output.toUri, job.getConfiguration)
if (context.util.exists(output)) {
val warning = s"Output directory '$output' exists - files may be overwritten"
if (params.force) {
Command.user.warn(warning)
} else if (!Prompt.confirm(s"WARNING $warning. Continue anyway (y/n)? ")) {
if (Prompt.confirm("WARNING DATA MAY BE LOST - delete directory and proceed with export (y/n)? ")) {
context.delete(output, true)
} else {
throw new ParameterException(s"Output directory '$output' exists")
}
}
}
val filename = FilenameUtils.getName(file)
// note: use our original hints, which have the aggregating keys
ExportJob.configure(job, connection, sft, hints, filename, output, options.format, options.headers,
chunks, options.gzip, reducers, libjars(options.format), libjarsPaths)
val reporter = TerminalCallback()
JobRunner.run(job, reporter, ExportJob.Counters.mapping(job), ExportJob.Counters.reducing(job)).merge {
Some(JobSuccess(s"Feature export complete to $output", Map(CountKey -> ExportJob.Counters.count(job))))
}
case _ => throw new NotImplementedError() // someone added a run mode and didn't implement it here...
}
}
/**
* Hook for overriding export
*
* @param ds data store
* @param query query
* @param exporter exporter
   * @param writeEmptyFiles whether to write output (headers, etc.) even when no features are returned
* @return
*/
protected def export(ds: DS, query: Query, exporter: FeatureExporter, writeEmptyFiles: Boolean): Option[Long] = {
try {
Command.user.info("Running export - please wait...")
val features = ds.getFeatureSource(query.getTypeName).getFeatures(query)
WithClose(CloseableIterator(features.features())) { iter =>
if (writeEmptyFiles || iter.hasNext) {
exporter.start(features.getSchema)
exporter.export(iter)
} else {
Some(0L)
}
}
} catch {
case NonFatal(e) =>
throw new RuntimeException("Could not execute export query. Please ensure " +
"that all arguments are correct", e)
}
}
/**
* Configure an export job, with the appropriate input format for this particular data store. Must be
* overridden to provide distributed export support
*
* @return
*/
protected def configure(job: Job, ds: DS, query: Query): Unit =
throw new ParameterException("Distributed export is not supported by this store, please use --run-mode local")
/**
* Get the list of libjars files for a given format
*
* @param format export format
* @return
*/
private def libjars(format: ExportFormat): Seq[String] = {
val path = s"org/locationtech/geomesa/tools/export-libjars-$format.list"
Seq(path).filter(getClass.getClassLoader.getResource(_) != null) ++ libjarsFiles
}
}
object ExportCommand extends LazyLogging {
import org.locationtech.geomesa.index.conf.QueryHints.RichHints
import org.locationtech.geomesa.utils.geotools.RichSimpleFeatureType.RichSimpleFeatureType
import scala.collection.JavaConverters._
private val CountKey = "count"
/**
* Create the query to execute
*
* @param sft simple feature type
* @param params parameters
* @return
*/
def createQuery(sft: SimpleFeatureType, params: ExportParams): Query = {
val typeName = Option(params).collect { case p: TypeNameParam => p.featureName }.orNull
val filter = Option(params.cqlFilter).getOrElse(Filter.INCLUDE)
val query = new Query(typeName, filter)
val hints = query.getHints
Option(params.maxFeatures).map(Int.unbox).foreach(query.setMaxFeatures)
Option(params).collect { case p: OptionalIndexParam => p }.foreach { p =>
Option(p.index).foreach { index =>
logger.debug(s"Using index $index")
hints.put(QueryHints.QUERY_INDEX, index)
}
}
Option(params.hints).foreach { hintStrings =>
hints.put(Hints.VIRTUAL_TABLE_PARAMETERS, hintStrings)
ViewParams.setHints(query)
}
if (params.outputFormat == ExportFormat.Arrow) {
hints.put(QueryHints.ARROW_ENCODE, java.lang.Boolean.TRUE)
} else if (params.outputFormat == ExportFormat.Bin) {
// if not specified in hints, set it here to trigger the bin query
if (!hints.containsKey(QueryHints.BIN_TRACK)) {
hints.put(QueryHints.BIN_TRACK, "id")
}
if (!hints.containsKey(QueryHints.BIN_DTG)) {
sft.getDtgField.foreach(hints.put(QueryHints.BIN_DTG, _))
}
} else if (params.outputFormat == ExportFormat.Leaflet) {
if (!LeafletMapExporter.configure(params)) {
throw new ParameterException("Terminating execution")
}
}
val attributes: Array[String] = {
val combined = params.attributes.asScala ++ params.transforms.asScala
if (combined.nonEmpty) {
val (id, attributes) = combined.partition(_.equalsIgnoreCase("id"))
if (id.isEmpty && !hints.containsKey(QueryHints.ARROW_INCLUDE_FID)) {
// note: we also use this hint for delimited text export
hints.put(QueryHints.ARROW_INCLUDE_FID, java.lang.Boolean.FALSE)
}
attributes.toArray
} else if (params.outputFormat == ExportFormat.Bin) {
BinAggregatingScan.propertyNames(hints, sft).toArray
} else {
null // all props
}
}
query.setPropertyNames(attributes)
if (!params.sortFields.isEmpty) {
val fields = params.sortFields.asScala
if (fields.exists(a => sft.indexOf(a) == -1)) {
val errors = fields.filter(a => sft.indexOf(a) == -1)
throw new ParameterException(s"Invalid sort attribute${if (errors.lengthCompare(1) == 0) "" else "s"}: " +
errors.mkString(", "))
}
val order = if (params.sortDescending) { SortOrder.DESCENDING } else { SortOrder.ASCENDING }
query.setSortBy(fields.map(f => org.locationtech.geomesa.filter.ff.sort(f, order)).toArray)
} else if (hints.isArrowQuery) {
hints.getArrowSort.foreach { case (f, r) =>
val order = if (r) { SortOrder.DESCENDING } else { SortOrder.ASCENDING }
query.setSortBy(Array(org.locationtech.geomesa.filter.ff.sort(f, order)))
}
}
logger.debug(s"Applying CQL filter ${ECQL.toCQL(filter)}")
logger.debug(s"Applying transform ${Option(attributes).map(_.mkString(",")).orNull}")
QueryRunner.configureDefaultQuery(sft, query)
}
/**
* Disable hints for aggregating scans by removing them, and moving any aggregating sort hints to
* regular sort hints
*
* @param sft simple feature type
* @param hints hints
* @return
*/
def disableAggregation(sft: SimpleFeatureType, hints: Hints): Unit = {
if (hints.isArrowQuery) {
hints.remove(QueryHints.ARROW_ENCODE)
val sort = hints.remove(QueryHints.ARROW_SORT_FIELD).asInstanceOf[String]
if (sort != null) {
val reverse = Option(hints.remove(QueryHints.ARROW_SORT_REVERSE).asInstanceOf[java.lang.Boolean])
val order = if (reverse.exists(_.booleanValue)) { SortOrder.DESCENDING } else { SortOrder.ASCENDING }
val hint = org.locationtech.geomesa.filter.ff.sort(sort, order)
hints.put(QueryHints.Internal.SORT_FIELDS, QueryHints.Internal.toSortHint(Array(hint)))
}
} else if (hints.isBinQuery) {
hints.remove(QueryHints.BIN_TRACK)
if (hints.isBinSorting) {
hints.getBinDtgField.orElse(sft.getDtgField).foreach { dtg =>
val hint = org.locationtech.geomesa.filter.ff.sort(dtg, SortOrder.ASCENDING)
hints.put(QueryHints.Internal.SORT_FIELDS, QueryHints.Internal.toSortHint(Array(hint)))
}
}
}
}
/**
* Options from the input params, in a more convenient format
*
* @param format output format
* @param file file path (or stdout)
* @param gzip compression
* @param headers headers (for delimited text only)
*/
case class ExportOptions(format: ExportFormat, file: Option[String], gzip: Option[Int], headers: Boolean)
object ExportOptions {
def apply(params: ExportParams): ExportOptions =
ExportOptions(params.outputFormat, Option(params.file), Option(params.gzip).map(_.intValue), !params.noHeader)
}
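  // For illustration (hypothetical command line, not from the original source): an export run
  // with "-F csv -o /tmp/out.csv --gzip 6" corresponds roughly to
  //   ExportOptions(ExportFormat.Csv, Some("/tmp/out.csv"), Some(6), headers = true)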
/**
* Single exporter that handles the command options and delegates to the correct implementation
*
* @param options options
* @param hints query hints
* @param dictionaries lazily evaluated arrow dictionaries
*/
class Exporter(options: ExportOptions, hints: Hints, dictionaries: => Map[String, Array[AnyRef]])
extends FeatureExporter {
// used only for streaming export formats
private lazy val stream = {
// avro compression is handled differently, see AvroExporter below
val gzip = options.gzip.filter(_ => options.format != ExportFormat.Avro)
new LazyExportStream(options.file, gzip)
}
// used only for file-based export formats
private lazy val name = options.file.getOrElse {
// should have been validated already...
throw new IllegalStateException("Export format requires a file but none was specified")
}
// noinspection ComparingUnrelatedTypes
private lazy val fids = !Option(hints.get(QueryHints.ARROW_INCLUDE_FID)).contains(java.lang.Boolean.FALSE)
private val exporter = options.format match {
case ExportFormat.Arrow => new ArrowExporter(stream, hints, dictionaries)
case ExportFormat.Avro => new AvroExporter(stream, options.gzip)
case ExportFormat.Bin => new BinExporter(stream, hints)
case ExportFormat.Csv => DelimitedExporter.csv(stream, options.headers, fids)
case ExportFormat.Gml2 => GmlExporter.gml2(stream)
case ExportFormat.Gml3 => GmlExporter(stream)
case ExportFormat.Json => new GeoJsonExporter(stream)
case ExportFormat.Leaflet => new LeafletMapExporter(stream)
case ExportFormat.Null => NullExporter
case ExportFormat.Orc => new OrcFileSystemExporter(name)
case ExportFormat.Parquet => new ParquetFileSystemExporter(name)
case ExportFormat.Shp => new ShapefileExporter(new File(name))
case ExportFormat.Tsv => DelimitedExporter.tsv(stream, options.headers, fids)
// shouldn't happen unless someone adds a new format and doesn't implement it here
case _ => throw new NotImplementedError(s"Export for '${options.format}' is not implemented")
}
override def start(sft: SimpleFeatureType): Unit = exporter.start(sft)
override def export(features: Iterator[SimpleFeature]): Option[Long] = exporter.export(features)
override def bytes: Long = exporter.bytes
override def close(): Unit = exporter.close()
}
/**
* Feature exporter that handles chunking output into multiple files
*
* @param options export options
* @param hints query hints
* @param dictionaries arrow dictionaries (lazily evaluated)
* @param chunks number of bytes to write per file
*/
class ChunkedExporter(
options: ExportOptions,
hints: Hints,
dictionaries: => Map[String, Array[AnyRef]],
chunks: Long
) extends FeatureExporter with LazyLogging {
private val names = options.file match {
case None => Iterator.continually(None)
case Some(f) => new IncrementingFileName(f).map(Option.apply)
}
private lazy val queriedDictionaries = dictionaries // only evaluate once, even if we have multiple chunks
private var sft: SimpleFeatureType = _
private var exporter: FeatureExporter = _
private var estimator: FileSizeEstimator = _
private var count = 0L // number of features written
private var total = 0L // sum size of all finished chunks
override def start(sft: SimpleFeatureType): Unit = {
this.sft = sft
estimator = new FileSizeEstimator(chunks, 0.05f, options.format.bytesPerFeature(sft)) // 5% error threshold
nextChunk()
}
override def export(features: Iterator[SimpleFeature]): Option[Long] = export(features, None)
override def bytes: Long = if (exporter == null) { total } else { total + exporter.bytes }
override def close(): Unit = if (exporter != null) { exporter.close() }
private def nextChunk(): Unit = {
if (exporter != null) {
exporter.close()
// adjust our estimate to account for the actual bytes written
// do this after closing the exporter to account for footers, batching, etc
val written = exporter.bytes
estimator.update(written, count)
total += written
}
exporter = new Exporter(options.copy(file = names.next), hints, queriedDictionaries)
exporter.start(sft)
count = 0L
}
@tailrec
private def export(features: Iterator[SimpleFeature], result: Option[Long]): Option[Long] = {
var estimate = estimator.estimate(exporter.bytes)
val counter = features.takeWhile { _ => count += 1; estimate -= 1; estimate >= 0 }
val exported = exporter.export(counter) match {
case None => result
case Some(c) => result.map(_ + c).orElse(Some(c))
}
if (features.isEmpty) {
exported
} else {
// if it's a countable format, the bytes should be available now and we can compare to our chunk size
// otherwise, the bytes aren't generally available until after closing the writer,
// so we have to go with our initial estimate and adjust after the first chunk
if (options.format.countable) {
val bytes = exporter.bytes
if (estimator.done(bytes)) {
nextChunk()
} else {
estimator.update(bytes, count)
}
} else {
nextChunk()
}
export(features, exported)
}
}
}
/**
* Export parameters
*/
trait ExportParams extends OptionalCqlFilterParam with QueryHintsParams
with DistributedRunParam with TypeNameParam with NumReducersParam with OptionalForceParam {
@Parameter(names = Array("-o", "--output"), description = "Output to a file instead of std out")
var file: String = _
@Parameter(names = Array("--gzip"), description = "Level of gzip compression to apply to output, from 1-9")
var gzip: Integer = _
@Parameter(
names = Array("--no-header"),
description = "Export as a delimited text format (csv|tsv) without a type header")
var noHeader: Boolean = false
@Parameter(
names = Array("--suppress-empty"),
description = "Suppress all output (headers, etc) if there are no features exported")
var suppressEmpty: Boolean = false
@Parameter(
names = Array("-m", "--max-features"),
description = "Restrict the maximum number of features returned")
var maxFeatures: java.lang.Integer = _
@Parameter(
names = Array("--attribute"),
description = "Attributes or derived expressions to export, or 'id' to include the feature ID",
splitter = classOf[NoopParameterSplitter])
var transforms: java.util.List[String] = Collections.emptyList()
@Parameter(
names = Array("-a", "--attributes"),
description = "Comma-separated attributes to export, or 'id' to include the feature ID")
var attributes: java.util.List[String] = Collections.emptyList()
@Parameter(names = Array("--sort-by"), description = "Sort by the specified attributes (comma-delimited)")
var sortFields: java.util.List[String] = Collections.emptyList()
@Parameter(
names = Array("--sort-descending"),
description = "Sort in descending order, instead of ascending",
arity = 0)
var sortDescending: Boolean = false
@Parameter(
names = Array("--chunk-size"),
description = "Split the output into multiple files, by specifying the rough number of bytes to store per file",
converter = classOf[BytesConverter])
var chunkSize: java.lang.Long = _
@Parameter(
names = Array("-F", "--output-format"),
description = "File format of output files (csv|tsv|gml|json|shp|avro|leaflet|orc|parquet|arrow)",
converter = classOf[ExportFormatConverter])
var explicitOutputFormat: ExportFormat = _
lazy val outputFormat: ExportFormat = {
if (explicitOutputFormat != null) { explicitOutputFormat } else {
Option(file).flatMap(f => ExportFormat(PathUtils.getUncompressedExtension(f))).getOrElse(ExportFormat.Csv)
}
}
}
}
| locationtech/geomesa | geomesa-tools/src/main/scala/org/locationtech/geomesa/tools/export/ExportCommand.scala | Scala | apache-2.0 | 24,346 |
package org.scalarules.finance.nl
// scalastyle:off method.name
/**
 * Represents a period in months.
*/
case class Periode private[finance](inMaanden: Int) {
require (inMaanden >= 0)
  /** Returns the sum of this period and n. */
  def + (n: Periode): Periode = Periode(inMaanden + n.inMaanden)
  /** Returns the difference between this period and n. */
  def - (n: Periode): Periode = Periode(inMaanden - n.inMaanden)
  /** Returns how many times this period fits into a year. */
  def frequentiePerJaar: Int = 12 / inMaanden
  /** Returns this period as an Int, truncated to whole years. */
  def inAfgekapteJaren: Int = inMaanden / 12
  /** Truncates this period (rounding down) to whole years. */
  def afgekaptOpJaren: Periode = inAfgekapteJaren.jaar
  /** Applies f to every year within this `Periode`, starting at 0, truncated to whole years. */
def mapOverJaren[T](f: Int => T): Seq[T] = (0 until inAfgekapteJaren) map f
override def toString = s"$inMaanden maanden"
}
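// Hypothetical usage sketch (not part of the original file): building and combining
// periods through the implicit Int conversions defined in PeriodeImplicits below.
// The value names are invented for illustration.
private object PeriodeUsageSketch extends PeriodeImplicits {
  val kwartaal: Periode = 3.maanden               // Periode(3)
  val totaal: Periode   = kwartaal + 2.jaar       // Periode(27)
  val jaren: Int        = totaal.inAfgekapteJaren // 2 (truncated to whole years)
}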
trait PeriodeImplicits {
implicit class IntToTijdsduur(value: Int) {
    /** Creates a duration in months. */
    def maand: Periode = maanden
    /** Creates a duration in months. */
    def maanden: Periode = Periode(value)
    /** Creates a duration in years. */
    def jaar: Periode = Periode(value * 12)
}
implicit object OrderingPeriode extends Ordering[Periode] {
override def compare(x: Periode, y: Periode): Int = x.inMaanden compare y.inMaanden
}
}
| scala-rules/finance-dsl | src/main/scala/org/scalarules/finance/nl/Periode.scala | Scala | mit | 1,475 |
/*
* Copyright (c) 2013-2014 Telefónica Investigación y Desarrollo S.A.U.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package es.tid.cosmos.servicemanager.ambari
import es.tid.cosmos.servicemanager.configuration.DynamicPropertiesFactory
import es.tid.cosmos.servicemanager.configuration.ConfigurationKeys._
import es.tid.cosmos.servicemanager.ambari.configuration.HadoopConfig
/** Factory for run-time, dynamic cluster configuration.
*
* @constructor
* @param hadoopConfig the hadoop configuration.
* @param infinityMasterName gets the current infinity master node name when called
*/
class AmbariDynamicPropertiesFactory(
hadoopConfig: HadoopConfig,
infinityMasterName: () => Option[String]) extends DynamicPropertiesFactory {
override def forCluster(masterName: String, slaveNames: Seq[String]): ConfigProperties = Map(
MasterNode -> masterName,
InfinityMasterNode -> infinityMasterName().getOrElse(""),
HdfsReplicationFactor -> Math.min(3, slaveNames.length).toString,
NameNodeHttpPort -> hadoopConfig.nameNodeHttpPort.toString,
MrAppMasterMemory -> hadoopConfig.mrAppMasterMemory.toString,
MapTaskMemory -> hadoopConfig.mapTaskMemory.toString,
MapHeapMemory -> hadoopConfig.mapHeapMemory.toString,
MaxMapTasks -> (hadoopConfig.mappersPerSlave * slaveNames.length).toString,
ReduceTaskMemory -> hadoopConfig.reduceTaskMemory.toString,
ReduceHeapMemory -> hadoopConfig.reduceHeapMemory.toString,
MaxReduceTasks -> (1.75 * hadoopConfig.reducersPerSlave * slaveNames.length).round.toString,
YarnTotalMemory -> hadoopConfig.yarnTotalMemory.toString,
YarnContainerMinimumMemory -> hadoopConfig.yarnContainerMinimumMemory.toString,
YarnVirtualToPhysicalMemoryRatio -> hadoopConfig.yarnVirtualToPhysicalMemoryRatio.toString,
ZookeeperHosts -> zookeeperHosts(slaveNames, hadoopConfig.zookeeperPort).mkString(","),
ZookeeperPort -> hadoopConfig.zookeeperPort.toString
)
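  // e.g. zookeeperHosts(Seq("slave1", "slave2"), 2181) yields Seq("slave1:2181", "slave2:2181")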
private def zookeeperHosts(hosts: Seq[String], port: Int) = hosts.map(host => s"$host:$port")
}
| telefonicaid/fiware-cosmos-platform | ambari-service-manager/src/main/scala/es/tid/cosmos/servicemanager/ambari/AmbariDynamicPropertiesFactory.scala | Scala | apache-2.0 | 2,591 |
import leon.invariant._
import leon.instrumentation._
object BigNums {
sealed abstract class BigNum
case class Cons(head: BigInt, tail: BigNum) extends BigNum
case class Nil() extends BigNum
def incrTime(l: BigNum) : BigInt = {
l match {
case Nil() => 1
case Cons(x, tail) =>
if(x == 0) 1
else 1 + incrTime(tail)
}
}
def potentialIncr(l: BigNum) : BigInt = {
l match {
case Nil() => 0
case Cons(x, tail) =>
if(x == 0) potentialIncr(tail)
else 1 + potentialIncr(tail)
}
}
def increment(l: BigNum) : BigNum = {
l match {
case Nil() => Cons(1,l)
case Cons(x, tail) =>
if(x == 0) Cons(1, tail)
else Cons(0, increment(tail))
}
} ensuring (res => steps <= ? * incrTime(l) + ? && incrTime(l) + potentialIncr(res) - potentialIncr(l) <= ?)
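  // Illustration: digits are stored least-significant first, so
  // increment(Cons(1, Cons(1, Nil()))) carries twice and yields
  // Cons(0, Cons(0, Cons(1, Nil()))), i.e. 3 + 1 = 4.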
/**
* Nop is the number of operations
*/
def incrUntil(nop: BigInt, l: BigNum) : BigNum = {
if(nop == 0) l
else {
incrUntil(nop-1, increment(l))
}
} ensuring (res => steps <= ? * nop + ? * potentialIncr(l) + ?)
def count(nop: BigInt) : BigNum = {
incrUntil(nop, Nil())
} ensuring (res => steps <= ? * nop + ?)
}
| epfl-lara/leon | testcases/web/resourcebounds/11_Amortized_BigNums.scala | Scala | gpl-3.0 | 1,215 |
package pio.refactor
import io.prediction.controller.PDataSource
import io.prediction.controller.EmptyEvaluationInfo
import io.prediction.controller.EmptyActualResult
import io.prediction.controller.Params
import io.prediction.controller._
import io.prediction.data.storage.Event
import io.prediction.data.storage.Storage
import org.apache.spark.SparkContext
import org.apache.spark.SparkContext._
import org.apache.spark.rdd.RDD
import grizzled.slf4j.Logger
//case class DataSourceParams(appId: Int) extends Params
class DataSource
extends PDataSource[
TrainingData,
EmptyEvaluationInfo,
Query,
ActualResult] {
@transient lazy val logger = Logger[this.type]
override
def readTraining(sc: SparkContext): TrainingData = {
new TrainingData(
events = sc.parallelize(0 until 100)
)
}
override
def readEval(sc: SparkContext)
: Seq[(TrainingData, EmptyEvaluationInfo, RDD[(Query, ActualResult)])] =
{
logger.error("Datasource!!!")
(0 until 3).map { ex =>
(
readTraining(sc),
new EmptyEvaluationInfo(),
sc
.parallelize((0 until 20)
.map {i => (Query(i), new ActualResult())}))
}
}
}
class TrainingData(
val events: RDD[Int]
) extends Serializable {
override def toString = {
s"events: [${events.count()}] (${events.take(2).toList}...)"
}
}
| ch33hau/PredictionIO | examples/experimental/scala-refactor-test/src/main/scala/DataSource.scala | Scala | apache-2.0 | 1,373 |
package org.jetbrains.plugins.scala.project
import java.io.File
import com.intellij.openapi.vfs.{VirtualFile, VirtualFileListener, VirtualFileSystem}
/**
* @author Pavel Fatin
*/
class AbsentLocalFile(url: String, path: String) extends VirtualFile {
def getName = throw new UnsupportedOperationException()
def getLength = throw new UnsupportedOperationException()
def getFileSystem = AbsentLocalFileSystem
def contentsToByteArray() = throw new UnsupportedOperationException()
def getParent = throw new UnsupportedOperationException()
def refresh(asynchronous: Boolean, recursive: Boolean, postRunnable: Runnable) =
throw new UnsupportedOperationException()
def getTimeStamp = throw new UnsupportedOperationException()
def getOutputStream(requestor: AnyRef, newModificationStamp: Long, newTimeStamp: Long) =
throw new UnsupportedOperationException()
def isDirectory = throw new UnsupportedOperationException()
def getPath: String = path
def isWritable = throw new UnsupportedOperationException()
def isValid = false
def getChildren = throw new UnsupportedOperationException()
def getInputStream = throw new UnsupportedOperationException()
override def getUrl: String = url
}
object AbsentLocalFileSystem extends VirtualFileSystem {
def getProtocol = throw new UnsupportedOperationException()
def renameFile(requestor: AnyRef, vFile: VirtualFile, newName: String) =
throw new UnsupportedOperationException()
def createChildFile(requestor: AnyRef, vDir: VirtualFile, fileName: String) =
throw new UnsupportedOperationException()
def addVirtualFileListener(virtualFileListener: VirtualFileListener) =
throw new UnsupportedOperationException()
def refreshAndFindFileByPath(s: String) = throw new UnsupportedOperationException()
def copyFile(requestor: AnyRef, virtualFile: VirtualFile, newParent: VirtualFile, copyName: String) =
throw new UnsupportedOperationException()
def refresh(asynchronous: Boolean) = throw new UnsupportedOperationException()
def isReadOnly = throw new UnsupportedOperationException()
def createChildDirectory(requestor: AnyRef, vDir: VirtualFile, dirName: String) =
throw new UnsupportedOperationException()
def removeVirtualFileListener(virtualFileListener: VirtualFileListener) =
throw new UnsupportedOperationException()
def moveFile(requestor: AnyRef, vFile: VirtualFile, newParent: VirtualFile) =
throw new UnsupportedOperationException()
def findFileByPath(path: String) = throw new UnsupportedOperationException()
def deleteFile(requestor: AnyRef, vFile: VirtualFile) = throw new UnsupportedOperationException()
override def extractPresentableUrl(path: String): String = path.replace('/', File.separatorChar)
}
| ilinum/intellij-scala | src/org/jetbrains/plugins/scala/project/AbsentLocalFile.scala | Scala | apache-2.0 | 2,766 |
package com.twitter.finagle.thrift.transport.netty4
import com.twitter.finagle.thrift.transport.ExceptionFactory
import io.netty.buffer.Unpooled
import io.netty.channel.{
ChannelHandler, ChannelHandlerContext, ChannelOutboundHandlerAdapter,
ChannelPromise, CombinedChannelDuplexHandler}
import io.netty.channel.ChannelHandler.Sharable
/**
* Server codec that converts `ByteBuf`s to and from `Array[Byte]`.
*
* This codec also handles some signalling, specifically it will complete the
* `ChannelPromise` associated with encoding an empty `Array[Byte]`.
*/
private[netty4] object ServerByteBufCodec {
def apply(): ChannelHandler = {
val encoder = ThriftServerArrayToByteBufEncoder
val decoder = ThriftByteBufToArrayDecoder
new CombinedChannelDuplexHandler(decoder, encoder)
}
@Sharable
private object ThriftServerArrayToByteBufEncoder extends ChannelOutboundHandlerAdapter {
override def write(ctx: ChannelHandlerContext, msg: Any, promise: ChannelPromise): Unit =
msg match {
case array: Array[Byte] =>
val buf = Unpooled.wrappedBuffer(array)
ctx.writeAndFlush(buf, promise)
case other =>
val ex = ExceptionFactory.wrongServerWriteType(other)
promise.setFailure(ex)
throw ex
}
}
}
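// Rough usage sketch (the pipeline variable and handler name are illustrative; the
// actual installation happens in finagle's Netty4 server pipeline setup elsewhere):
//
//   pipeline.addLast("thriftByteBufCodec", ServerByteBufCodec())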
| koshelev/finagle | finagle-thrift/src/main/scala/com/twitter/finagle/thrift/transport/netty4/ServerByteBufCodec.scala | Scala | apache-2.0 | 1,299 |
package arx.macros
import scala.annotation.Annotation
/**
* Any field annotated with this will be ignored by the @NetworkedAuxData annotation when it modifies a class. We use
* this on fields that do not warrant a full sync when they change, or fields that we manage manually.
*/
class NonTriggering extends Annotation
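// Hypothetical usage sketch (class and field names are illustrative, not taken from this codebase):
//
//   @NetworkedAuxData
//   class UnitState {
//     var hitPoints: Int = 100         // a change here triggers a sync
//     @NonTriggering
//     var lastRenderTime: Long = 0L    // managed manually; ignored by @NetworkedAuxData
//   }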
| nonvirtualthunk/arx-macros | src/main/scala/arx/macros/NonTriggering.scala | Scala | apache-2.0 | 324 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.utils
import org.apache.flink.api.common.typeinfo.{AtomicType, TypeInformation}
import org.apache.flink.api.java.typeutils.{PojoTypeInfo, RowTypeInfo, TupleTypeInfo}
import org.apache.flink.api.scala.typeutils.CaseClassTypeInfo
import org.apache.flink.streaming.api.datastream.DataStream
import org.apache.flink.streaming.api.environment.{LocalStreamEnvironment, StreamExecutionEnvironment}
import org.apache.flink.streaming.api.graph.GlobalDataExchangeMode
import org.apache.flink.streaming.api.scala.{StreamExecutionEnvironment => ScalaStreamExecEnv}
import org.apache.flink.streaming.api.{TimeCharacteristic, environment}
import org.apache.flink.table.api._
import org.apache.flink.table.api.config.ExecutionConfigOptions
import org.apache.flink.table.api.internal.{TableEnvironmentImpl, TableEnvironmentInternal, TableImpl}
import org.apache.flink.table.api.bridge.java.internal.{StreamTableEnvironmentImpl => JavaStreamTableEnvImpl}
import org.apache.flink.table.api.bridge.java.{StreamTableEnvironment => JavaStreamTableEnv}
import org.apache.flink.table.api.bridge.scala.internal.{StreamTableEnvironmentImpl => ScalaStreamTableEnvImpl}
import org.apache.flink.table.api.bridge.scala.{StreamTableEnvironment => ScalaStreamTableEnv}
import org.apache.flink.table.catalog.{CatalogManager, FunctionCatalog, GenericInMemoryCatalog, ObjectIdentifier}
import org.apache.flink.table.data.RowData
import org.apache.flink.table.delegation.{Executor, ExecutorFactory, PlannerFactory}
import org.apache.flink.table.descriptors.ConnectorDescriptorValidator.CONNECTOR_TYPE
import org.apache.flink.table.descriptors.Schema.SCHEMA
import org.apache.flink.table.descriptors.{CustomConnectorDescriptor, DescriptorProperties, Schema}
import org.apache.flink.table.expressions.Expression
import org.apache.flink.table.factories.{ComponentFactoryService, StreamTableSourceFactory}
import org.apache.flink.table.functions._
import org.apache.flink.table.module.ModuleManager
import org.apache.flink.table.operations.{CatalogSinkModifyOperation, ModifyOperation, Operation, QueryOperation}
import org.apache.flink.table.planner.calcite.CalciteConfig
import org.apache.flink.table.planner.delegation.PlannerBase
import org.apache.flink.table.planner.functions.sql.FlinkSqlOperatorTable
import org.apache.flink.table.planner.operations.{DataStreamQueryOperation, PlannerQueryOperation, RichTableSourceQueryOperation}
import org.apache.flink.table.planner.plan.nodes.calcite.LogicalWatermarkAssigner
import org.apache.flink.table.planner.plan.nodes.exec.ExecNode
import org.apache.flink.table.planner.plan.optimize.program._
import org.apache.flink.table.planner.plan.stats.FlinkStatistic
import org.apache.flink.table.planner.plan.utils.{ExecNodePlanDumper, FlinkRelOptUtil}
import org.apache.flink.table.planner.runtime.utils.{TestingAppendTableSink, TestingRetractTableSink, TestingUpsertTableSink}
import org.apache.flink.table.planner.sinks.CollectRowTableSink
import org.apache.flink.table.runtime.types.TypeInfoLogicalTypeConverter.fromLogicalTypeToTypeInfo
import org.apache.flink.table.sinks._
import org.apache.flink.table.sources.{StreamTableSource, TableSource}
import org.apache.flink.table.types.logical.LogicalType
import org.apache.flink.table.types.utils.TypeConversions
import org.apache.flink.table.typeutils.FieldInfoUtils
import org.apache.flink.types.Row
import org.apache.calcite.avatica.util.TimeUnit
import org.apache.calcite.rel.RelNode
import org.apache.calcite.sql.parser.SqlParserPos
import org.apache.calcite.sql.{SqlExplainLevel, SqlIntervalQualifier}
import org.apache.commons.lang3.SystemUtils
import org.junit.Assert.{assertEquals, assertTrue}
import org.junit.Rule
import org.junit.rules.{ExpectedException, TemporaryFolder, TestName}
import _root_.java.math.{BigDecimal => JBigDecimal}
import _root_.java.util
import java.time.Duration
import _root_.scala.collection.JavaConversions._
import _root_.scala.io.Source
/**
* Test base for testing Table API / SQL plans.
*/
abstract class TableTestBase {
// used for accurate exception information checking.
val expectedException: ExpectedException = ExpectedException.none()
  // used to get the test case method name
val testName: TestName = new TestName
val _tempFolder = new TemporaryFolder
@Rule
def tempFolder: TemporaryFolder = _tempFolder
@Rule
def thrown: ExpectedException = expectedException
@Rule
def name: TestName = testName
def streamTestUtil(conf: TableConfig = new TableConfig): StreamTableTestUtil =
StreamTableTestUtil(this, conf = conf)
def scalaStreamTestUtil(): ScalaStreamTableTestUtil = ScalaStreamTableTestUtil(this)
def javaStreamTestUtil(): JavaStreamTableTestUtil = JavaStreamTableTestUtil(this)
def batchTestUtil(conf: TableConfig = new TableConfig): BatchTableTestUtil =
BatchTableTestUtil(this, conf = conf)
def scalaBatchTestUtil(): ScalaBatchTableTestUtil = ScalaBatchTableTestUtil(this)
def javaBatchTestUtil(): JavaBatchTableTestUtil = JavaBatchTableTestUtil(this)
def verifyTableEquals(expected: Table, actual: Table): Unit = {
val expectedString = FlinkRelOptUtil.toString(TableTestUtil.toRelNode(expected))
val actualString = FlinkRelOptUtil.toString(TableTestUtil.toRelNode(actual))
assertEquals(
"Logical plans do not match",
LogicalPlanFormatUtils.formatTempTableId(expectedString),
LogicalPlanFormatUtils.formatTempTableId(actualString))
}
}
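// A typical test built on this base looks roughly like the following sketch
// (table and field names are illustrative):
//
//   class MyPlanTest extends TableTestBase {
//     private val util = streamTestUtil()
//
//     @Test
//     def testProjection(): Unit = {
//       util.addTableSource[(Int, String)]("MyTable", 'a, 'b)
//       util.verifyPlan("SELECT a FROM MyTable")
//     }
//   }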
abstract class TableTestUtilBase(test: TableTestBase, isStreamingMode: Boolean) {
protected lazy val diffRepository: DiffRepository = DiffRepository.lookup(test.getClass)
protected val setting: EnvironmentSettings = if (isStreamingMode) {
EnvironmentSettings.newInstance().inStreamingMode().build()
} else {
EnvironmentSettings.newInstance().inBatchMode().build()
}
// a counter for unique table names
private var counter = 0L
private def getNextId: Long = {
counter += 1
counter
}
protected def getTableEnv: TableEnvironment
protected def isBounded: Boolean = !isStreamingMode
def getPlanner: PlannerBase = {
getTableEnv.asInstanceOf[TableEnvironmentImpl].getPlanner.asInstanceOf[PlannerBase]
}
/**
* Creates a table with the given DDL SQL string.
*/
def addTable(ddl: String): Unit = {
getTableEnv.executeSql(ddl)
}
/**
   * Creates a [[DataStream]] with the given schema,
   * and registers this DataStream under the given name in the TableEnvironment's catalog.
*
* @param name table name
* @param fields field names
* @tparam T field types
* @return returns the registered [[Table]].
*/
def addDataStream[T: TypeInformation](name: String, fields: Expression*): Table = {
val env = new ScalaStreamExecEnv(new LocalStreamEnvironment())
val dataStream = env.fromElements[T]().javaStream
val tableEnv = getTableEnv
TableTestUtil.createTemporaryView(tableEnv, name, dataStream, Some(fields.toArray))
tableEnv.from(name)
}
/**
   * Creates a [[TestTableSource]] with the given schema,
   * and registers this TableSource under a unique name in the TableEnvironment's catalog.
*
* @param fields field names
* @tparam T field types
* @return returns the registered [[Table]].
*/
def addTableSource[T: TypeInformation](fields: Expression*): Table = {
addTableSource[T](s"Table$getNextId", fields: _*)
}
/**
   * Creates a [[TestTableSource]] with the given schema,
   * and registers this TableSource under the given name in the TableEnvironment's catalog.
*
* @param name table name
* @param fields field names
* @tparam T field types
* @return returns the registered [[Table]].
*/
def addTableSource[T: TypeInformation](name: String, fields: Expression*): Table = {
val typeInfo: TypeInformation[T] = implicitly[TypeInformation[T]]
val tableSchema = if (fields.isEmpty) {
val fieldTypes: Array[TypeInformation[_]] = typeInfo match {
case tt: TupleTypeInfo[_] => (0 until tt.getArity).map(tt.getTypeAt).toArray
case ct: CaseClassTypeInfo[_] => (0 until ct.getArity).map(ct.getTypeAt).toArray
case at: AtomicType[_] => Array[TypeInformation[_]](at)
case pojo: PojoTypeInfo[_] => (0 until pojo.getArity).map(pojo.getTypeAt).toArray
case _ => throw new TableException(s"Unsupported type info: $typeInfo")
}
val types = fieldTypes.map(TypeConversions.fromLegacyInfoToDataType)
val names = FieldInfoUtils.getFieldNames(typeInfo)
TableSchema.builder().fields(names, types).build()
} else {
FieldInfoUtils.getFieldsInfo(typeInfo, fields.toArray).toTableSchema
}
addTableSource(name, new TestTableSource(isBounded, tableSchema))
}
/**
   * Creates a [[TestTableSource]] with the given schema, table stats and unique keys,
   * and registers this TableSource under the given name in the TableEnvironment's catalog.
*
* @param name table name
* @param types field types
* @param fields field names
* @return returns the registered [[Table]].
*/
def addTableSource(
name: String,
types: Array[TypeInformation[_]],
fields: Array[String]): Table = {
val schema = new TableSchema(fields, types)
val tableSource = new TestTableSource(isBounded, schema)
addTableSource(name, tableSource)
}
/**
   * Registers this TableSource under the given name in the TableEnvironment's catalog.
*
* @param name table name
* @param tableSource table source
* @return returns the registered [[Table]].
*/
def addTableSource(
name: String,
tableSource: TableSource[_]): Table = {
getTableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal(
name, tableSource)
getTableEnv.from(name)
}
/**
   * Registers a [[ScalarFunction]] under the given name in the TableEnvironment's catalog.
*/
def addFunction(name: String, function: ScalarFunction): Unit = {
getTableEnv.registerFunction(name, function)
}
/**
* Registers a [[UserDefinedFunction]] according to FLIP-65.
*/
def addTemporarySystemFunction(name: String, function: UserDefinedFunction): Unit = {
getTableEnv.createTemporarySystemFunction(name, function)
}
/**
* Registers a [[UserDefinedFunction]] class according to FLIP-65.
*/
def addTemporarySystemFunction(name: String, function: Class[_ <: UserDefinedFunction]): Unit = {
getTableEnv.createTemporarySystemFunction(name, function)
}
def verifyPlan(sql: String): Unit = {
doVerifyPlan(sql, Array.empty[ExplainDetail], withRowType = false, printPlanBefore = true)
}
def verifyPlan(sql: String, extraDetails: ExplainDetail*): Unit = {
doVerifyPlan(sql, extraDetails.toArray, withRowType = false, printPlanBefore = true)
}
def verifyPlan(table: Table): Unit = {
doVerifyPlan(table, Array.empty[ExplainDetail], withRowType = false, printPlanBefore = true)
}
def verifyPlan(table: Table, extraDetails: ExplainDetail*): Unit = {
doVerifyPlan(table, extraDetails.toArray, withRowType = false, printPlanBefore = true)
}
def verifyPlanWithType(sql: String): Unit = {
doVerifyPlan(sql, Array.empty[ExplainDetail], withRowType = true, printPlanBefore = true)
}
def verifyPlanWithType(table: Table): Unit = {
doVerifyPlan(table, Array.empty[ExplainDetail], withRowType = true, printPlanBefore = true)
}
def verifyPlanNotExpected(sql: String, notExpected: String*): Unit = {
verifyPlanNotExpected(getTableEnv.sqlQuery(sql), notExpected: _*)
}
def verifyPlanNotExpected(table: Table, notExpected: String*): Unit = {
require(notExpected.nonEmpty)
val relNode = TableTestUtil.toRelNode(table)
val optimizedPlan = getOptimizedPlan(Array(relNode), Array.empty, withRowType = false)
val result = notExpected.forall(!optimizedPlan.contains(_))
    val message = s"\nactual plan:\n$optimizedPlan\nnot expected:\n${notExpected.mkString(", ")}"
assertTrue(message, result)
}
def verifyExplain(stmtSet: StatementSet, extraDetails: ExplainDetail*): Unit = {
doVerifyExplain(
stmtSet.explain(extraDetails: _*),
extraDetails.contains(ExplainDetail.ESTIMATED_COST))
}
def verifyExplain(sql: String): Unit = verifyExplain(getTableEnv.sqlQuery(sql))
def verifyExplain(sql: String, extraDetails: ExplainDetail*): Unit = {
val table = getTableEnv.sqlQuery(sql)
verifyExplain(table, extraDetails: _*)
}
def verifyExplain(table: Table): Unit = {
doVerifyExplain(table.explain(), needReplaceEstimatedCost = false)
}
def verifyExplain(table: Table, extraDetails: ExplainDetail*): Unit = {
doVerifyExplain(
table.explain(extraDetails: _*),
extraDetails.contains(ExplainDetail.ESTIMATED_COST))
}
def verifyExplainInsert(
table: Table,
sink: TableSink[_],
targetPath: String,
extraDetails: ExplainDetail*): Unit = {
val stmtSet = getTableEnv.createStatementSet()
getTableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal(targetPath, sink)
stmtSet.addInsert(targetPath, table)
verifyExplain(stmtSet, extraDetails: _*)
}
def doVerifyPlan(
sql: String,
extraDetails: Array[ExplainDetail],
withRowType: Boolean,
printPlanBefore: Boolean): Unit = {
val table = getTableEnv.sqlQuery(sql)
val relNode = TableTestUtil.toRelNode(table)
val optimizedPlan = getOptimizedPlan(Array(relNode), extraDetails, withRowType = withRowType)
assertEqualsOrExpand("sql", sql)
if (printPlanBefore) {
val planBefore = SystemUtils.LINE_SEPARATOR +
FlinkRelOptUtil.toString(
relNode,
SqlExplainLevel.EXPPLAN_ATTRIBUTES,
withRowType = withRowType)
assertEqualsOrExpand("planBefore", planBefore)
}
val actual = SystemUtils.LINE_SEPARATOR + optimizedPlan
assertEqualsOrExpand("planAfter", actual.toString, expand = false)
}
def verifyResource(sql: String): Unit = {
assertEqualsOrExpand("sql", sql)
val table = getTableEnv.sqlQuery(sql)
doVerifyPlan(
table,
Array.empty,
withRowType = false,
printResource = true,
printPlanBefore = false)
}
def doVerifyPlan(
table: Table,
extraDetails: Array[ExplainDetail],
withRowType: Boolean,
printPlanBefore: Boolean): Unit = {
doVerifyPlan(
table = table,
extraDetails,
withRowType = withRowType,
printPlanBefore = printPlanBefore,
printResource = false)
}
def doVerifyPlan(
table: Table,
extraDetails: Array[ExplainDetail],
withRowType: Boolean,
printPlanBefore: Boolean,
printResource: Boolean): Unit = {
val relNode = TableTestUtil.toRelNode(table)
val optimizedPlan = getOptimizedPlan(
Array(relNode),
extraDetails,
withRowType = withRowType,
withResource = printResource)
if (printPlanBefore) {
val planBefore = SystemUtils.LINE_SEPARATOR +
FlinkRelOptUtil.toString(
relNode,
SqlExplainLevel.EXPPLAN_ATTRIBUTES,
withRowType = withRowType)
assertEqualsOrExpand("planBefore", planBefore)
}
val actual = SystemUtils.LINE_SEPARATOR + optimizedPlan
assertEqualsOrExpand("planAfter", actual.toString, expand = false)
}
private def doVerifyExplain(explainResult: String, needReplaceEstimatedCost: Boolean): Unit = {
val actual = if (needReplaceEstimatedCost) {
replaceEstimatedCost(explainResult)
} else {
explainResult
}
assertEqualsOrExpand("explain", TableTestUtil.replaceStageId(actual), expand = false)
}
protected def getOptimizedPlan(
relNodes: Array[RelNode],
extraDetails: Array[ExplainDetail],
withRowType: Boolean,
withResource: Boolean = false): String = {
require(relNodes.nonEmpty)
val planner = getPlanner
val optimizedRels = planner.optimize(relNodes)
val explainLevel = if (extraDetails.contains(ExplainDetail.ESTIMATED_COST)) {
SqlExplainLevel.ALL_ATTRIBUTES
} else {
SqlExplainLevel.EXPPLAN_ATTRIBUTES
}
val withChangelogTraits = extraDetails.contains(ExplainDetail.CHANGELOG_MODE)
optimizedRels.head match {
case _: ExecNode[_, _] =>
val optimizedNodes = planner.translateToExecNodePlan(optimizedRels)
require(optimizedNodes.length == optimizedRels.length)
ExecNodePlanDumper.dagToString(
optimizedNodes,
detailLevel = explainLevel,
withChangelogTraits = withChangelogTraits,
withOutputType = withRowType,
withResource = withResource)
case _ =>
optimizedRels.map { rel =>
FlinkRelOptUtil.toString(
rel,
detailLevel = explainLevel,
withChangelogTraits = withChangelogTraits,
withRowType = withRowType)
        }.mkString("\n")
}
}
/**
* ignore estimated cost, because it may be unstable.
*/
protected def replaceEstimatedCost(s: String): String = {
    var str = s.replaceAll("\r\n", "\n")
    val scientificFormRegExpr = "[+-]?[\\d]+([\\.][\\d]*)?([Ee][+-]?[0-9]{0,2})?"
str = str.replaceAll(s"rowcount = $scientificFormRegExpr", "rowcount = ")
str = str.replaceAll(s"$scientificFormRegExpr rows", "rows")
str = str.replaceAll(s"$scientificFormRegExpr cpu", "cpu")
str = str.replaceAll(s"$scientificFormRegExpr io", "io")
str = str.replaceAll(s"$scientificFormRegExpr network", "network")
str = str.replaceAll(s"$scientificFormRegExpr memory", "memory")
str
}
protected def assertEqualsOrExpand(tag: String, actual: String, expand: Boolean = true): Unit = {
val expected = s"$${$tag}"
if (!expand) {
diffRepository.assertEquals(test.name.getMethodName, tag, expected, actual)
return
}
val expanded = diffRepository.expand(test.name.getMethodName, tag, expected)
if (expanded != null && !expanded.equals(expected)) {
// expected does exist, check result
diffRepository.assertEquals(test.name.getMethodName, tag, expected, actual)
} else {
// expected does not exist, update
diffRepository.expand(test.name.getMethodName, tag, actual)
}
}
}
abstract class TableTestUtil(
test: TableTestBase,
// determines if the table environment should work in a batch or streaming mode
isStreamingMode: Boolean,
catalogManager: Option[CatalogManager] = None,
val tableConfig: TableConfig)
extends TableTestUtilBase(test, isStreamingMode) {
protected val testingTableEnv: TestingTableEnvironment =
TestingTableEnvironment.create(setting, catalogManager, tableConfig)
val tableEnv: TableEnvironment = testingTableEnv
tableEnv.getConfig.getConfiguration.setString(
ExecutionConfigOptions.TABLE_EXEC_SHUFFLE_MODE,
GlobalDataExchangeMode.ALL_EDGES_PIPELINED.toString)
private val env: StreamExecutionEnvironment = getPlanner.getExecEnv
override def getTableEnv: TableEnvironment = tableEnv
def getStreamEnv: StreamExecutionEnvironment = env
/**
   * Creates a [[TestTableSource]] with the given schema, table stats and unique keys,
   * and registers this TableSource under the given name in the TableEnvironment's catalog.
*
* @param name table name
* @param types field types
* @param fields field names
* @param statistic statistic of current table
* @return returns the registered [[Table]].
*/
def addTableSource(
name: String,
types: Array[TypeInformation[_]],
fields: Array[String],
statistic: FlinkStatistic = FlinkStatistic.UNKNOWN): Table = {
val schema = new TableSchema(fields, types)
val tableSource = new TestTableSource(isBounded, schema)
addTableSource(name, tableSource, statistic)
}
/**
   * Registers this TableSource under the given name in the TableEnvironment's catalog.
*
* @param name table name
* @param tableSource table source
* @param statistic statistic of current table
* @return returns the registered [[Table]].
*/
def addTableSource(
name: String,
tableSource: TableSource[_],
statistic: FlinkStatistic): Table = {
// TODO RichTableSourceQueryOperation should be deleted and use registerTableSourceInternal
// method instead of registerTable method here after unique key in TableSchema is ready
// and setting catalog statistic to TableSourceTable in DatabaseCalciteSchema is ready
val identifier = ObjectIdentifier.of(
testingTableEnv.getCurrentCatalog,
testingTableEnv.getCurrentDatabase,
name)
val operation = new RichTableSourceQueryOperation(
identifier,
tableSource,
statistic)
val table = testingTableEnv.createTable(operation)
testingTableEnv.registerTable(name, table)
testingTableEnv.from(name)
}
/**
* @deprecated Use [[addTemporarySystemFunction()]] for the new type inference.
*/
@deprecated
def addFunction[T: TypeInformation](
name: String,
function: TableFunction[T]): Unit = testingTableEnv.registerFunction(name, function)
/**
* @deprecated Use [[addTemporarySystemFunction()]] for the new type inference.
*/
@deprecated
def addFunction[T: TypeInformation, ACC: TypeInformation](
name: String,
function: AggregateFunction[T, ACC]): Unit = testingTableEnv.registerFunction(name, function)
/**
* @deprecated Use [[addTemporarySystemFunction()]] for the new type inference.
*/
@deprecated
def addFunction[T: TypeInformation, ACC: TypeInformation](
name: String,
function: TableAggregateFunction[T, ACC]): Unit = {
testingTableEnv.registerFunction(name, function)
}
def verifyPlanInsert(sql: String): Unit = {
doVerifyPlanInsert(sql, Array.empty, withRowType = false, printPlanBefore = true)
}
def verifyPlanInsert(
table: Table,
sink: TableSink[_],
targetPath: String,
extraDetails: ExplainDetail*): Unit = {
val stmtSet = tableEnv.createStatementSet()
tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSinkInternal(targetPath, sink)
stmtSet.addInsert(targetPath, table)
verifyPlan(stmtSet, extraDetails: _*)
}
def verifyPlan(stmtSet: StatementSet, extraDetails: ExplainDetail*): Unit = {
doVerifyPlan(stmtSet, extraDetails.toArray, withRowType = false, printPlanBefore = true)
}
def doVerifyPlanInsert(
sql: String,
extraDetails: Array[ExplainDetail],
withRowType: Boolean,
printPlanBefore: Boolean): Unit = {
assertEqualsOrExpand("sql", sql)
val stmtSet = tableEnv.createStatementSet()
stmtSet.addInsertSql(sql)
doVerifyPlan(stmtSet, extraDetails, withRowType, printPlanBefore)
}
def doVerifyPlan(
stmtSet: StatementSet,
extraDetails: Array[ExplainDetail],
withRowType: Boolean,
printPlanBefore: Boolean): Unit = {
val testStmtSet = stmtSet.asInstanceOf[TestingStatementSet]
val relNodes = testStmtSet.getOperations.map(getPlanner.translateToRel)
if (relNodes.isEmpty) {
throw new TableException("No output table have been created yet. " +
"A program needs at least one output table that consumes data.\\n" +
"Please create output table(s) for your program")
}
val optimizedPlan = getOptimizedPlan(
relNodes.toArray,
extraDetails,
withRowType = withRowType)
if (printPlanBefore) {
val planBefore = new StringBuilder
relNodes.foreach { sink =>
planBefore.append(System.lineSeparator)
planBefore.append(FlinkRelOptUtil.toString(sink, SqlExplainLevel.EXPPLAN_ATTRIBUTES))
}
assertEqualsOrExpand("planBefore", planBefore.toString())
}
val actual = if (extraDetails.contains(ExplainDetail.ESTIMATED_COST)) {
SystemUtils.LINE_SEPARATOR + replaceEstimatedCost(optimizedPlan)
} else {
SystemUtils.LINE_SEPARATOR + optimizedPlan
}
assertEqualsOrExpand("planAfter", actual.toString, expand = false)
}
}
abstract class ScalaTableTestUtil(
test: TableTestBase,
isStreamingMode: Boolean)
extends TableTestUtilBase(test, isStreamingMode) {
// scala env
val env = new ScalaStreamExecEnv(new LocalStreamEnvironment())
// scala tableEnv
val tableEnv: ScalaStreamTableEnv = ScalaStreamTableEnv.create(env, setting)
override def getTableEnv: TableEnvironment = tableEnv
/**
   * Registers a [[TableFunction]] under the given name in the TableEnvironment's catalog.
*/
def addFunction[T: TypeInformation](
name: String,
function: TableFunction[T]): Unit = tableEnv.registerFunction(name, function)
/**
   * Registers an [[AggregateFunction]] under the given name in the TableEnvironment's catalog.
*/
def addFunction[T: TypeInformation, ACC: TypeInformation](
name: String,
function: AggregateFunction[T, ACC]): Unit = tableEnv.registerFunction(name, function)
/**
   * Registers a [[TableAggregateFunction]] under the given name in the TableEnvironment's catalog.
*/
def addFunction[T: TypeInformation, ACC: TypeInformation](
name: String,
function: TableAggregateFunction[T, ACC]): Unit = tableEnv.registerFunction(name, function)
}
abstract class JavaTableTestUtil(
test: TableTestBase,
isStreamingMode: Boolean)
extends TableTestUtilBase(test, isStreamingMode) {
// java env
val env = new LocalStreamEnvironment()
// java tableEnv
// use impl class instead of interface class to avoid
// "Static methods in interface require -target:jvm-1.8"
val tableEnv: JavaStreamTableEnv = JavaStreamTableEnvImpl.create(env, setting, new TableConfig)
override def getTableEnv: TableEnvironment = tableEnv
/**
   * Registers a [[TableFunction]] under the given name in the TableEnvironment's catalog.
*/
def addFunction[T: TypeInformation](
name: String,
function: TableFunction[T]): Unit = tableEnv.registerFunction(name, function)
/**
   * Registers an [[AggregateFunction]] under the given name in the TableEnvironment's catalog.
*/
def addFunction[T: TypeInformation, ACC: TypeInformation](
name: String,
function: AggregateFunction[T, ACC]): Unit = tableEnv.registerFunction(name, function)
/**
   * Registers a [[TableAggregateFunction]] under the given name in the TableEnvironment's catalog.
*/
def addFunction[T: TypeInformation, ACC: TypeInformation](
name: String,
function: TableAggregateFunction[T, ACC]): Unit = tableEnv.registerFunction(name, function)
}
/**
* Utility for stream table test.
*/
case class StreamTableTestUtil(
test: TableTestBase,
catalogManager: Option[CatalogManager] = None,
conf: TableConfig = new TableConfig)
extends TableTestUtil(test, isStreamingMode = true, catalogManager, conf) {
/**
   * Registers a table with a specific row-time field and offset.
*
* @param tableName table name
* @param sourceTable table to register
* @param rowtimeField row time field
* @param offset offset to the row time field value
*/
def addTableWithWatermark(
tableName: String,
sourceTable: Table,
rowtimeField: String,
offset: Long): Unit = {
val sourceRel = TableTestUtil.toRelNode(sourceTable)
val rowtimeFieldIdx = sourceRel.getRowType.getFieldNames.indexOf(rowtimeField)
if (rowtimeFieldIdx < 0) {
throw new TableException(s"$rowtimeField does not exist, please check it")
}
val rexBuilder = sourceRel.getCluster.getRexBuilder
val inputRef = rexBuilder.makeInputRef(sourceRel, rowtimeFieldIdx)
val offsetLiteral = rexBuilder.makeIntervalLiteral(
JBigDecimal.valueOf(offset),
new SqlIntervalQualifier(TimeUnit.MILLISECOND, null, SqlParserPos.ZERO))
val expr = rexBuilder.makeCall(FlinkSqlOperatorTable.MINUS, inputRef, offsetLiteral)
val watermarkAssigner = new LogicalWatermarkAssigner(
sourceRel.getCluster,
sourceRel.getTraitSet,
sourceRel,
rowtimeFieldIdx,
expr
)
val queryOperation = new PlannerQueryOperation(watermarkAssigner)
testingTableEnv.registerTable(tableName, testingTableEnv.createTable(queryOperation))
}
def buildStreamProgram(firstProgramNameToRemove: String): Unit = {
val program = FlinkStreamProgram.buildProgram(tableEnv.getConfig.getConfiguration)
var startRemove = false
program.getProgramNames.foreach {
name =>
if (name.equals(firstProgramNameToRemove)) {
startRemove = true
}
if (startRemove) {
program.remove(name)
}
}
replaceStreamProgram(program)
}
def replaceStreamProgram(program: FlinkChainedProgram[StreamOptimizeContext]): Unit = {
var calciteConfig = TableConfigUtils.getCalciteConfig(tableEnv.getConfig)
calciteConfig = CalciteConfig.createBuilder(calciteConfig)
.replaceStreamProgram(program).build()
tableEnv.getConfig.setPlannerConfig(calciteConfig)
}
def getStreamProgram(): FlinkChainedProgram[StreamOptimizeContext] = {
val tableConfig = tableEnv.getConfig
val calciteConfig = TableConfigUtils.getCalciteConfig(tableConfig)
calciteConfig.getStreamProgram.getOrElse(FlinkStreamProgram.buildProgram(
tableConfig.getConfiguration))
}
def enableMiniBatch(): Unit = {
tableEnv.getConfig.getConfiguration.setBoolean(
ExecutionConfigOptions.TABLE_EXEC_MINIBATCH_ENABLED, true)
tableEnv.getConfig.getConfiguration.set(
ExecutionConfigOptions.TABLE_EXEC_MINIBATCH_ALLOW_LATENCY, Duration.ofSeconds(1))
tableEnv.getConfig.getConfiguration.setLong(
ExecutionConfigOptions.TABLE_EXEC_MINIBATCH_SIZE, 3L)
}
def createAppendTableSink(
fieldNames: Array[String],
fieldTypes: Array[LogicalType]): AppendStreamTableSink[Row] = {
require(fieldNames.length == fieldTypes.length)
val typeInfos = fieldTypes.map(fromLogicalTypeToTypeInfo)
new TestingAppendTableSink().configure(fieldNames, typeInfos)
}
def createUpsertTableSink(
keys: Array[Int],
fieldNames: Array[String],
fieldTypes: Array[LogicalType]): UpsertStreamTableSink[RowData] = {
require(fieldNames.length == fieldTypes.length)
val typeInfos = fieldTypes.map(fromLogicalTypeToTypeInfo)
new TestingUpsertTableSink(keys).configure(fieldNames, typeInfos)
}
def createRetractTableSink(
fieldNames: Array[String],
fieldTypes: Array[LogicalType]): RetractStreamTableSink[Row] = {
require(fieldNames.length == fieldTypes.length)
val typeInfos = fieldTypes.map(fromLogicalTypeToTypeInfo)
new TestingRetractTableSink().configure(fieldNames, typeInfos)
}
}
/**
* Utility for stream scala table test.
*/
case class ScalaStreamTableTestUtil(test: TableTestBase) extends ScalaTableTestUtil(test, true) {
}
/**
* Utility for stream java table test.
*/
case class JavaStreamTableTestUtil(test: TableTestBase) extends JavaTableTestUtil(test, true) {
}
/**
* Utility for batch table test.
*/
case class BatchTableTestUtil(
test: TableTestBase,
catalogManager: Option[CatalogManager] = None,
conf: TableConfig = new TableConfig)
extends TableTestUtil(test, isStreamingMode = false, catalogManager, conf) {
def buildBatchProgram(firstProgramNameToRemove: String): Unit = {
val program = FlinkBatchProgram.buildProgram(tableEnv.getConfig.getConfiguration)
var startRemove = false
program.getProgramNames.foreach {
name =>
if (name.equals(firstProgramNameToRemove)) {
startRemove = true
}
if (startRemove) {
program.remove(name)
}
}
replaceBatchProgram(program)
}
def replaceBatchProgram(program: FlinkChainedProgram[BatchOptimizeContext]): Unit = {
var calciteConfig = TableConfigUtils.getCalciteConfig(tableEnv.getConfig)
calciteConfig = CalciteConfig.createBuilder(calciteConfig)
.replaceBatchProgram(program).build()
tableEnv.getConfig.setPlannerConfig(calciteConfig)
}
def getBatchProgram(): FlinkChainedProgram[BatchOptimizeContext] = {
val tableConfig = tableEnv.getConfig
val calciteConfig = TableConfigUtils.getCalciteConfig(tableConfig)
calciteConfig.getBatchProgram.getOrElse(FlinkBatchProgram.buildProgram(
tableConfig.getConfiguration))
}
def createCollectTableSink(
fieldNames: Array[String],
fieldTypes: Array[LogicalType]): TableSink[Row] = {
require(fieldNames.length == fieldTypes.length)
val typeInfos = fieldTypes.map(fromLogicalTypeToTypeInfo)
new CollectRowTableSink().configure(fieldNames, typeInfos)
}
}
/**
* Utility for batch scala table test.
*/
case class ScalaBatchTableTestUtil(test: TableTestBase) extends ScalaTableTestUtil(test, false) {
}
/**
* Utility for batch java table test.
*/
case class JavaBatchTableTestUtil(test: TableTestBase) extends JavaTableTestUtil(test, false) {
}
/**
* Batch/Stream [[org.apache.flink.table.sources.TableSource]] for testing.
*/
class TestTableSource(override val isBounded: Boolean, schema: TableSchema)
extends StreamTableSource[Row] {
override def getDataStream(execEnv: environment.StreamExecutionEnvironment): DataStream[Row] = {
execEnv.fromCollection(List[Row](), getReturnType)
}
override def getReturnType: TypeInformation[Row] = {
val logicalTypes = schema.getFieldTypes
new RowTypeInfo(logicalTypes, schema.getFieldNames)
}
override def getTableSchema: TableSchema = schema
}
object TestTableSource {
def createTemporaryTable(
tEnv: TableEnvironment,
isBounded: Boolean,
tableSchema: TableSchema,
tableName: String): Unit = {
tEnv.connect(
new CustomConnectorDescriptor("TestTableSource", 1, false)
.property("is-bounded", if (isBounded) "true" else "false"))
.withSchema(new Schema().schema(tableSchema))
.createTemporaryTable(tableName)
}
}
class TestTableSourceFactory extends StreamTableSourceFactory[Row] {
override def createStreamTableSource(
properties: util.Map[String, String]): StreamTableSource[Row] = {
val dp = new DescriptorProperties
dp.putProperties(properties)
val tableSchema = dp.getTableSchema(SCHEMA)
val isBounded = dp.getOptionalBoolean("is-bounded").orElse(false)
new TestTableSource(isBounded, tableSchema)
}
override def requiredContext(): util.Map[String, String] = {
val context = new util.HashMap[String, String]()
context.put(CONNECTOR_TYPE, "TestTableSource")
context
}
override def supportedProperties(): util.List[String] = {
val properties = new util.ArrayList[String]()
properties.add("*")
properties
}
}
class TestingTableEnvironment private(
catalogManager: CatalogManager,
moduleManager: ModuleManager,
tableConfig: TableConfig,
executor: Executor,
functionCatalog: FunctionCatalog,
planner: PlannerBase,
isStreamingMode: Boolean,
userClassLoader: ClassLoader)
extends TableEnvironmentImpl(
catalogManager,
moduleManager,
tableConfig,
executor,
functionCatalog,
planner,
isStreamingMode,
userClassLoader) {
  // just for testing; remove this method once
  // `<T> void registerFunction(String name, TableFunction<T> tableFunction);`
  // is added to TableEnvironment
def registerFunction[T: TypeInformation](name: String, tf: TableFunction[T]): Unit = {
val typeInfo = UserDefinedFunctionHelper
.getReturnTypeOfTableFunction(tf, implicitly[TypeInformation[T]])
functionCatalog.registerTempSystemTableFunction(
name,
tf,
typeInfo
)
}
  // just for testing; remove this method once
  // `<T, ACC> void registerFunction(String name, AggregateFunction<T, ACC> aggregateFunction);`
  // is added to TableEnvironment
def registerFunction[T: TypeInformation, ACC: TypeInformation](
name: String,
f: AggregateFunction[T, ACC]): Unit = {
registerImperativeAggregateFunction(name, f)
}
  // just for testing; remove this method once
  // `<T, ACC> void registerFunction(String name, TableAggregateFunction<T, ACC> tableAggFunc);`
  // is added to TableEnvironment
def registerFunction[T: TypeInformation, ACC: TypeInformation](
name: String,
f: TableAggregateFunction[T, ACC]): Unit = {
registerImperativeAggregateFunction(name, f)
}
private def registerImperativeAggregateFunction[T: TypeInformation, ACC: TypeInformation](
name: String,
f: ImperativeAggregateFunction[T, ACC]): Unit = {
val typeInfo = UserDefinedFunctionHelper
.getReturnTypeOfAggregateFunction(f, implicitly[TypeInformation[T]])
val accTypeInfo = UserDefinedFunctionHelper
.getAccumulatorTypeOfAggregateFunction(f, implicitly[TypeInformation[ACC]])
functionCatalog.registerTempSystemAggregateFunction(
name,
f,
typeInfo,
accTypeInfo
)
}
override def createTable(tableOperation: QueryOperation): TableImpl = {
super.createTable(tableOperation)
}
override def createStatementSet(): StatementSet = new TestingStatementSet(this)
}
class TestingStatementSet(tEnv: TestingTableEnvironment) extends StatementSet {
private val operations: util.List[ModifyOperation] = new util.ArrayList[ModifyOperation]
def getOperations: util.List[ModifyOperation] = operations
override def addInsertSql(statement: String): StatementSet = {
val operations = tEnv.getParser.parse(statement)
if (operations.size != 1) {
throw new TableException("Only single statement is supported.")
}
operations.get(0) match {
case op: ModifyOperation =>
this.operations.add(op)
case _ =>
throw new TableException("Only insert statement is supported now.")
}
this
}
override def addInsert(targetPath: String, table: Table): StatementSet = {
this.addInsert(targetPath, table, overwrite = false)
}
override def addInsert(targetPath: String, table: Table, overwrite: Boolean): StatementSet = {
val unresolvedIdentifier = tEnv.getParser.parseIdentifier(targetPath)
val objectIdentifier = tEnv.getCatalogManager.qualifyIdentifier(unresolvedIdentifier)
operations.add(new CatalogSinkModifyOperation(
objectIdentifier,
table.getQueryOperation,
util.Collections.emptyMap[String, String],
overwrite,
util.Collections.emptyMap[String, String]))
this
}
override def explain(extraDetails: ExplainDetail*): String = {
tEnv.explainInternal(operations.map(o => o.asInstanceOf[Operation]), extraDetails: _*)
}
override def execute(): TableResult = {
try {
tEnv.executeInternal(operations)
} finally {
operations.clear()
}
}
}
object TestingTableEnvironment {
def create(
settings: EnvironmentSettings,
catalogManager: Option[CatalogManager] = None,
tableConfig: TableConfig): TestingTableEnvironment = {
// temporary solution until FLINK-15635 is fixed
val classLoader = Thread.currentThread.getContextClassLoader
val moduleManager = new ModuleManager
val catalogMgr = catalogManager match {
case Some(c) => c
case _ =>
CatalogManager.newBuilder
.classLoader(classLoader)
.config(tableConfig.getConfiguration)
.defaultCatalog(
settings.getBuiltInCatalogName,
new GenericInMemoryCatalog(
settings.getBuiltInCatalogName,
settings.getBuiltInDatabaseName))
.build
}
val functionCatalog = new FunctionCatalog(tableConfig, catalogMgr, moduleManager)
val executorProperties = settings.toExecutorProperties
val executor = ComponentFactoryService.find(classOf[ExecutorFactory],
executorProperties).create(executorProperties)
val plannerProperties = settings.toPlannerProperties
val planner = ComponentFactoryService.find(classOf[PlannerFactory], plannerProperties)
.create(plannerProperties, executor, tableConfig, functionCatalog, catalogMgr)
.asInstanceOf[PlannerBase]
new TestingTableEnvironment(
catalogMgr,
moduleManager,
tableConfig,
executor,
functionCatalog,
planner,
settings.isStreamingMode,
classLoader)
}
}
object TableTestUtil {
val STREAM_SETTING: EnvironmentSettings =
EnvironmentSettings.newInstance().inStreamingMode().build()
val BATCH_SETTING: EnvironmentSettings = EnvironmentSettings.newInstance().inBatchMode().build()
/**
   * Converts the operation tree of the given table to a RelNode tree.
*/
def toRelNode(table: Table): RelNode = {
table.asInstanceOf[TableImpl]
.getTableEnvironment.asInstanceOf[TableEnvironmentImpl]
.getPlanner.asInstanceOf[PlannerBase]
.getRelBuilder.queryOperation(table.getQueryOperation).build()
}
def createTemporaryView[T](
tEnv: TableEnvironment,
name: String,
dataStream: DataStream[T],
fields: Option[Array[Expression]] = None,
fieldNullables: Option[Array[Boolean]] = None,
statistic: Option[FlinkStatistic] = None): Unit = {
val planner = tEnv.asInstanceOf[TableEnvironmentImpl].getPlanner.asInstanceOf[PlannerBase]
val execEnv = planner.getExecEnv
val streamType = dataStream.getType
// get field names and types for all non-replaced fields
val typeInfoSchema = fields.map((f: Array[Expression]) => {
val fieldsInfo = FieldInfoUtils.getFieldsInfo(streamType, f)
// check if event-time is enabled
if (fieldsInfo.isRowtimeDefined &&
(execEnv.getStreamTimeCharacteristic ne TimeCharacteristic.EventTime)) {
throw new ValidationException(String.format(
"A rowtime attribute requires an EventTime time characteristic in stream " +
"environment. But is: %s",
execEnv.getStreamTimeCharacteristic))
}
fieldsInfo
}).getOrElse(FieldInfoUtils.getFieldsInfo(streamType))
val fieldCnt = typeInfoSchema.getFieldTypes.length
val dataStreamQueryOperation = new DataStreamQueryOperation(
ObjectIdentifier.of(tEnv.getCurrentCatalog, tEnv.getCurrentDatabase, name),
dataStream,
typeInfoSchema.getIndices,
typeInfoSchema.toTableSchema,
fieldNullables.getOrElse(Array.fill(fieldCnt)(true)),
statistic.getOrElse(FlinkStatistic.UNKNOWN)
)
val table = createTable(tEnv, dataStreamQueryOperation)
tEnv.registerTable(name, table)
}
def createTable(tEnv: TableEnvironment, queryOperation: QueryOperation): Table = {
val createTableMethod = tEnv match {
case _: ScalaStreamTableEnvImpl | _: JavaStreamTableEnvImpl =>
tEnv.getClass.getSuperclass.getDeclaredMethod("createTable", classOf[QueryOperation])
case t: TableEnvironmentImpl =>
t.getClass.getDeclaredMethod("createTable", classOf[QueryOperation])
case _ => throw new TableException(s"Unsupported class: ${tEnv.getClass.getCanonicalName}")
}
createTableMethod.setAccessible(true)
createTableMethod.invoke(tEnv, queryOperation).asInstanceOf[Table]
}
def readFromResource(path: String): String = {
val inputStream = getClass.getResource(path).getFile
Source.fromFile(inputStream).mkString
}
/**
   * Stage {id} is ignored, because the id keeps incrementing within a test class
   * while the StreamExecutionEnvironment is up.
*/
def replaceStageId(s: String): String = {
    s.replaceAll("\r\n", "\n").replaceAll("Stage \\d+", "")
}
}
| greghogan/flink | flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/utils/TableTestBase.scala | Scala | apache-2.0 | 44,306 |
package drt.client.services
import drt.client.SPAMain
import drt.client.logger.LoggerFactory
import drt.client.logger.log
import drt.client.modules.GoogleEventTracker
import org.scalajs.dom
import org.scalajs.dom.Event
object ErrorHandler {
def registerGlobalErrorHandler(): Unit = {
LoggerFactory.getLogger("ErrorHandler").debug("Registering global error handler for uncaught exceptions")
dom.window.onerror = (ev: Event, url: String, line: Int, col: Int, error: Any) => {
val serverLogger = LoggerFactory.getXHRLogger("error")
val message = s"Event: $ev, Url: $url, Line: $line, Col: $col, Error: $error, Browser: ${dom.window.navigator.appVersion}"
GoogleEventTracker.sendError(message, fatal = true)
error match {
case e: Exception =>
log.error(message, e)
serverLogger.error(message, e)
case _ =>
serverLogger.error(message)
}
val reload = dom.window.confirm("Sorry, we have encountered an error. The error has been logged. Would you like to reload the page?")
if (reload) {
dom.window.location.reload(true)
}
}
}
}
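// Intended to be invoked once during client start-up, before other UI wiring is done,
// e.g. (sketch; the actual call site lives elsewhere in this codebase):
//
//   ErrorHandler.registerGlobalErrorHandler()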
| UKHomeOffice/drt-scalajs-spa-exploration | client/src/main/scala/drt/client/services/ErrorHandler.scala | Scala | apache-2.0 | 1,143 |
object Macros {
def foo(x: Any) = macro Impls.foo
}
object Test extends App {
import Macros._
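  // `x` below is deliberately unresolved: this file is part of a negative test
  // (neg/macro-noexpand), so compilation is expected to fail here.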
foo(x)
} | felixmulder/scala | test/files/neg/macro-noexpand/Macros_Test_2.scala | Scala | bsd-3-clause | 109 |
package com.michalrus.nofatty.ui
import java.awt.{ BorderLayout, Dimension, Toolkit }
import javax.swing._
import com.michalrus.nofatty.Logging
import com.michalrus.nofatty.data.{ Days, Products }
import com.michalrus.nofatty.ui.utils._
import org.jfree.chart.ChartPanel
import org.joda.time.LocalDate
object Ui extends Logging {
val ChartDays = 100
def initialize(): Unit = edt {
timed("initializing the UI") {
val f = timed("creating the frame") {
UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName)
{ val _ = UIManager.getLookAndFeelDefaults.put("Slider.paintValue", false) }
{ val _ = UIManager.put("Slider.paintValue", false) }
try {
val xToolkit = Toolkit.getDefaultToolkit
val awtAppClassNameField = xToolkit.getClass.getDeclaredField("awtAppClassName")
awtAppClassNameField.setAccessible(true)
awtAppClassNameField.set(xToolkit, "nofatty")
}
catch {
case _: NoSuchFieldException ⇒
}
val f = new JFrame
f.setTitle("nofatty")
f.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE)
f.setSize(new Dimension(950, 700))
f.setMinimumSize(f.getSize)
f.setLayout(new BorderLayout)
f.setLocationRelativeTo(Unsafe.NullComponent)
Option(getClass.getResource("/icon.png")) foreach { ic ⇒
f.setIconImage(new ImageIcon(ic).getImage)
}
f
}
val ltv = timed("creating the left tabbed view") {
val ltv = new JTabbedPane()
f.add(ltv, BorderLayout.LINE_START)
ltv
}
val today = timed("loading joda-time") { LocalDate.now }
val _ = Products
val charts = {
import com.michalrus.nofatty.chart._
val cs = timed("creating charts") { List(EnergyIntake, StackedRatios, FatCarbohydrate) }
timed(s"loading last $ChartDays days into plots") {
val days = Days.between(today minusDays ChartDays, today) map (d ⇒ (d.date, Some(d)))
cs foreach (_.refresh(days))
}
cs
}
val inputPane = timed("creating InputPane") {
val inputPane = new InputPane(date ⇒ charts foreach (_.refresh(Seq((date, Days.find(date))))))
ltv.addTab("Daily input", inputPane)
inputPane
}
timed("creating ProductListPane") {
ltv.addTab("Products", new ProductListPane({ editedUuid ⇒
val days = Days.usingProduct(editedUuid) filter (org.joda.time.Days.daysBetween(_, today).getDays < ChartDays) map (d ⇒ (d, Days find d))
charts foreach (_.refresh(days))
inputPane.refresh()
}))
}
timed("creating PrefsPane") {
ltv.addTab("Prefs", new PrefsPane(charts.foreach(_ refresh Nil)))
}
def rtv(select: Int): JTabbedPane = {
val r = new JTabbedPane()
charts foreach (ch ⇒ r.addTab(ch.title, new ChartPanel(ch.chart)))
r.setSelectedIndex(select)
r
}
val split = timed("creating ChartPanels") {
val split = new JSplitPane(JSplitPane.VERTICAL_SPLIT, rtv(0), rtv(1))
split.setContinuousLayout(true)
split.setResizeWeight(0.5)
f.add(split, BorderLayout.CENTER)
split
}
timed("displaying the frame") {
f.setVisible(true)
split.setDividerLocation(0.5)
ltv.setPreferredSize(new Dimension(360, 0))
}
}
}
}
| michalrus/nofatty | src/main/scala/com/michalrus/nofatty/ui/Ui.scala | Scala | apache-2.0 | 3,462 |
package uk.gov.bis.levyApiMock.actions
import play.api.mvc.Results._
import play.api.mvc.{ActionBuilder, Request, Result, WrappedRequest}
import uk.gov.bis.levyApiMock.data.oauth2.AuthRecord
import scala.concurrent.{ExecutionContext, Future}
case class AuthRequest[+A](authRecord: AuthRecord, request: Request[A]) extends WrappedRequest(request)
trait AuthAction extends ActionBuilder[AuthRequest] {
implicit val ec: ExecutionContext
override def invokeBlock[A](request: Request[A], next: (AuthRequest[A]) => Future[Result]): Future[Result] = {
val BearerToken = "Bearer (.+)".r
request.headers.get("Authorization") match {
case Some(BearerToken(accessToken)) => validateToken(accessToken).flatMap {
case Some(authRecord) => next(AuthRequest(authRecord, request))
case None => unauthorized("Bearer token does not grant access to the requested resource")
}
case Some(h) => unauthorized("Authorization header should be a Bearer token")
case None => unauthorized("No Authorization header found")
}
}
def validateToken(accessToken: String): Future[Option[AuthRecord]]
private def unauthorized(message: String): Future[Result] = Future.successful(Unauthorized(message))
}
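// A minimal sketch of a concrete implementation (the `AuthRecords` DAO name is
// illustrative and not part of this file):
//
//   class OAuthAction(records: AuthRecords)(implicit val ec: ExecutionContext) extends AuthAction {
//     override def validateToken(accessToken: String): Future[Option[AuthRecord]] =
//       records.findByAccessToken(accessToken)
//   }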
| SkillsFundingAgency/das-alpha-hmrc-api-mock | src/main/scala/uk/gov/bis/levyApiMock/actions/AuthAction.scala | Scala | mit | 1,237 |
object Test {
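  // Exercises toolbox compilation of calls to the varargs constructor, a static method
  // and an instance method of `p.Varargs` (presumably defined alongside this test).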
def main(args: Array[String]): Unit = {
import scala.tools.reflect.ToolBox
val m = reflect.runtime.currentMirror
val u = m.universe
import u._
    val tb = m.mkToolBox()
tb.compile(q"new p.Varargs(null, null)")
tb.compile(q"p.Varargs.staticMethod(null, null)")
tb.compile(q"(null: p.Varargs).instanceMethod(null, null)")
}
}
| felixmulder/scala | test/files/run/toolbox-varargs/Test.scala | Scala | bsd-3-clause | 381 |
package com.blinkbox.books.spray
import com.codahale.metrics.health.HealthCheck
import org.json4s.JsonAST.{JBool, JString}
import org.json4s.jackson.JsonMethods._
import org.mockito.Mockito.when
import org.scalatest.mock.MockitoSugar
import org.scalatest.{FunSuite, Matchers}
import spray.http.CacheDirectives._
import spray.http.HttpHeaders._
import spray.http.MediaTypes._
import spray.http.StatusCodes._
import spray.http.Uri.Path
import spray.testkit.ScalatestRouteTest
class HealthCheckHttpServiceTests extends FunSuite with ScalatestRouteTest with MockitoSugar with Matchers {
test("Ping returns the word 'pong' as text/plain content") {
val service = basicHealthCheckService()
Get("/health/ping") ~> service.routes ~> check {
assert(contentType.mediaType == `text/plain`)
assert(body.asString == "pong")
}
}
test("Ping does not allow the response to be cached") {
val service = basicHealthCheckService()
Get("/health/ping") ~> service.routes ~> check {
assert(header[`Cache-Control`] == Some(`Cache-Control`(`no-store`)))
}
}
test("Ping returns an X-App-Version header") {
val service = basicHealthCheckService()
Get("/health/ping") ~> service.routes ~> check {
assert(header("X-App-Version").isDefined)
}
}
test("Health report returns application/json content") {
val service = basicHealthCheckService()
Get("/health/report") ~> service.routes ~> check {
assert(contentType.mediaType == `application/json`)
}
}
test("Health report does not allow the response to be cached") {
val service = basicHealthCheckService()
Get("/health/report") ~> service.routes ~> check {
assert(header[`Cache-Control`] == Some(`Cache-Control`(`no-store`)))
}
}
test("Health report returns healthy status for a passed check") {
val healthCheck = mock[HealthCheck]
when(healthCheck.execute()).thenReturn(HealthCheck.Result.healthy("A message!"))
val service = basicHealthCheckService()
service.healthChecks.register("good", healthCheck)
Get("/health/report") ~> service.routes ~> check {
assert(status == OK)
val json = parse(body.asString)
      assert((json \\ "good" \\ "healthy") == JBool(true))
      assert((json \\ "good" \\ "message") == JString("A message!"))
}
}
test("Health report returns unhealthy status for a failed check") {
val healthCheck = mock[HealthCheck]
when(healthCheck.execute()).thenReturn(HealthCheck.Result.unhealthy("A sad message :-("))
val service = basicHealthCheckService()
service.healthChecks.register("bad", healthCheck)
Get("/health/report") ~> service.routes ~> check {
assert(status == InternalServerError)
val json = parse(body.asString)
      assert((json \\ "bad" \\ "healthy") == JBool(false))
      assert((json \\ "bad" \\ "message") == JString("A sad message :-("))
}
}
test("Health report can be mounted at non-root URLs") {
val service = basicHealthCheckService("/some/root")
Get("/some/root/health/report") ~> service.routes ~> check {
assert(contentType.mediaType == `application/json`)
}
}
test("Health report returns an X-App-Version header") {
val service = basicHealthCheckService()
Get("/health/report") ~> service.routes ~> check {
assert(header("X-App-Version").isDefined)
}
}
test("Thread dump returns a thread dump as text/plain content") {
val service = basicHealthCheckService()
Get("/health/threads") ~> service.routes ~> check {
assert(contentType.mediaType == `text/plain`)
assert(body.asString.length > 0) // not sure how else to check this content!
}
}
test("Thread dump does not allow the response to be cached") {
val service = basicHealthCheckService()
Get("/health/threads") ~> service.routes ~> check {
assert(header[`Cache-Control`] == Some(`Cache-Control`(`no-store`)))
}
}
test("Thread dump returns an X-App-Version header") {
val service = basicHealthCheckService()
Get("/health/threads") ~> service.routes ~> check {
assert(header("X-App-Version").isDefined)
}
}
private def basicHealthCheckService(root: String = "/") =
new HealthCheckHttpService {
override implicit def actorRefFactory = system
override val basePath = Path(root)
}
}
| blinkboxbooks/common-spray.scala | src/test/scala/com/blinkbox/books/spray/HealthCheckHttpServiceTests.scala | Scala | mit | 4,346 |
/*
* Copyright 2012 Eike Kettner
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.eknet.publet.auth.store
/**
* @author Eike Kettner [email protected]
* @since 06.11.12 00:09
*/
trait PermissionStore {
/**
   * Associates a permission string with the named group.
* @param group
* @param perm
*/
def addPermission(group: String, perm: String)
/**
* Removes the given associated permission string from
* the group. If it is not associated, this just returns.
*
* @param group
* @param perm
*/
def dropPermission(group: String, perm: String)
/**
   * Returns the union of the permission sets of all the given groups.
*
* @param group
* @return
*/
def getPermissions(group: String*): Set[String]
/**
* Returns any permissions specific to a certain user.
*
* @param login
* @return
*/
def getUserPermissions(login: String): Set[String]
}
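// A minimal in-memory sketch of this contract, for illustration only (user-specific
// permissions are not modelled here; the real stores live elsewhere in publet):
//
//   class InMemoryPermissionStore extends PermissionStore {
//     private var groupPerms = Map.empty[String, Set[String]].withDefaultValue(Set.empty)
//     def addPermission(group: String, perm: String) = groupPerms += group -> (groupPerms(group) + perm)
//     def dropPermission(group: String, perm: String) = groupPerms += group -> (groupPerms(group) - perm)
//     def getPermissions(group: String*) = group.flatMap(groupPerms).toSet
//     def getUserPermissions(login: String) = Set.empty[String]
//   }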
| eikek/publet | auth/src/main/scala/org/eknet/publet/auth/store/PermissionStore.scala | Scala | apache-2.0 | 1,433 |
package vexriscv
import spinal.core._
import spinal.lib.bus.bmb.{Bmb, BmbAccessCapabilities, BmbAccessParameter, BmbImplicitDebugDecoder, BmbInvalidationParameter, BmbParameter, BmbInterconnectGenerator}
import spinal.lib.bus.misc.AddressMapping
import spinal.lib.com.jtag.{Jtag, JtagTapInstructionCtrl}
import spinal.lib.generator._
import spinal.lib.slave
import vexriscv.plugin._
import spinal.core.fiber._
object VexRiscvBmbGenerator{
val DEBUG_NONE = 0
val DEBUG_JTAG = 1
val DEBUG_JTAG_CTRL = 2
val DEBUG_BUS = 3
val DEBUG_BMB = 4
}
case class VexRiscvBmbGenerator()(implicit interconnectSmp: BmbInterconnectGenerator = null) extends Area {
import VexRiscvBmbGenerator._
val config = Handle[VexRiscvConfig]
val withDebug = Handle[Int]
val debugClockDomain = Handle[ClockDomain]
val debugReset = Handle[Bool]
val debugAskReset = Handle[() => Unit]
val hardwareBreakpointCount = Handle.sync(0)
val iBus, dBus = Handle[Bmb]
val externalInterrupt = Handle[Bool]
val externalSupervisorInterrupt = Handle[Bool]
val timerInterrupt = Handle[Bool]
val softwareInterrupt = Handle[Bool]
def setTimerInterrupt(that: Handle[Bool]) = Dependable(that, timerInterrupt){timerInterrupt := that}
def setSoftwareInterrupt(that: Handle[Bool]) = Dependable(that, softwareInterrupt){softwareInterrupt := that}
def disableDebug() = {
withDebug.load(DEBUG_NONE)
}
def enableJtag(debugCd : ClockDomainResetGenerator, resetCd : ClockDomainResetGenerator) : Unit = debugCd.rework{
this.debugClockDomain.load(debugCd.outputClockDomain)
val resetBridge = resetCd.asyncReset(debugReset, ResetSensitivity.HIGH)
debugAskReset.loadNothing()
withDebug.load(DEBUG_JTAG)
}
def enableJtagInstructionCtrl(debugCd : ClockDomainResetGenerator, resetCd : ClockDomainResetGenerator) : Unit = debugCd.rework{
this.debugClockDomain.load(debugCd.outputClockDomain)
val resetBridge = resetCd.asyncReset(debugReset, ResetSensitivity.HIGH)
debugAskReset.loadNothing()
withDebug.load(DEBUG_JTAG_CTRL)
}
def enableDebugBus(debugCd : ClockDomainResetGenerator, resetCd : ClockDomainResetGenerator) : Unit = debugCd.rework{
this.debugClockDomain.load(debugCd.outputClockDomain)
val resetBridge = resetCd.asyncReset(debugReset, ResetSensitivity.HIGH)
debugAskReset.loadNothing()
withDebug.load(DEBUG_BUS)
}
val debugBmbAccessSource = Handle[BmbAccessCapabilities]
val debugBmbAccessRequirements = Handle[BmbAccessParameter]
def enableDebugBmb(debugCd : Handle[ClockDomain], resetCd : ClockDomainResetGenerator, mapping : AddressMapping)(implicit debugMaster : BmbImplicitDebugDecoder = null) : Unit = debugCd.on{
this.debugClockDomain.load(debugCd)
val resetBridge = resetCd.asyncReset(debugReset, ResetSensitivity.HIGH)
debugAskReset.loadNothing()
withDebug.load(DEBUG_BMB)
val slaveModel = debugCd on interconnectSmp.addSlave(
accessSource = debugBmbAccessSource,
accessCapabilities = debugBmbAccessSource.derivate(DebugExtensionBus.getBmbAccessParameter(_)),
accessRequirements = debugBmbAccessRequirements,
bus = debugBmb,
mapping = mapping
)
debugBmb.derivatedFrom(debugBmbAccessRequirements)(Bmb(_))
if(debugMaster != null) interconnectSmp.addConnection(debugMaster.bus, debugBmb)
}
val jtag = Handle(withDebug.get == DEBUG_JTAG generate slave(Jtag()))
val jtagInstructionCtrl = withDebug.produce(withDebug.get == DEBUG_JTAG_CTRL generate JtagTapInstructionCtrl())
val debugBus = withDebug.produce(withDebug.get == DEBUG_BUS generate DebugExtensionBus())
val debugBmb = Handle[Bmb]
val jtagClockDomain = Handle[ClockDomain]
val logic = Handle(new Area {
withDebug.get != DEBUG_NONE generate new Area {
config.add(new DebugPlugin(debugClockDomain, hardwareBreakpointCount))
}
val cpu = new VexRiscv(config)
for (plugin <- cpu.plugins) plugin match {
case plugin: IBusSimplePlugin => iBus.load(plugin.iBus.toBmb())
case plugin: DBusSimplePlugin => dBus.load(plugin.dBus.toBmb())
case plugin: IBusCachedPlugin => iBus.load(plugin.iBus.toBmb())
case plugin: DBusCachedPlugin => dBus.load(plugin.dBus.toBmb())
case plugin: CsrPlugin => {
externalInterrupt load plugin.externalInterrupt
timerInterrupt load plugin.timerInterrupt
softwareInterrupt load plugin.softwareInterrupt
if (plugin.config.supervisorGen) externalSupervisorInterrupt load plugin.externalInterruptS
}
case plugin: DebugPlugin => plugin.debugClockDomain {
if(debugAskReset.get != null) when(RegNext(plugin.io.resetOut)) {
debugAskReset.get()
} else {
debugReset.load(RegNext(plugin.io.resetOut))
}
withDebug.get match {
case DEBUG_JTAG => jtag <> plugin.io.bus.fromJtag()
case DEBUG_JTAG_CTRL => jtagInstructionCtrl <> plugin.io.bus.fromJtagInstructionCtrl(jtagClockDomain, 0)
case DEBUG_BUS => debugBus <> plugin.io.bus
case DEBUG_BMB => debugBmb >> plugin.io.bus.fromBmb()
}
}
case _ =>
}
})
logic.soon(debugReset)
val parameterGenerator = new Generator {
val iBusParameter, dBusParameter = product[BmbParameter]
dependencies += config
add task {
for (plugin <- config.plugins) plugin match {
case plugin: IBusSimplePlugin => iBusParameter.load(IBusSimpleBus.getBmbParameter())
case plugin: DBusSimplePlugin => dBusParameter.load(DBusSimpleBus.getBmbParameter())
case plugin: IBusCachedPlugin => iBusParameter.load(plugin.config.getBmbParameter())
case plugin: DBusCachedPlugin => dBusParameter.load(plugin.config.getBmbParameter())
case _ =>
}
}
}
val invalidationSource = Handle[BmbInvalidationParameter]
val invalidationRequirements = Handle[BmbInvalidationParameter]
if(interconnectSmp != null){
interconnectSmp.addMaster(accessRequirements = parameterGenerator.iBusParameter.derivate(_.access), bus = iBus)
interconnectSmp.addMaster(
accessRequirements = parameterGenerator.dBusParameter.derivate(_.access),
invalidationSource = invalidationSource,
invalidationCapabilities = invalidationSource,
invalidationRequirements = invalidationRequirements,
bus = dBus
)
}
}
| SpinalHDL/VexRiscv | src/main/scala/vexriscv/VexRiscvBmbGenerator.scala | Scala | mit | 6,346 |
package im.actor.server.file.local.http
import java.time.{ Duration, Instant }
import akka.actor.ActorSystem
import akka.event.Logging
import akka.http.scaladsl.model.{ HttpResponse, StatusCodes }
import akka.http.scaladsl.model.StatusCodes.OK
import akka.http.scaladsl.model.headers.ContentDispositionTypes.attachment
import akka.http.scaladsl.model.headers.`Content-Disposition`
import akka.http.scaladsl.server.Directives._
import akka.http.scaladsl.server._
import akka.stream.ActorMaterializer
import im.actor.server.api.http.HttpHandler
import im.actor.server.api.http.HttpApiHelpers._
import im.actor.server.file.local.http.fix.GetFileFix
import im.actor.server.file.local.{ FileStorageOperations, LocalFileStorageConfig, RequestSigning }
import im.actor.util.log.AnyRefLogSource
import scala.concurrent.{ ExecutionContext, Future }
import scala.util.{ Failure, Success }
private object FilesRejections {
case object InvalidFileSignature extends ActorCustomRejection
}
private[local] final class FilesHttpHandler(storageConfig: LocalFileStorageConfig)(implicit val system: ActorSystem)
extends HttpHandler
with RequestSigning
with FileStorageOperations
with AnyRefLogSource
with GetFileFix {
import FilesRejections._
protected implicit val mat = ActorMaterializer()
protected implicit val ec: ExecutionContext = system.dispatcher
protected val storageLocation = storageConfig.location
private val log = Logging(system, this)
val rejectionHandler: RejectionHandler =
RejectionHandler.newBuilder()
.handle {
case InvalidFileSignature ⇒
complete(StatusCodes.Forbidden → "Invalid file signature")
}
.result()
// format: OFF
def routes: Route =
extractRequest { request =>
// log.debug("Got file request {}", request)
defaultVersion {
pathPrefix("files" / SignedLongNumber) { fileId =>
options {
log.debug("Responded OK to OPTIONS req: {}", request.uri)
complete(HttpResponse(OK))
} ~
validateRequest {
get {
            //we use `Segments` because we have to match paths like:
//v1/files/:fileId/:fileName
//v1/files/:fileId
path(Segments(0, 1)) { seqName =>
log.debug("Download file request, fileId: {}", fileId)
onComplete(getFile(fileId)) {
case Success(Some(file)) =>
log.debug("Serving fileId: {}, file: {} parts", fileId, file)
respondWithDefaultHeader(
`Content-Disposition`(attachment, Map("filename" -> file.name))
) {
                  //TODO: remove as soon as https://github.com/akka/akka/issues/20338 gets fixed
getFromFileFix(file.toJava)
}
case Success(None) =>
complete(HttpResponse(StatusCodes.NotFound))
case Failure(e) =>
log.error(e, "Failed to get file content, fileId: {}", fileId)
complete(HttpResponse(500))
}
}
} ~
put {
pathSuffix(IntNumber) { partNumber =>
log.debug("Upload file part request, fileId: {}, partNumber: {}", fileId, partNumber)
extractRequest { req =>
val writeFu = for {
_ <- prepareForPartWrite(fileId, partNumber)
_ <- appendPartBytes(req.entity.dataBytes, fileId, partNumber)
_ <- Future {}
} yield ()
onComplete(writeFu) {
case Success(_) =>
log.debug("Successfully uploaded part #{} of file: {} ", partNumber, fileId)
complete(HttpResponse(200))
case Failure(e) =>
log.error(e, "Failed to upload file: {}", fileId)
complete(HttpResponse(500))
}
}
}
}
}
}
}
}
// format: ON
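  /**
   * Validates the `signature` and `expires` query parameters: the signature is recomputed
   * over the request method and the URI with the `signature` parameter stripped, and the
   * request is rejected with [[FilesRejections.InvalidFileSignature]] if the signatures
   * differ or the expiration timestamp is not acceptable.
   */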
def validateRequest: Directive0 =
extractRequestContext.flatMap[Unit] { ctx ⇒
parameters(("signature", "expires".as[Long])) tflatMap {
case (signature, expiresAt) ⇒
val request = ctx.request
val uriWithoutSignature = request.uri.withQuery(request.uri.query() filterNot { case (k, _) ⇒ k == "signature" })
val notExpired = isNotExpired(expiresAt)
val calculatedSignature = calculateSignature(request.method, uriWithoutSignature)
if (notExpired && calculatedSignature == signature) pass else {
log.debug("Failed to validate request: {}, notExpired: {}, signature: {}; calculated signature: {}", notExpired, ctx.request, signature, calculatedSignature)
reject(InvalidFileSignature)
}
}
}
private def isNotExpired(expiresAt: Long) = expiresAt <= Instant.now.plus(Duration.ofDays(1)).toEpochMilli
}
| y0ke/actor-platform | actor-server/actor-fs-adapters/src/main/scala/im/actor/server/file/local/http/FilesHttpHandler.scala | Scala | agpl-3.0 | 5,067 |
/*
* Copyright 2015 – 2018 Paul Horn
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.knutwalker.akka.http
import de.knutwalker.akka.stream.JsonStreamParser
import akka.http.scaladsl.model.HttpEntity
import akka.http.scaladsl.model.MediaTypes.`application/json`
import akka.http.scaladsl.unmarshalling.{ FromEntityUnmarshaller, Unmarshaller }
import akka.http.scaladsl.util.FastFuture
import akka.stream.scaladsl.Sink
import akka.stream.stage.{ GraphStageLogic, GraphStageWithMaterializedValue, InHandler }
import akka.stream.{ AbruptStageTerminationException, Attributes, Inlet, SinkShape }
import jawn.Facade
import scala.concurrent.{ Future, Promise }
import java.util.NoSuchElementException
object JsonSupport extends JsonSupport {
private def firstElementSink[J <: AnyRef]: Sink[J, Future[J]] =
Sink.fromGraph(new FirstElementSinkStage[J])
private final class FirstElementSinkStage[J <: AnyRef] extends GraphStageWithMaterializedValue[SinkShape[J], Future[J]] {
private[this] val in: Inlet[J] = Inlet("firstElement.in")
override val shape: SinkShape[J] = SinkShape.of(in)
override protected def initialAttributes: Attributes = Attributes.name("firstElement")
override def createLogicAndMaterializedValue(inheritedAttributes: Attributes): (GraphStageLogic, Future[J]) = {
val p: Promise[J] = Promise()
(new GraphStageLogic(shape) with InHandler {
private[this] var element: J = null.asInstanceOf[J]
override def preStart(): Unit = pull(in)
def onPush(): Unit = {
if (element eq null) {
element = grab(in)
}
pull(in)
}
override def onUpstreamFinish(): Unit = {
val el = element
element = null.asInstanceOf[J]
if (el ne null) {
p.trySuccess(el)
} else {
p.tryFailure(new NoSuchElementException("No complete json entity consumed"))
}
completeStage()
}
override def onUpstreamFailure(ex: Throwable): Unit = {
element = null.asInstanceOf[J]
p.tryFailure(ex)
failStage(ex)
}
override def postStop(): Unit = {
if (!p.isCompleted) {
p.failure(new AbruptStageTerminationException(this))
()
}
}
setHandler(in, this)
}, p.future)
}
override def toString: String = "FirstElementSinkStage"
}
}
trait JsonSupport {
implicit def jsonUnmarshaller[J <: AnyRef : Facade]: FromEntityUnmarshaller[J] =
Unmarshaller.withMaterializer[HttpEntity, J](_ => implicit mat => {
case HttpEntity.Strict(_, data) => FastFuture(JsonStreamParser.parse[J](data))
case entity => entity.dataBytes.via(JsonStreamParser[J]).runWith(JsonSupport.firstElementSink[J])
}).forContentTypes(`application/json`)
}
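// Illustrative usage sketch (not part of the original file); assumes the jawn-ast
// module so that a JSON AST and its Facade are available:
//
//   import de.knutwalker.akka.http.JsonSupport._
//   implicit val facade: Facade[jawn.ast.JValue] = jawn.ast.JawnFacade
//
//   val route = post {
//     entity(as[jawn.ast.JValue]) { json => complete(json.toString) }
//   }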
| knutwalker/akka-stream-json | http-json/src/main/scala/de/knutwalker/akka/http/JsonSupport.scala | Scala | apache-2.0 | 3,399 |
package examples.demo
import java.awt.Dimension
import examples.demo.GModularClockCircle.Clock
import examples.demo.ui.ShapesPanel
import scala.swing.{MainFrame, SimpleSwingApplication, UIElement}
abstract class Main extends SimpleSwingApplication {
val panel: ShapesPanel
override lazy val top = {
panel.preferredSize = new Dimension(400, 300)
new MainFrame {
title = "REScala Demo"
contents = panel
setLocationRelativeTo(new UIElement { override def peer = null })
}
}
override def main(args: Array[String]): Unit = {
super.main(args)
while(!top.visible) Thread.sleep(5)
while(top.visible) {
Thread.sleep(1)
Clock.tick()
}
}
}
| volkc/REScala | Examples/examples/src/main/scala/examples/demo/Main.scala | Scala | apache-2.0 | 704 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark
import scala.concurrent.Await
import scala.concurrent.duration.Duration
import org.scalatest.{BeforeAndAfter, Matchers}
class FutureActionSuite
extends SparkFunSuite
with BeforeAndAfter
with Matchers
with LocalSparkContext {
before {
sc = new SparkContext("local", "FutureActionSuite")
}
  test("simple async action") {// simple asynchronous action
    val rdd = sc.parallelize(1 to 10, 2)
    val job = rdd.countAsync()// asynchronous count
    //Await.result or Await.ready blocks the current thread until the actor's reply completes the Future
    val res = Await.result(job, Duration.Inf)//Duration.Inf: wait indefinitely
res should be (10)
job.jobIds.size should be (1)
}
  test("complex async action") {// complex asynchronous action
val rdd = sc.parallelize(1 to 15, 3)
val job = rdd.takeAsync(10)
val res = Await.result(job, Duration.Inf)
res should be (1 to 10)
job.jobIds.size should be (2)
}
}
| tophua/spark1.52 | core/src/test/scala/org/apache/spark/FutureActionSuite.scala | Scala | apache-2.0 | 1,773 |
/***********************************************************************
* Copyright (c) 2013-2022 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.utils.conf
import com.typesafe.scalalogging.LazyLogging
import org.junit.runner.RunWith
import org.specs2.mutable.Specification
import org.specs2.runner.JUnitRunner
@RunWith(classOf[JUnitRunner])
class SemanticVersionTest extends Specification with LazyLogging {
"SemanticVersion" should {
"parse normal versions" in {
SemanticVersion("2.0.0") mustEqual SemanticVersion(2, 0, 0)
SemanticVersion("1.3.5") mustEqual SemanticVersion(1, 3, 5)
}
"parse pre-release versions" in {
SemanticVersion("2.0.0-SNAPSHOT") mustEqual SemanticVersion(2, 0, 0, Some("SNAPSHOT"))
SemanticVersion("1.0.0-alpha") mustEqual SemanticVersion(1, 0, 0, Some("alpha"))
SemanticVersion("1.0.0-alpha.1") mustEqual SemanticVersion(1, 0, 0, Some("alpha.1"))
SemanticVersion("1.0.0-0.3.7") mustEqual SemanticVersion(1, 0, 0, Some("0.3.7"))
SemanticVersion("1.0.0-x.7.z.92") mustEqual SemanticVersion(1, 0, 0, Some("x.7.z.92"))
}
"parse build versions" in {
SemanticVersion("1.0.0-alpha+001") mustEqual SemanticVersion(1, 0, 0, Some("alpha"), Some("001"))
SemanticVersion("1.0.0+20130313144700") mustEqual SemanticVersion(1, 0, 0, build = Some("20130313144700"))
SemanticVersion("1.0.0-beta+exp.sha.5114f85") mustEqual SemanticVersion(1, 0, 0, Some("beta"), Some("exp.sha.5114f85"))
}
"sort correctly" in {
val sorted = Seq (
Seq("1.0.0", "2.0.0", "2.1.0", "2.1.1"),
Seq("1.0.0-alpha", "1.0.0"),
Seq("1.0.0-alpha", "1.0.0-alpha.1", "1.0.0-alpha.beta", "1.0.0-beta", "1.0.0-beta.2", "1.0.0-beta.11", "1.0.0-rc.1", "1.0.0")
).map(_.map(SemanticVersion.apply))
// should already be sorted, so verify sorting doesn't change order
foreach(sorted)(s => s.sorted mustEqual s)
}
"handle non-semantic releases" in {
SemanticVersion("1.3.5.1") must throwAn[Exception]
SemanticVersion("1.3.5.1", lenient = true) mustEqual SemanticVersion(1, 3, 5)
}
}
}
| locationtech/geomesa | geomesa-utils/src/test/scala/org/locationtech/geomesa/utils/conf/SemanticVersionTest.scala | Scala | apache-2.0 | 2,475 |
object Test {
def foo[A] = implicitly[OptManifest[A]] // was "unpositioned tree" under -Yrangepos
// These did not crash, but testing for good measure.
implicitly[OptManifest[String]]
implicitly[Manifest[String]]
implicitly[reflect.ClassTag[String]]
implicitly[reflect.runtime.universe.TypeTag[String]]
}
| loskutov/intellij-scala | testdata/scalacTests/pos/t8617.scala | Scala | apache-2.0 | 319 |
/*
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.flaminem.flamy.exec.hive
import java.io.File
import java.sql.SQLException
import com.flaminem.flamy.conf.{Environment, FlamyConfVars, FlamyContext}
import com.flaminem.flamy.exec.files.{FileRunner, ItemFileAction}
import com.flaminem.flamy.exec.utils.Action
import com.flaminem.flamy.exec.utils.io.FlamyOutput
import com.flaminem.flamy.graph.TableGraph
import com.flaminem.flamy.model._
import com.flaminem.flamy.model.core.Model
import com.flaminem.flamy.model.exceptions.FailedQueryException
import com.flaminem.flamy.model.files._
import com.flaminem.flamy.parsing.hive.QueryUtils
import com.flaminem.flamy.utils.sql._
import com.flaminem.flamy.utils.sql.hive.StreamedResultSet
import scala.util.control.NonFatal
import scala.util.{Failure, Success, Try}
/**
* A HiveRunner provides all the necessary methods to execute queries on a Hive environment.
*
* Implementing classes should have a close() method to allow systematic closing of connections.
* It should provide the following guarantees:
* - If close() is called and no other method is called afterwards then we guarantee that no connection or other closeable resource is left open.
* - If another method is called after close() has been called, this method should work normally (by re-opening the previously closed resources if necessary).
*
* This means that a HiveRunner must still be functional after being closed.
*
* @param context
*/
//noinspection ScalaStyle
abstract class HiveRunner(val context: FlamyContext) extends AutoCloseable {
/**
* Run a query and ignore the result.
*
* @param query
   * @param title title of the query. May be used to display information.
* @return
*/
protected def runPreparedQuery(query: String, title: Option[String]): Int
/**
   * Run a query and return its result.
*
* @param query
   * @param title title of the query. May be used to display information.
* @return
*/
protected def executePreparedQuery(query: String, title: Option[String]): StreamedResultSet
def runCreate(text: String, variables: Variables): Unit
def runCheck(text: String, variables: Variables): Unit
val dryRun: Boolean = context.dryRun
private def addPrefix(query: String, prefix: String) = {
val lowerCaseQuery: String = query.toUpperCase
if (lowerCaseQuery.startsWith("SET") || lowerCaseQuery.startsWith("USE") || lowerCaseQuery.startsWith("MSCK") || lowerCaseQuery.startsWith("ADD JAR")) {
query
}
else {
prefix + query
}
}
private def addExplainPrefix(query: String): String = {
addPrefix(query, "EXPLAIN ")
}
private def addExplainDependencyPrefix(query: String): String = {
addPrefix(query, "EXPLAIN DEPENDENCY ")
}
protected def handleQueryFailure(query: String, e: Throwable): Nothing = {
System.err.println(f"FAILED query : \n$query")
    Option(e.getMessage).foreach{msg => System.err.println(f"MESSAGE : \n$msg")}
System.err.flush()
throw new FailedQueryException(e.getMessage, e)
}
private def prepareQuery(query: String, variables: Variables, explainIfDryRun: Boolean): String = {
try {
val buildQ: String = variables.replaceInText(query)
if (dryRun && explainIfDryRun) {
addExplainPrefix(buildQ)
}
else{
buildQ
}
}
catch {
case NonFatal(e) => handleQueryFailure(query, e)
}
}
def runQuery(query: String, title: Option[String], variables: Variables, explainIfDryRun: Boolean): Int = {
val preparedQuery: String = prepareQuery(query, variables, explainIfDryRun)
Try {
FlamyOutput.out.info("Running query:\n" + preparedQuery)
runPreparedQuery(preparedQuery, title)
}
match {
case Failure(e) => handleQueryFailure(preparedQuery, e)
case Success(returnValue) if returnValue > 0 => throw new FailedQueryException
case Success(returnValue) => returnValue
}
}
/**
   * Should only be used to execute small queries; intended for experimental features.
   * @param query the query to execute
*/
def executeQuery(query: String): StreamedResultSet = {
executeQuery(query, None, new Variables, explainIfDryRun = true)
}
def getInputsFromQuery(query: String, variables: Variables): Seq[Inputs] = {
Try {
val preparedQuery = addExplainDependencyPrefix(prepareQuery(query, variables, explainIfDryRun = false))
executePreparedQuery(preparedQuery, None)
}
match {
case Failure(e: java.sql.SQLException) =>
/* For EXPLAIN DEPENDENCY, we ignore java.sql.SQLExceptions, as the Spark ThrifServer does not support this command yet */
Nil
case Failure(e) =>
handleQueryFailure(query, e)
throw new FailedQueryException(e.getMessage, e)
case Success(returnValue) =>
returnValue.map{row => Inputs.fromJson(row.head)}.toSeq
}
}
def executeQuery(query: String, title: Option[String], variables: Variables, explainIfDryRun: Boolean): StreamedResultSet = {
Try{
val preparedQuery = prepareQuery(query, variables, explainIfDryRun)
executePreparedQuery(preparedQuery, title)
}
match {
case Failure(e) => handleQueryFailure(query, e)
throw new FailedQueryException(e.getMessage, e)
case Success(returnValue) => returnValue
}
}
/**
* Uses Hive's EXPLAIN DEPENDENCY to obtain the list of input tables and partitions.
* @param populateInfo
* @param closeAfter
*/
def getInputsFromPopulate(populateInfo: PopulateInfo, closeAfter: Boolean = true): Seq[Inputs] = {
getInputsFromText(populateInfo.tableFile.text, populateInfo.variables, closeAfter)
}
def runPopulate(populateInfo: PopulateInfo, closeAfter: Boolean = true): Unit = {
runText(populateInfo.tableFile.text, Some(s"POPULATE ${populateInfo.title}"), populateInfo.variables, explainIfDryRun = true, closeAfter)
}
def runPresets(file: PresetsFile): Unit = runText(file.text, None, context.getVariables, explainIfDryRun = false, closeAfter = false)
def runCheckTable(file: TableFile): Unit = runCheck(file.text, context.getVariables)
def runCheckSchema(file: SchemaFile): Unit = runCheck(file.text, context.getVariables)
def runCheckView(file: TableFile): Unit = runCheck(file.text, context.getVariables)
def runCreateTable(file: TableFile): Unit = runCreate(file.text, context.getVariables)
def runCreateView(file: TableFile): Unit = runCreate(file.text, context.getVariables)
def runCreateSchema(file: SchemaFile): Unit = runCreate(file.text, context.getVariables)
def runTest(file: TableFile, variables: Variables): Unit = {
runTestText(file.text, Some(s"TEST ${file.tableName.toString}"), variables, explainIfDryRun = true)
}
def checkResults(resultSet: StreamedResultSet): Option[String] = {
if(!resultSet.hasNext) {
Some("Results are empty")
}
else {
val row: ResultRow = resultSet.next
if(resultSet.hasNext) {
Some(
s"""Tests should output only one row : got
|$row
|${resultSet.next}
             |""".stripMargin
)
}
else {
row.headOption
}
}
}
def runTestQuery(query: String, title: Option[String], variables: Variables, explainIfDryRun: Boolean): Unit = {
val preparedQuery = prepareQuery(query, variables, explainIfDryRun)
val resultSet = executePreparedQuery(preparedQuery, title)
val result: Option[String] =
if(dryRun && explainIfDryRun) {
None
}
else {
checkResults(resultSet)
}
result match {
case Some(s) if s.startsWith("ok") =>
FlamyOutput.out.success(s)
case Some(s) =>
FlamyOutput.out.failure(s)
case None => ()
}
}
def runTestText(text: String, title: Option[String], variables: Variables, explainIfDryRun: Boolean): Unit = {
QueryUtils.cleanAndSplitQuery(text).foreach{ query => runTestQuery(query, title, variables, explainIfDryRun) }
}
/**
* Execute a text command.
*
* @param text text of comma-separated queries to execute
* @param title title of the query, used to define the name of the YARN job
* @param explainIfDryRun add explain before each query if dry-run mode is activated
* @param closeAfter if true, the method close() will be called after the command is executed (optional, default = true).
*/
def runText(text: String, title: Option[String], variables: Variables, explainIfDryRun: Boolean, closeAfter: Boolean = true): Unit = {
QueryUtils.cleanAndSplitQuery(text).foreach {
query => runQuery(query, title, variables, explainIfDryRun)
}
if(closeAfter) {
close()
}
else {
runPreparedQuery("USE default", None)
}
}
/**
* Uses Hive's EXPLAIN DEPENDENCY to obtain the list of input tables and partitions.
*
* @param text text of comma-separated queries to execute
* @param closeAfter if true, the method close() will be called after the command is executed (optional, default = true).
*/
def getInputsFromText(text: String, variables: Variables, closeAfter: Boolean = true): Seq[Inputs] = {
val res: Seq[Inputs] =
QueryUtils.cleanAndSplitQuery(text).flatMap {
query => getInputsFromQuery(query, variables)
}
if(closeAfter) {
close()
}
else {
runPreparedQuery("USE default", None)
}
res
}
private val fileRunner: FileRunner = new FileRunner
def checkAll(graph: TableGraph): Unit = {
fileRunner.run(runCheckSchema(_) , graph.model.fileIndex.getAllSchemaFilesOfType(FileType.CREATE_SCHEMA))
fileRunner.run(runCheckTable(_) , graph.getTableFilesOfType(FileType.CREATE))
fileRunner.run(runCheckView(_) , graph.getTopologicallySortedViewFiles)
}
def runPresets(context: FlamyContext): Unit = {
context.HIVE_PRESETS_PATH.getProperty match {
case Some(path) =>
FlamyOutput.out.println(f"Running presets: $path")
fileRunner.run(runPresets(_:PresetsFile), new PresetsFile(new File(path))::Nil)
case _ =>
FlamyOutput.out.println("No presets to run")
}
}
def populateAll(model: Model, context: FlamyContext): Unit = {
runPresets(context)
val actions: Iterable[Action] =
for {
tableInfo: TableInfo <- model.getAllTables
populateInfo: PopulateInfo <- tableInfo.populateInfos
tableFile: TableFile = populateInfo.tableFile
} yield {
new ItemFileAction(tableFile) {
override def run(): Unit = {
/* We replace partition values with dummy constant so that the validation passes */
val vars = populateInfo.variables.cancelPartitions(tableInfo.partitions)
runPopulate(populateInfo.copy(variables = vars), closeAfter = false)
}
}
}
fileRunner.run(actions)
close()
}
def testAll(fileIndex: FileIndex, context: FlamyContext): Unit = {
fileRunner.run(runTest(_ : TableFile, context.getVariables) , fileIndex.getAllTableFilesOfType(FileType.TEST))
}
def getStats: FileRunner#Stats = {
fileRunner.getStats
}
/**
* Retrieve the value of given configuration parameter.
*
* @param key
* @return
*/
def getConfiguration(key: String): Option[String]
/**
* Override this to make the implementation AutoCloseable.
* This method should provide the following guarantee :
* - If close() is called and no other method is called afterwards then we guarantee that no connection or other closeable resource is left open.
* - If another method is called after close() has been called,
* this method should work normally (by re-opening the previously closed resources if necessary).
*/
def close(): Unit
def interrupt(): Unit
}
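/*
 * Illustrative usage sketch (not part of the original file); `context` stands for an
 * already-configured FlamyContext, the query text is made up:
 *
 *   val runner = HiveRunner(context)
 *   try {
 *     runner.runText("SELECT 1", Some("sanity check"), context.getVariables,
 *       explainIfDryRun = true, closeAfter = false)
 *   } finally {
 *     runner.close()
 *   }
 */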
object HiveRunner {
def apply(context: FlamyContext): HiveRunner = {
val runner: HiveRunner =
if (context.env == Environment.MODEL_ENV) {
new ModelHiveRunner(context)
}
else if (context.HIVE_RUNNER_TYPE.getProperty.toLowerCase == "local") {
new LocalHiveRunner(context)
}
else {
new RemoteHiveRunner(context)
}
runner.runPresets(context)
runner
}
}
 | flaminem/flamy | src/main/scala/com/flaminem/flamy/exec/hive/HiveRunner.scala | Scala | apache-2.0 | 12,816 |
/*
* Copyright 2014–2018 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.contrib.spire.random
import scalaz.{Functor, Monad, Zip}
import spire.random.Dist
trait DistInstances {
implicit val distMonad: Monad[Dist] =
new Monad[Dist] {
def point[A](a: => A) =
Dist.constant(a)
def bind[A, B](fa: Dist[A])(f: A => Dist[B]) =
fa flatMap f
override def map[A, B](fa: Dist[A])(f: A => B) =
fa map f
}
implicit val distZip: Zip[Dist] =
new Zip[Dist] {
def zip[A, B](a: => Dist[A], b: => Dist[B]) =
a zip b
override def zipWith[A, B, C](a: => Dist[A], b: => Dist[B])(f: (A, B) => C)(implicit F: Functor[Dist]) =
a.zipWith(b)(f)
}
}
object dist extends DistInstances
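// Illustrative usage sketch (not part of the original file): with these instances in
// implicit scope, scalaz's generic combinators work on spire's `Dist`, e.g.
//
//   import quasar.contrib.spire.random.dist._
//   val pair: Dist[(Int, Int)] = Zip[Dist].zip(Dist.constant(1), Dist.constant(2))
//   val sum: Dist[Int]        = Monad[Dist].bind(pair) { case (a, b) => Dist.constant(a + b) }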
| jedesah/Quasar | foundation/src/main/scala/quasar/contrib/spire/random/dist.scala | Scala | apache-2.0 | 1,301 |
package org.amcgala.vr
import akka.actor.{ PoisonPill, ActorRef, ActorSystem, Props }
import scala.util.Random
import org.amcgala._
import org.amcgala.shape.Rectangle
import org.amcgala.math.Vertex3f
import org.amcgala.vr.building.{ BuildingType, TownHall, Building }
/**
* The absolute position of an entity in the [[Simulation]].
 * @param x the x coordinate on the map
 * @param y the y coordinate on the map
*/
case class Coordinate(x: Int, y: Int)
/**
* A Cell in the simulated world map.
* @param cellType the [[CellType]] of the Cell.
*/
case class Cell(cellType: CellType)
object SimulationAgent {
/**
* Registers a new [[BotAgent]] with the [[SimulationAgent]].
* @param bot the [[ActorRef]] of the [[BotAgent]]
* @param position the [[Coordinate]] in the simulated world
*/
case class RegisterAgentRequest(bot: ActorRef, position: Coordinate)
case class RegisterBuildingRequest(bot: ActorRef, position: Coordinate, buildingType: BuildingType)
/**
* Removes a [[BotAgent]] from the [[SimulationAgent]]
*/
case object UnregisterRequest
/**
* Changes the [[CellType]] of a Cell.
* @param index the [[Coordinate]] of the [[Cell]] in the world
* @param cellType the new [[CellType]]
*/
case class CellTypeChangeRequest(index: Coordinate, cellType: CellType)
/**
* Changes the [[Coordinate]] of a [[BotAgent]].
* @param position the new position
*/
case class PositionChangeRequest(position: Coordinate)
/**
   * A request for a Bot's current position. The [[SimulationAgent]] responds with the Bot's [[Coordinate]].
* @param ref the [[ActorRef]] of the Bot
*/
case class PositionRequest(ref: ActorRef)
/**
* The SimulationAgent answers this message with a [[Cell]] instance if the [[ActorRef]] is known.
* @param ref the [[ActorRef]] of the requesting Bot
*/
case class CellRequest(ref: ActorRef)
case class CellAtIndexRequest(index: Coordinate)
/**
   * The SimulationAgent answers this message with a [[VicinityReponse]] containing the Bots and Buildings that are
   * in the vicinity of the requesting Bot.
* @param ref the [[ActorRef]] of the requesting Bot
* @param distance the radius of the vicinity
*/
case class VicinityRequest(ref: ActorRef, distance: Int)
case class VicinityReponse(bots: Map[ActorRef, Coordinate], buildings: Map[ActorRef, (Coordinate, BuildingType)])
case class VisibleCellsRequest(ref: ActorRef, distance: Int)
def props(width: Int, height: Int) = Props(new SimulationAgent(width, height))
}
class SimulationAgent(val width: Int, val height: Int) extends Agent {
import SimulationAgent._
var field = (for {
x ← 0 until width
y ← 0 until height
} yield Coordinate(x, y) -> Cell(CellType.Floor)).toMap
var agentPositions = Map[ActorRef, Coordinate]()
var buildingPositions = Map[ActorRef, (Coordinate, BuildingType)]()
val townHall = context.actorOf(TownHall.props(), "town-hall")
val townHallLocation = Coordinate(100, 100)
self ! RegisterBuildingRequest(townHall, townHallLocation, BuildingType.TownHall)
val framework = Framework.getInstance(FrameworkMode.SOFTWARE)
val scene = new Scene("vis")
val scaleX = framework.getWidth / width
val scaleY = framework.getHeight / height
val rectangles = Array.ofDim[Rectangle](width, height)
for {
x ← 0 until width
y ← 0 until height
} {
val rect = new Rectangle(new Vertex3f(x * scaleX, y * scaleY, -1), scaleX, scaleY)
rect.setColor(RGBColor.BLACK)
scene.addShape(rect)
rectangles(x)(y) = rect
}
framework.loadScene(scene)
registerOnTickAction("drawing", () ⇒ {
for (e ← field) {
val coordinate = e._1
val cell = e._2
rectangles(coordinate.x.toInt)(coordinate.y.toInt).setColor(cell.cellType.color)
}
val posIt = agentPositions.iterator
while (posIt.hasNext) {
val next = posIt.next()
rectangles(next._2.x)(next._2.y).setColor(RGBColor.GREEN)
}
val buildingsIt = buildingPositions.iterator
while (buildingsIt.hasNext) {
val next = buildingsIt.next()
rectangles(next._2._1.x)(next._2._1.y).setColor(RGBColor.BLUE)
}
})
def receive: Receive = {
case RegisterAgentRequest(bot, position) ⇒
if (position.x >= 0 && position.x < width && position.y >= 0 && position.y < height) {
if (field(position).cellType != CellType.Forbidden) {
bot ! BotAgent.Introduction(townHallLocation)
bot ! BotAgent.PositionChange(position)
agentPositions = agentPositions + (bot -> position)
townHall.tell(TownHall.RegisterBot, bot)
}
} else {
bot ! PoisonPill
}
case RegisterBuildingRequest(ref, position, buildingType) ⇒
field.get(position) match {
case Some(cell) ⇒
if (cell.cellType != CellType.Forbidden) {
ref ! BotAgent.Introduction(townHallLocation)
ref ! SimulationAgent.PositionChangeRequest(position)
val info = (position, buildingType)
buildingPositions = buildingPositions + (ref -> info)
townHall.tell(TownHall.RegisterBuilding(buildingType), ref)
}
case None ⇒ ref ! PoisonPill
}
case CellTypeChangeRequest(coordinate, cellType) ⇒
for (cell ← field.get(coordinate)) {
field += (coordinate -> Cell(cellType))
}
case PositionChangeRequest(position) ⇒
for (cell ← field.get(position)) {
if (cell.cellType != CellType.Forbidden) {
agentPositions = agentPositions + (sender() -> position)
sender() ! BotAgent.PositionChange(position)
}
}
case PositionRequest(ref) ⇒
for (pos ← agentPositions.get(ref)) {
sender() ! pos
}
case VicinityRequest(ref, dis) ⇒
val pos = agentPositions(ref)
val v = VicinityReponse(agentPositions.filter(t ⇒ Utils.manhattanDistance(pos, t._2) <= dis && t._1 != ref), buildingPositions.filter(t ⇒ Utils.manhattanDistance(pos, t._2._1) < dis && t._1 != ref))
sender() ! v
case CellRequest(ref) ⇒
val position = agentPositions(ref)
sender() ! field(position)
case CellAtIndexRequest(coordinate) ⇒
for (cell ← field.get(coordinate)) {
sender() ! cell
}
case UnregisterRequest ⇒
agentPositions = agentPositions - sender()
case VisibleCellsRequest(ref, distance) ⇒
val position = agentPositions(ref)
val cells = field.filter(e ⇒ Utils.manhattanDistance(position, e._1) <= distance).toMap
sender() ! cells
}
}
class Simulation(val width: Int, val height: Int)(implicit system: ActorSystem) {
import SimulationAgent._
private val heartbeat = system.actorOf(HeartBeat.props())
private val sim = system.actorOf(SimulationAgent.props(width, height))
/**
* Creates a new instance of a [[BotAgent]].
* @param cls the class of the bot, must be a subclass of [[BotAgent]]
* @param position the starting position of the bot
* @tparam T the class type of this bot
   * @return a [[Bot]] handle wrapping the spawned actor's [[ActorRef]]
*/
def spawnBot[T <: BotAgent](cls: Class[T], position: Coordinate): Bot = {
val bot = system.actorOf(Props(cls))
sim ! RegisterAgentRequest(bot, position)
Bot(bot)
}
def spawnBot[T <: BotAgent](cls: Class[T]): Bot = {
val bot = system.actorOf(Props(cls))
sim ! RegisterAgentRequest(bot, randomPosition())
Bot(bot)
}
def spawnBuilding[T <: Building](cls: Class[T], position: Coordinate, buildingType: BuildingType): ActorRef = {
val building = system.actorOf(Props(cls))
sim ! RegisterBuildingRequest(building, position, buildingType)
building
}
def changeCellType(index: Coordinate, cellType: CellType) = {
sim ! CellTypeChangeRequest(index, cellType)
}
def randomPosition(): Coordinate = Coordinate(Random.nextInt(width), Random.nextInt(height))
}
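/*
 * Illustrative usage sketch (not part of the original file); `MyBot` stands in for any
 * concrete BotAgent subclass:
 *
 *   implicit val system = ActorSystem("simulation")
 *   val sim = new Simulation(200, 150)
 *   sim.changeCellType(Coordinate(10, 10), CellType.Forbidden)
 *   val bot = sim.spawnBot(classOf[MyBot], Coordinate(5, 5))
 */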
| th-koeln/amcgala-vr | src/main/scala/org/amcgala/vr/Simulation.scala | Scala | apache-2.0 | 7,925 |
// Copyright 2014-2016 Leonardo Schwarz (leoschwarz.com)
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.leoschwarz.quest_on.data.survey_ast
import org.json4s.JsonAST.{JArray, JNull, JObject, JString}
import scala.collection.mutable.ArrayBuffer
class Page(val items: IndexedSeq[PageItem],
val timelimit: Option[Timelimit],
val actions: IndexedSeq[String]) {
}
object Page {
def parse(pageData: JObject, logger: ParserLogger): Option[Page] = {
val actions = pageData \\ "actions" match {
case JArray(values) => {
val buffer = new ArrayBuffer[String](values.length)
for (value <- values) value match {
case JString(v) => buffer += v
case _ => {
logger("Page action is of invalid type (must be string).")
return None
}
}
buffer
}
case _ => {
logger("Page actions attribute is of invalid type (must be array).")
return None
}
}
val timelimit = pageData \\ "timelimit" match {
case obj: JObject => {
val limit = Timelimit.parse(obj, logger)
if (limit.isEmpty) {
logger("Parsing timelimit failed.")
return None
}
limit
}
case _ => None
}
val items = pageData \\ "items" match {
case JArray(items) => {
val buffer = new ArrayBuffer[PageItem](items.length)
for (item <- items) item match {
case obj: JObject => {
val i = PageItem.parse(obj, logger)
if (i.isEmpty) {
logger("Failed parsing pageitem.")
return None
}
buffer += i.get
}
case _ => {
logger("Page item is of wrong type. (must be json object)")
return None
}
}
buffer
}
case _ => {
logger("Page 'items' attribute is of wrong kind. (must be array)")
return None
}
}
Some(new Page(items, timelimit, actions))
}
} | evotopid/quest_on | src/main/scala/com/leoschwarz/quest_on/data/survey_ast/Page.scala | Scala | apache-2.0 | 2,548 |
/*
* Copyright 2022 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package mocks.services
import mocks.Mock
import org.mockito.stubbing.OngoingStubbing
import org.scalatest.Suite
import play.api.mvc.Request
import router.httpParsers.SelfAssessmentHttpParser.SelfAssessmentOutcome
import router.services.PropertyEopsObligationsService
import uk.gov.hmrc.http.HeaderCarrier
import scala.concurrent.Future
trait MockPropertyEopsObligationsService extends Mock { _: Suite =>
val mockPropertyEopsObligationsService = mock[PropertyEopsObligationsService]
object MockPropertyEopsObligationsService {
def get(): OngoingStubbing[Future[SelfAssessmentOutcome]] = {
when(mockPropertyEopsObligationsService.get()(any[HeaderCarrier](), any[Request[_]]()))
}
}
override protected def beforeEach(): Unit = {
super.beforeEach()
reset(mockPropertyEopsObligationsService)
}
}
| hmrc/self-assessment-api | test/mocks/services/MockPropertyEopsObligationsService.scala | Scala | apache-2.0 | 1,434 |
package sampleclean.clean.extraction
import sampleclean.api.SampleCleanContext
import org.apache.spark.SparkContext._
import org.apache.spark.sql.SQLContext
import sampleclean.clean.algorithm.SampleCleanAlgorithm
import sampleclean.clean.algorithm.AlgorithmParameters
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{SchemaRDD, Row}
import sampleclean.activeml._
import org.apache.spark.mllib.regression.LabeledPoint
import org.apache.spark.graphx._
import sampleclean.crowd._
import sampleclean.crowd.context.{DeduplicationPointLabelingContext, DeduplicationGroupLabelingContext}
/**
* The Abstract Deduplication class builds the structure for
* subclasses that implement deduplication. It has two basic
* primitives a blocking function and then an apply function (implemented)
* in subclasses.
*/
abstract class AbstractExtraction(params:AlgorithmParameters,
scc: SampleCleanContext, sampleTableName: String) extends
SampleCleanAlgorithm(params, scc, sampleTableName) {
if(!params.exists("newSchema"))
throw new RuntimeException("You need to specify a set of new cols: newSchema")
val newCols = params.get("newSchema").asInstanceOf[List[String]]
def exec()={
val data = scc.getCleanSample(sampleTableName)
val hc = scc.getHiveContext()
val cleanSampleTable = scc.qb.getCleanSampleName(sampleTableName)
val dirtySampleTable = scc.qb.getDirtySampleName(sampleTableName)
val baseTable = scc.getParentTable(scc.qb.getDirtySampleName(sampleTableName))
val existingCols = scc.getTableContext(sampleTableName)
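    // Determine which of the requested output columns are still missing from the
    // sample's schema; only those are added to the tables below.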
var actualNewCols = List[String]()
for (col <- newCols)
if (!existingCols.contains(col))
actualNewCols = col :: actualNewCols
actualNewCols = actualNewCols.reverse
if(actualNewCols.length > 0){
val query1 = scc.qb.addColsToTable(actualNewCols, cleanSampleTable)
println(query1)
scc.hql(query1)
val query2 = scc.qb.addColsToTable(actualNewCols, dirtySampleTable)
println(query2)
scc.hql(query2)
val query3 = scc.qb.addColsToTable(actualNewCols, baseTable)
println(query3)
scc.hql(query3)
}
//scc.hql("select split(name, ',') from "+cleanSampleTable).collect().foreach(println)
val extract = extractFunction(data)
for (col <- newCols){
//println(col)
//extract(col).collect().foreach(println)
var start_time = System.nanoTime()
scc.updateTableAttrValue(sampleTableName,col,extract(col))
println("Extract Apply Time: " + (System.nanoTime() - start_time)/ 1000000000)
}
}
def extractFunction(data:SchemaRDD): Map[String,RDD[(String,String)]]
} | sjyk/sampleclean-async | src/main/scala/sampleclean/clean/extract/AbstractExtraction.scala | Scala | apache-2.0 | 2,592 |
package utilities
/**
* Created by weijiayi on 3/6/16.
*/
trait ChangeSource {
private val listeners = scala.collection.mutable.ListBuffer[ChangeListener]()
def addListener(l: ChangeListener) = {
listeners += l
l.editingUpdated()
}
def notifyListeners() = listeners.foreach(_.editingUpdated())
def beforeNotify(action: =>Unit) = {
action
notifyListeners()
}
def newSettable[T](init: T) = {
new Settable(init, notifyListeners)
}
}
trait ChangeListener {
def editingUpdated(): Unit
}
class Settable[T](private var value: T, action: () => Unit) {
def set(v: T) = {
if(v!=value){
value = v
action()
}
}
def get = value
}
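/**
 * Illustrative usage sketch (not part of the original file): a value holder whose
 * listener is notified on registration and on every subsequent change.
 */
object ChangeSourceExample {
  def demo(): Unit = {
    val source = new ChangeSource {}
    val counter = source.newSettable(0)
    source.addListener(new ChangeListener {
      def editingUpdated(): Unit = println(s"value = ${counter.get}")
    })
    counter.set(1) // triggers notifyListeners, so the listener prints "value = 1"
  }
}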
| MrVPlusOne/Muse-CGH | src/utilities/ChangeSource.scala | Scala | mit | 699 |
/**
* This file is part of agora_elections.
* Copyright (C) 2014-2016 Agora Voting SL <[email protected]>
* agora_elections is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License.
* agora_elections is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
* You should have received a copy of the GNU Affero General Public License
* along with agora_elections. If not, see <http://www.gnu.org/licenses/>.
**/
package commands
import models._
import utils.JsonFormatters._
import utils.Crypto
import play.api.libs.json._
import javax.crypto.spec.SecretKeySpec
import javax.crypto.Mac
import javax.xml.bind.DatatypeConverter
import java.math.BigInteger
/**
 * Encrypts votes given a public-key (pks) file, a plaintext votes file and a desired total number of ballots
*
* use runMain commands.DemoVotes <pks> <votes> <total> from console
* or
* activator "run-main commands.DemoVotes <pks> <votes> <total>"
* from CLI
*/
object DemoVotes {
def main(args: Array[String]) : Unit = {
val jsonPks = Json.parse(scala.io.Source.fromFile(args(0)).mkString)
val pks = jsonPks.validate[Array[PublicKey]].get
val jsonVotes = Json.parse(scala.io.Source.fromFile(args(1)).mkString)
val votes = jsonVotes.validate[Array[Array[Long]]].get
val toEncrypt = if(args.length == 3) {
val extraSize = (args(2).toInt - votes.length).max(0)
val extra = Array.fill(extraSize){ votes(scala.util.Random.nextInt(votes.length)) }
votes ++ extra
} else {
votes
}
val histogram = toEncrypt.groupBy(l => l).map(t => (t._1, t._2.length))
System.err.println("DemoVotes: tally: " + histogram)
val jsonEncrypted = Json.toJson(toEncrypt.par.map(Crypto.encrypt(pks, _)).seq)
println(jsonEncrypted)
}
}
 | agoravoting/agora_elections | app/commands/DemoVotes.scala | Scala | agpl-3.0 | 2,049 |
package org.coursera.naptime.courier
import com.linkedin.data.DataMap
import com.linkedin.data.codec.JacksonDataCodec
import com.linkedin.data.schema.RecordDataSchema
import com.linkedin.data.schema.TyperefDataSchema
import com.linkedin.data.schema.UnionDataSchema
import com.linkedin.data.template.DataTemplateUtil
import com.linkedin.data.template.PrettyPrinterJacksonDataTemplateCodec
import com.linkedin.data.template.RecordTemplate
import com.linkedin.data.template.UnionTemplate
import org.coursera.courier.templates.DataTemplates.DataConversion
import org.coursera.pegasus.TypedDefinitionCodec
private[courier] object CourierTestFixtures {
private[this] val jsonCodec = new JacksonDataCodec()
private[this] val prettyPrinter = new PrettyPrinterJacksonDataTemplateCodec
val pegasusUnionJson =
s"""{
| "typedDefinition": {
| "UnionMember1": {
| "x": 1
| }
| }
|}
|""".stripMargin
val pegasusUnionJsonWithUnrecognizedField =
s"""{
| "typedDefinition": {
| "UnionMember1": {
| "x": 1
| }
| },
| "unrecognizedField": 2
|}
|""".stripMargin
val pegasusUnionSchema = DataTemplateUtil.parseSchema(
"""
|{
| "name": "TypedDefinitionExample1",
| "type": "record",
| "fields": [
| {
| "name": "typedDefinition", "type": {
| "name": "typedDefinitionUnionTyperef",
| "type": "typeref",
| "ref": [
| {
| "name": "UnionMember1",
| "type": "record",
| "fields": [
| { "name": "x", "type": "int" }
| ]
| },
| {
| "name": "UnionMember2",
| "type": "record",
| "fields": [
| { "name": "y", "type": "string" }
| ]
| }
| ]
| }
| }
| ]
|}
|""".stripMargin).asInstanceOf[RecordDataSchema]
val typedDefinitionJson =
s"""{
| "typedDefinition": {
| "typeName": "memberOne",
| "definition": {
| "x": 1
| }
| }
|}
|""".stripMargin
val typedDefinitionJsonWithUnrecognizedField =
s"""{
| "typedDefinition": {
| "typeName": "memberOne",
| "definition": {
| "x": 1
| }
| },
| "unrecognizedField": 2
|}
|""".stripMargin
val flatTypedDefinitionJson =
s"""{
| "typedDefinition": {
| "typeName": "memberOne",
| "x": 1
| }
|}
|""".stripMargin
val flatTypedDefinitionJsonWithUnrecognizedField =
s"""{
| "typedDefinition": {
| "typeName": "memberOne",
| "x": 1
| },
| "unrecognizedField": 2
|}
|""".stripMargin
val typedDefinitionSchema = DataTemplateUtil.parseSchema(
"""
|{
| "name": "TypedDefinitionExample1",
| "type": "record",
| "fields": [
| {
| "name": "typedDefinition", "type": {
| "name": "typedDefinitionUnionTyperef",
| "type": "typeref",
| "ref": [
| {
| "name": "UnionMember1",
| "type": "record",
| "fields": [
| { "name": "x", "type": "int" }
| ]
| },
| {
| "name": "UnionMember2",
| "type": "record",
| "fields": [
| { "name": "y", "type": "string" }
| ]
| }
| ],
| "typedDefinition": {
| "UnionMember1": "memberOne",
| "UnionMember2": "memberTwo"
| }
| }
| }
| ]
|}
|""".stripMargin).asInstanceOf[RecordDataSchema]
val typedDefinitionCodec =
new TypedDefinitionCodec(typedDefinitionSchema, prettyPrinter)
val typedDefinitionCodecPassthroughEnabled =
new TypedDefinitionCodec(typedDefinitionSchema, prettyPrinter, true)
class TypedDefinitionRecord(private val dataMap: DataMap)
extends RecordTemplate(dataMap, TypedDefinitionRecord.SCHEMA) {
dataMap.makeReadOnly()
}
object TypedDefinitionRecord {
val SCHEMA = typedDefinitionSchema
def apply(dataMap: DataMap, dataConversion: DataConversion) = {
new TypedDefinitionRecord(dataMap)
}
}
val flatTypedDefinitionSchema = DataTemplateUtil.parseSchema(
"""
|{
| "name": "TypedDefinitionExample1",
| "type": "record",
| "fields": [
| {
| "name": "typedDefinition", "type": {
| "name": "typedDefinitionUnionTyperef",
| "type": "typeref",
| "ref": [
| {
| "name": "UnionMember1",
| "type": "record",
| "fields": [
| { "name": "x", "type": "int" }
| ]
| },
| {
| "name": "UnionMember2",
| "type": "record",
| "fields": [
| { "name": "y", "type": "string" }
| ]
| }
| ],
| "flatTypedDefinition": {
| "UnionMember1": "memberOne",
| "UnionMember2": "memberTwo"
| }
| }
| }
| ]
|}
|""".stripMargin).asInstanceOf[RecordDataSchema]
val flatTypedDefinitionCodec =
new TypedDefinitionCodec(flatTypedDefinitionSchema, jsonCodec)
class FlatTypedDefinitionRecord(private val dataMap: DataMap)
extends RecordTemplate(dataMap, FlatTypedDefinitionRecord.SCHEMA) {
dataMap.makeReadOnly()
}
object FlatTypedDefinitionRecord {
val SCHEMA = flatTypedDefinitionSchema
def apply(dataMap: DataMap, dataConversion: DataConversion) = {
new FlatTypedDefinitionRecord(dataMap)
}
}
val complexSchema = DataTemplateUtil.parseSchema(
"""
|{
| "name": "Complex",
| "namespace": "org.example",
| "type": "record",
| "fields": [
| { "name": "int", "type": "int", "optional": true },
| { "name": "long", "type": "long", "optional": true },
| { "name": "float", "type": "float", "optional": true },
| { "name": "double", "type": "double", "optional": true },
| { "name": "boolean", "type": "boolean", "optional": true },
| { "name": "string", "type": "string", "optional": true },
| {
| "name": "record",
| "type": {
| "name": "Record",
| "namespace": "org.example",
| "type": "record",
| "fields": [
| { "name": "int", "type": "int", "optional": true }
| ]
| },
| "optional": true
| },
| { "name": "union", "type": [ "string", "Complex" ], "optional": true },
| { "name": "map", "type": { "type": "map", "values": "Complex" }, "optional": true },
| { "name": "array", "type": { "type": "array", "items": "Complex" }, "optional": true },
| {
| "name": "typedDefinition", "type": {
| "name": "TypedDefinition",
| "namespace": "org.example",
| "type": "typeref",
| "ref": [ "org.example.Complex", "Record" ],
| "typedDefinition": {
| "org.example.Complex": "complex",
| "Record": "record"
| }
| },
| "optional": true
| },
| {
| "name": "flatTypedDefinition", "type": {
| "name": "FlatTypedDefinition",
| "namespace": "org.example",
| "type": "typeref",
| "ref": [ "Complex", "org.example.Record" ],
| "flatTypedDefinition": {
| "Complex": "complex",
| "org.example.Record": "record"
| }
| },
| "optional": true
| }
| ]
|}
|""".stripMargin)
val pegasusComplexJson =
s"""{
| "int": 1,
| "long": 100,
| "float": 3.14,
| "double": 2.71,
| "boolean": true,
| "string": "hello",
| "record": { "int": 1 },
| "union": {
| "org.example.Complex": {
| "union": {
| "org.example.Complex": {
| }
| },
| "typedDefinition": {
| "org.example.Record": {
| "int": 1
| }
| },
| "flatTypedDefinition": {
| "org.example.Record": {
| "int": 1
| }
| }
| }
| },
| "map": {
| "key1": {
| "union": {
| "org.example.Complex": {
| }
| },
| "typedDefinition": {
| "org.example.Record": {
| "int": 1
| }
| },
| "flatTypedDefinition": {
| "org.example.Record": {
| "int": 1
| }
| }
| }
| },
| "array": [
| {
| "union": {
| "org.example.Complex": {
| }
| },
| "typedDefinition": {
| "org.example.Record": {
| "int": 1
| }
| },
| "flatTypedDefinition": {
| "org.example.Record": {
| "int": 1
| }
| }
| }
| ],
| "typedDefinition": {
| "org.example.Complex": {
| "union": {
| "org.example.Complex": {
| }
| },
| "typedDefinition": {
| "org.example.Record": {
| "int": 1
| }
| },
| "flatTypedDefinition": {
| "org.example.Record": {
| "int": 1
| }
| }
| }
| },
| "flatTypedDefinition": {
| "org.example.Complex": {
| "union": {
| "org.example.Complex": {
| }
| },
| "typedDefinition": {
| "org.example.Record": {
| "int": 1
| }
| },
| "flatTypedDefinition": {
| "org.example.Record": {
| "int": 1
| }
| }
| }
| }
|}
|""".stripMargin
val typedDefinitionComplexJson =
s"""{
| "int": 1,
| "long": 100,
| "float": 3.14,
| "double": 2.71,
| "boolean": true,
| "string": "hello",
| "record": { "int": 1 },
| "union": {
| "org.example.Complex": {
| "union": {
| "org.example.Complex": {
| }
| },
| "typedDefinition": {
| "typeName": "record",
| "definition": {
| "int": 1
| }
| },
| "flatTypedDefinition": {
| "typeName": "record",
| "int": 1
| }
| }
| },
| "map": {
| "key1": {
| "union": {
| "org.example.Complex": {
| }
| },
| "typedDefinition": {
| "typeName": "record",
| "definition": {
| "int": 1
| }
| },
| "flatTypedDefinition": {
| "typeName": "record",
| "int": 1
| }
| }
| },
| "array": [
| {
| "union": {
| "org.example.Complex": {
| }
| },
| "typedDefinition": {
| "typeName": "record",
| "definition": {
| "int": 1
| }
| },
| "flatTypedDefinition": {
| "typeName": "record",
| "int": 1
| }
| }
| ],
| "typedDefinition": {
| "typeName": "complex",
| "definition": {
| "union": {
| "org.example.Complex": {
| }
| },
| "typedDefinition": {
| "typeName": "record",
| "definition": {
| "int": 1
| }
| },
| "flatTypedDefinition": {
| "typeName": "record",
| "int": 1
| }
| }
| },
| "flatTypedDefinition": {
| "typeName": "complex",
| "union": {
| "org.example.Complex": {
| }
| },
| "typedDefinition": {
| "typeName": "record",
| "definition": {
| "int": 1
| }
| },
| "flatTypedDefinition": {
| "typeName": "record",
| "int": 1
| }
| }
|}
|""".stripMargin
val complexCodec = new TypedDefinitionCodec(complexSchema, jsonCodec)
class MockRecord(private val dataMap: DataMap)
extends RecordTemplate(dataMap, MockRecord.SCHEMA) {
dataMap.makeReadOnly()
}
object MockRecord {
val SCHEMA_JSON =
"""
|{
| "name": "MockRecord",
| "type": "record",
| "fields": [
| { "name": "string", "type": "string" },
| { "name": "int", "type": "int" }
| ]
|}
|""".stripMargin
val SCHEMA = DataTemplateUtil.parseSchema(SCHEMA_JSON).asInstanceOf[RecordDataSchema]
def apply(dataMap: DataMap, dataConversion: DataConversion) = {
new MockRecord(dataMap)
}
}
class MockTyperefUnion(private val dataMap: DataMap)
extends UnionTemplate(dataMap, MockTyperefUnion.SCHEMA) {
dataMap.makeReadOnly()
}
val mockTyperefUnionJson =
s"""{
| "int": 1
|}
|""".stripMargin
object MockTyperefUnion {
val SCHEMA_JSON =
"""
|[ "int", "string" ]
|""".stripMargin
val SCHEMA = DataTemplateUtil.parseSchema(SCHEMA_JSON).asInstanceOf[UnionDataSchema]
val TYPEREF_SCHEMA_JSON =
"""
|{
| "name": "MockTyperefUnion",
| "type": "typeref",
| "ref": [ "int", "string" ]
|}
|""".stripMargin
val TYPEREF_SCHEMA =
DataTemplateUtil.parseSchema(TYPEREF_SCHEMA_JSON).asInstanceOf[TyperefDataSchema]
def apply(dataMap: DataMap, dataConversion: DataConversion) = {
new MockTyperefUnion(dataMap)
}
}
}
| josh-newman/naptime | naptime-models/src/test/scala/org/coursera/naptime/courier/CourierTestFixtures.scala | Scala | apache-2.0 | 15,031 |
package pl.iterators.kebs.support
import pl.iterators.kebs.macros.CaseClass1Rep
trait PartialOrderingSupport {
implicit def partialOrderingFromCaseClass1Rep[A, Rep](implicit cc1Rep: CaseClass1Rep[A, Rep],
partialOrderingRep: PartialOrdering[Rep]): PartialOrdering[A] =
new PartialOrdering[A] {
override def tryCompare(x: A, y: A): Option[Int] = partialOrderingRep.tryCompare(cc1Rep.unapply(x), cc1Rep.unapply(y))
override def lteq(x: A, y: A): Boolean = partialOrderingRep.lteq(cc1Rep.unapply(x), cc1Rep.unapply(y))
}
}
| theiterators/kebs | macro-utils/src/main/scala-3/pl/iterators/kebs/support/PartialOrderingSupport.scala | Scala | mit | 612 |
package monocle.function
import monocle.std.tuple2._
import monocle.{Iso, Lens}
import scala.annotation.implicitNotFound
@implicitNotFound("Could not find an instance of Cons1[${S}, ${H}, ${T}], please check Monocle instance location policy to " +
"find out which import is necessary")
trait Cons1[S, H, T] extends Serializable {
/**
   * cons1 defines an [[Iso]] between an S and its head and tail.
   * cons1 is like cons but for types that *always* have a head and a tail, e.g. a non-empty list
*/
def cons1: Iso[S, (H, T)]
def head: Lens[S, H] = cons1 composeLens first
def tail: Lens[S, T] = cons1 composeLens second
}
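/*
 * Illustrative usage sketch (not part of the original file); `Nel` stands for any
 * non-empty-list type with a `Cons1[Nel[Int], Int, List[Int]]` instance in scope:
 *
 *   Cons1.head[Nel[Int], Int, List[Int]].get(nel)               // first element
 *   Cons1.tail[Nel[Int], Int, List[Int]].set(List(9, 9))(nel)   // replace everything after the head
 */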
object Cons1 extends HConsFunctions
trait HConsFunctions {
final def cons1[S, H, T](implicit ev: Cons1[S, H, T]): Iso[S, (H, T)] = ev.cons1
final def head[S, H, T](implicit ev: Cons1[S, H, T]): Lens[S, H] = ev.head
final def tail[S, H, T](implicit ev: Cons1[S, H, T]): Lens[S, T] = ev.tail
  /** construct an S by prepending a head element to a tail */
final def _cons1[S, H, T](head: H, tail: T)(implicit ev: Cons1[S, H, T]): S =
ev.cons1.reverseGet((head, tail))
  /** deconstruct an S into its head and tail */
final def _uncons1[S, H, T](s: S)(implicit ev: Cons1[S, H, T]): (H, T) =
ev.cons1.get(s)
}
 | malcolmgreaves/Monocle | core/src/main/scala/monocle/function/Cons1.scala | Scala | mit | 1,248 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.analysis
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.catalog.CatalogTypes.TablePartitionSpec
import org.apache.spark.sql.catalyst.expressions.{Attribute, LeafExpression, Unevaluable}
import org.apache.spark.sql.catalyst.plans.logical.LeafNode
import org.apache.spark.sql.catalyst.trees.TreePattern.{TreePattern, UNRESOLVED_FUNC}
import org.apache.spark.sql.catalyst.util.CharVarcharUtils
import org.apache.spark.sql.connector.catalog.{CatalogPlugin, Identifier, Table, TableCatalog}
import org.apache.spark.sql.connector.catalog.CatalogV2Implicits._
import org.apache.spark.sql.connector.catalog.TableChange.ColumnPosition
import org.apache.spark.sql.types.{DataType, StructField}
/**
* Holds the name of a namespace that has yet to be looked up in a catalog. It will be resolved to
* [[ResolvedNamespace]] during analysis.
*/
case class UnresolvedNamespace(multipartIdentifier: Seq[String]) extends LeafNode {
override lazy val resolved: Boolean = false
override def output: Seq[Attribute] = Nil
}
/**
* Holds the name of a table that has yet to be looked up in a catalog. It will be resolved to
* [[ResolvedTable]] during analysis.
*/
case class UnresolvedTable(
multipartIdentifier: Seq[String],
commandName: String,
relationTypeMismatchHint: Option[String]) extends LeafNode {
override lazy val resolved: Boolean = false
override def output: Seq[Attribute] = Nil
}
/**
* Holds the name of a view that has yet to be looked up in a catalog. It will be resolved to
* [[ResolvedView]] during analysis.
*/
case class UnresolvedView(
multipartIdentifier: Seq[String],
commandName: String,
allowTemp: Boolean,
relationTypeMismatchHint: Option[String]) extends LeafNode {
override lazy val resolved: Boolean = false
override def output: Seq[Attribute] = Nil
}
/**
* Holds the name of a table or view that has yet to be looked up in a catalog. It will
* be resolved to [[ResolvedTable]] or [[ResolvedView]] during analysis.
*/
case class UnresolvedTableOrView(
multipartIdentifier: Seq[String],
commandName: String,
allowTempView: Boolean) extends LeafNode {
override lazy val resolved: Boolean = false
override def output: Seq[Attribute] = Nil
}
sealed trait PartitionSpec extends LeafExpression with Unevaluable {
override def dataType: DataType = throw new IllegalStateException(
"PartitionSpec.dataType should not be called.")
override def nullable: Boolean = throw new IllegalStateException(
"PartitionSpec.nullable should not be called.")
}
case class UnresolvedPartitionSpec(
spec: TablePartitionSpec,
location: Option[String] = None) extends PartitionSpec {
override lazy val resolved = false
}
sealed trait FieldName extends LeafExpression with Unevaluable {
def name: Seq[String]
override def dataType: DataType = throw new IllegalStateException(
"FieldName.dataType should not be called.")
override def nullable: Boolean = throw new IllegalStateException(
"FieldName.nullable should not be called.")
}
case class UnresolvedFieldName(name: Seq[String]) extends FieldName {
override lazy val resolved = false
}
sealed trait FieldPosition extends LeafExpression with Unevaluable {
def position: ColumnPosition
override def dataType: DataType = throw new IllegalStateException(
"FieldPosition.dataType should not be called.")
override def nullable: Boolean = throw new IllegalStateException(
"FieldPosition.nullable should not be called.")
}
case class UnresolvedFieldPosition(position: ColumnPosition) extends FieldPosition {
override lazy val resolved = false
}
/**
* Holds the name of a function that has yet to be looked up in a catalog. It will be resolved to
* [[ResolvedFunc]] during analysis.
*/
case class UnresolvedFunc(multipartIdentifier: Seq[String]) extends LeafNode {
override lazy val resolved: Boolean = false
override def output: Seq[Attribute] = Nil
final override val nodePatterns: Seq[TreePattern] = Seq(UNRESOLVED_FUNC)
}
/**
 * A plan containing a resolved namespace.
*/
case class ResolvedNamespace(catalog: CatalogPlugin, namespace: Seq[String])
extends LeafNode {
override def output: Seq[Attribute] = Nil
}
/**
 * A plan containing a resolved table.
*/
case class ResolvedTable(
catalog: TableCatalog,
identifier: Identifier,
table: Table,
outputAttributes: Seq[Attribute])
extends LeafNode {
override def output: Seq[Attribute] = {
val qualifier = catalog.name +: identifier.namespace :+ identifier.name
outputAttributes.map(_.withQualifier(qualifier))
}
def name: String = (catalog.name +: identifier.namespace() :+ identifier.name()).quoted
}
object ResolvedTable {
def create(
catalog: TableCatalog,
identifier: Identifier,
table: Table): ResolvedTable = {
val schema = CharVarcharUtils.replaceCharVarcharWithStringInSchema(table.schema)
ResolvedTable(catalog, identifier, table, schema.toAttributes)
}
}
case class ResolvedPartitionSpec(
names: Seq[String],
ident: InternalRow,
location: Option[String] = None) extends PartitionSpec
case class ResolvedFieldName(path: Seq[String], field: StructField) extends FieldName {
def name: Seq[String] = path :+ field.name
}
case class ResolvedFieldPosition(position: ColumnPosition) extends FieldPosition
/**
* A plan containing resolved (temp) views.
*/
// TODO: create a generic representation for temp view, v1 view and v2 view, after we add view
// support to v2 catalog. For now we only need the identifier to fallback to v1 command.
case class ResolvedView(identifier: Identifier, isTemp: Boolean) extends LeafNode {
override def output: Seq[Attribute] = Nil
}
/**
 * A plan containing a resolved function.
*/
// TODO: create a generic representation for v1, v2 function, after we add function
// support to v2 catalog. For now we only need the identifier to fallback to v1 command.
case class ResolvedFunc(identifier: Identifier)
extends LeafNode {
override def output: Seq[Attribute] = Nil
}
| jiangxb1987/spark | sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/v2ResolutionPlans.scala | Scala | apache-2.0 | 6,910 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.delegation
import org.apache.flink.api.common.RuntimeExecutionMode
import org.apache.flink.api.dag.Transformation
import org.apache.flink.configuration.ExecutionOptions
import org.apache.flink.streaming.api.graph.StreamGraph
import org.apache.flink.table.api.PlanReference.{ContentPlanReference, FilePlanReference, ResourcePlanReference}
import org.apache.flink.table.api.internal.CompiledPlanInternal
import org.apache.flink.table.api.{CompiledPlan, ExplainDetail, PlanReference, TableConfig, TableException}
import org.apache.flink.table.catalog.{CatalogManager, FunctionCatalog}
import org.apache.flink.table.delegation.Executor
import org.apache.flink.table.module.ModuleManager
import org.apache.flink.table.operations.{ModifyOperation, Operation}
import org.apache.flink.table.planner.plan.ExecNodeGraphCompiledPlan
import org.apache.flink.table.planner.plan.`trait`._
import org.apache.flink.table.planner.plan.nodes.exec.ExecNodeGraph
import org.apache.flink.table.planner.plan.nodes.exec.processor.ExecNodeGraphProcessor
import org.apache.flink.table.planner.plan.nodes.exec.serde.JsonSerdeUtil
import org.apache.flink.table.planner.plan.nodes.exec.stream.StreamExecNode
import org.apache.flink.table.planner.plan.nodes.exec.utils.ExecNodePlanDumper
import org.apache.flink.table.planner.plan.optimize.{Optimizer, StreamCommonSubGraphBasedOptimizer}
import org.apache.flink.table.planner.plan.utils.FlinkRelOptUtil
import org.apache.flink.table.planner.utils.DummyStreamExecutionEnvironment
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.ObjectReader
import org.apache.calcite.plan.{ConventionTraitDef, RelTrait, RelTraitDef}
import org.apache.calcite.sql.SqlExplainLevel
import java.io.{File, IOException}
import java.util
import _root_.scala.collection.JavaConversions._
class StreamPlanner(
executor: Executor,
config: TableConfig,
moduleManager: ModuleManager,
functionCatalog: FunctionCatalog,
catalogManager: CatalogManager)
extends PlannerBase(executor, config, moduleManager, functionCatalog, catalogManager,
isStreamingMode = true) {
override protected def getTraitDefs: Array[RelTraitDef[_ <: RelTrait]] = {
Array(
ConventionTraitDef.INSTANCE,
FlinkRelDistributionTraitDef.INSTANCE,
MiniBatchIntervalTraitDef.INSTANCE,
ModifyKindSetTraitDef.INSTANCE,
UpdateKindTraitDef.INSTANCE)
}
override protected def getOptimizer: Optimizer = new StreamCommonSubGraphBasedOptimizer(this)
override protected def getExecNodeGraphProcessors: Seq[ExecNodeGraphProcessor] = Seq()
override protected def translateToPlan(execGraph: ExecNodeGraph): util.List[Transformation[_]] = {
validateAndOverrideConfiguration()
val planner = createDummyPlanner()
val transformations = execGraph.getRootNodes.map {
case node: StreamExecNode[_] => node.translateToPlan(planner)
case _ =>
throw new TableException("Cannot generate DataStream due to an invalid logical plan. " +
"This is a bug and should not happen. Please file an issue.")
}
cleanupInternalConfigurations()
transformations
}
override def explain(operations: util.List[Operation], extraDetails: ExplainDetail*): String = {
val (sinkRelNodes, optimizedRelNodes, execGraph, streamGraph) = getExplainGraphs(operations)
val sb = new StringBuilder
sb.append("== Abstract Syntax Tree ==")
sb.append(System.lineSeparator)
sinkRelNodes.foreach { sink =>
// use EXPPLAN_ATTRIBUTES to make the ast result more readable
// and to keep the previous behavior
sb.append(FlinkRelOptUtil.toString(sink, SqlExplainLevel.EXPPLAN_ATTRIBUTES))
sb.append(System.lineSeparator)
}
sb.append("== Optimized Physical Plan ==")
sb.append(System.lineSeparator)
val explainLevel = if (extraDetails.contains(ExplainDetail.ESTIMATED_COST)) {
SqlExplainLevel.ALL_ATTRIBUTES
} else {
SqlExplainLevel.DIGEST_ATTRIBUTES
}
val withChangelogTraits = extraDetails.contains(ExplainDetail.CHANGELOG_MODE)
optimizedRelNodes.foreach { rel =>
sb.append(FlinkRelOptUtil.toString(
rel,
explainLevel,
withChangelogTraits = withChangelogTraits))
sb.append(System.lineSeparator)
}
sb.append("== Optimized Execution Plan ==")
sb.append(System.lineSeparator)
sb.append(ExecNodePlanDumper.dagToString(execGraph))
if (extraDetails.contains(ExplainDetail.JSON_EXECUTION_PLAN)) {
sb.append(System.lineSeparator)
sb.append("== Physical Execution Plan ==")
sb.append(System.lineSeparator)
sb.append(streamGraph.getStreamingPlanAsJSON)
}
sb.toString()
}
private def createDummyPlanner(): StreamPlanner = {
val dummyExecEnv = new DummyStreamExecutionEnvironment(getExecEnv)
val executor = new DefaultExecutor(dummyExecEnv)
new StreamPlanner(executor, config, moduleManager, functionCatalog, catalogManager)
}
override def loadPlan(planReference: PlanReference): CompiledPlanInternal = {
val ctx = createSerdeContext
val objectReader: ObjectReader = JsonSerdeUtil.createObjectReader(ctx)
val execNodeGraph = planReference match {
case filePlanReference: FilePlanReference =>
objectReader.readValue(filePlanReference.getFile, classOf[ExecNodeGraph])
case contentPlanReference: ContentPlanReference =>
objectReader.readValue(contentPlanReference.getContent, classOf[ExecNodeGraph])
case resourcePlanReference: ResourcePlanReference => {
val url = resourcePlanReference.getClassLoader
.getResource(resourcePlanReference.getResourcePath)
if (url == null) {
throw new IOException(
"Cannot load the plan reference from classpath: " + planReference);
}
objectReader.readValue(new File(url.toURI), classOf[ExecNodeGraph])
}
case _ => throw new IllegalStateException(
"Unknown PlanReference. This is a bug, please contact the developers")
}
new ExecNodeGraphCompiledPlan(
this,
JsonSerdeUtil.createObjectWriter(createSerdeContext)
.withDefaultPrettyPrinter()
.writeValueAsString(execNodeGraph),
execNodeGraph)
}
override def compilePlan(modifyOperations: util.List[ModifyOperation]): CompiledPlanInternal = {
validateAndOverrideConfiguration()
val relNodes = modifyOperations.map(translateToRel)
val optimizedRelNodes = optimize(relNodes)
val execGraph = translateToExecNodeGraph(optimizedRelNodes)
cleanupInternalConfigurations()
new ExecNodeGraphCompiledPlan(
this,
JsonSerdeUtil.createObjectWriter(createSerdeContext)
.withDefaultPrettyPrinter()
.writeValueAsString(execGraph),
execGraph)
}
override def translatePlan(plan: CompiledPlanInternal): util.List[Transformation[_]] = {
validateAndOverrideConfiguration()
val execGraph = plan.asInstanceOf[ExecNodeGraphCompiledPlan].getExecNodeGraph
val transformations = translateToPlan(execGraph)
cleanupInternalConfigurations()
transformations
}
override def explainPlan(plan: CompiledPlanInternal, extraDetails: ExplainDetail*): String = {
validateAndOverrideConfiguration()
val execGraph = plan.asInstanceOf[ExecNodeGraphCompiledPlan].getExecNodeGraph
val transformations = translateToPlan(execGraph)
cleanupInternalConfigurations()
val streamGraph = executor.createPipeline(transformations, config.getConfiguration, null)
.asInstanceOf[StreamGraph]
val sb = new StringBuilder
sb.append("== Optimized Execution Plan ==")
sb.append(System.lineSeparator)
sb.append(ExecNodePlanDumper.dagToString(execGraph))
if (extraDetails.contains(ExplainDetail.JSON_EXECUTION_PLAN)) {
sb.append(System.lineSeparator)
sb.append("== Physical Execution Plan ==")
sb.append(System.lineSeparator)
sb.append(streamGraph.getStreamingPlanAsJSON)
}
sb.toString()
}
override def validateAndOverrideConfiguration(): Unit = {
super.validateAndOverrideConfiguration()
val runtimeMode = getConfiguration.get(ExecutionOptions.RUNTIME_MODE)
if (runtimeMode != RuntimeExecutionMode.STREAMING) {
throw new IllegalArgumentException(
"Mismatch between configured runtime mode and actual runtime mode. " +
"Currently, the 'execution.runtime-mode' can only be set when instantiating the " +
"table environment. Subsequent changes are not supported. " +
"Please instantiate a new TableEnvironment if necessary."
)
}
}
}
| godfreyhe/flink | flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/planner/delegation/StreamPlanner.scala | Scala | apache-2.0 | 9,450 |
import sio.core._
import sio.teletype._
import sio.core.syntax.st._
object callbacks {
def func(f: () => Unit): Impure[Unit] = {
(0 until 3).foreach(_ => f())
}
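  // `run` demonstrates the callback bridge: an IORef counter starts at 0, `asCallback` exposes
  // the increment action as a plain () => Unit for impure code, `func` invokes it three times,
  // and the final read prints "Done: 3".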
def run: ST[RW, Unit] = for {
ref <- IORef.create(0)
cb <- ref.modify(_ + 1).map(_ => ()).asCallback
_ <- IO { func(cb) }
i <- ref.read
_ <- putStrLn(s"Done: $i")
} yield ()
}
| alexknvl/sio | example/src/main/scala/callbacks.scala | Scala | mit | 378 |
/*
* Licensed to STRATIO (C) under one or more contributor license agreements.
* See the NOTICE file distributed with this work for additional information
* regarding copyright ownership. The STRATIO (C) licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.stratio.crossdata.driver
import java.util
import akka.actor.{ActorSelection, ActorSystem}
import akka.contrib.pattern.ClusterClient
import com.stratio.crossdata.common.ask.Connect
import com.stratio.crossdata.common.exceptions._
import com.stratio.crossdata.common.result._
import com.stratio.crossdata.communication.Disconnect
import com.stratio.crossdata.driver.actor.ProxyActor
import com.stratio.crossdata.driver.config.{BasicDriverConfig, DriverConfig, DriverSectionConfig, ServerSectionConfig}
import com.stratio.crossdata.driver.utils.RetryPolitics
import org.apache.log4j.Logger
import scala.concurrent.duration._
object BasicDriver extends DriverConfig {
/**
* Class logger.
*/
override lazy val logger = Logger.getLogger(getClass)
  val balancing: Boolean = config.getBoolean("config.balancing")
  lazy val auth: Boolean = config.getBoolean("config.authentication")
  lazy val serverPathName: String = config.getString("config.serverPathName")
  lazy val crossdataServerClusterName: String = config.getString("config.crossdataServerClusterName")
  lazy val localAffinity: Boolean = config.getBoolean("config.localAffinity")
def getBasicDriverConfigFromFile:BasicDriverConfig = {
logger.debug("RetryTimes --> " + retryTimes)
logger.debug("RetryDuration --> " + retryDuration.duration.toMillis.toString)
logger.debug("ClusterName --> " + clusterName)
logger.debug("ClusterName --> " + clusterActor)
logger.debug("ClusterHosts --> " + clusterHosts.map(_.toString).toArray.toString)
new BasicDriverConfig(new DriverSectionConfig(retryTimes, retryDuration.duration.toMillis),
new ServerSectionConfig(clusterName, clusterActor, clusterHosts.map(_.toString).toArray))
}
def getBasicDriverConfigFromFile(servers:Array[String]):BasicDriverConfig = {
logger.debug("RetryTimes --> " + retryTimes)
logger.debug("RetryDuration --> " + retryDuration.duration.toMillis.toString)
logger.debug("ClusterName --> " + clusterName)
logger.debug("ClusterName --> " + clusterActor)
logger.debug("ClusterHosts --> " + clusterHosts.map(_.toString).toArray.toString)
new BasicDriverConfig(new DriverSectionConfig(retryTimes, retryDuration.duration.toMillis),
new ServerSectionConfig(clusterName, clusterActor, servers.map(_.toString).toArray))
}
}
class BasicDriver(basicDriverConfig: BasicDriverConfig) {
/**
* Default user to connect to the com.stratio.crossdata server.
*/
private final val DEFAULT_USER: String = "CROSSDATA_USER"
private final val DEFAULT_PASS: String = "CROSSDATA_PASS"
  val balancing: Boolean = BasicDriver.balancing
  val serverPathName: String = BasicDriver.serverPathName
  val crossdataServerClusterName: String = BasicDriver.crossdataServerClusterName
  val cpuLoadPingTimeInMillis: Long = 5000
  val localAffinity: Boolean = BasicDriver.localAffinity
private lazy val logger = BasicDriver.logger
private val system = ActorSystem("CrossdataDriverSystem", BasicDriver.config)
private val initialContacts: Set[ActorSelection] = contactPoints.map(contact => system.actorSelection(contact)).toSet
val clusterClientActor = system.actorOf(ClusterClient.props(initialContacts), "remote-client")
val proxyActor = system.actorOf(ProxyActor.props(clusterClientActor, basicDriverConfig.serverSection.clusterActor, this), "proxy-actor")
/**
* Session-Connection map. Currently, a single connection per driver is allowed.
*/
private lazy val driverConnections: util.Map[String, DriverConnection] = new util.HashMap()
val retryPolitics: RetryPolitics = {
new RetryPolitics(basicDriverConfig.driverSection.retryTimes, basicDriverConfig.driverSection.retryDuration.millis)
}
private lazy val contactPoints: List[String] = {
basicDriverConfig.serverSection.clusterHosts.toList.map(host => "akka.tcp://"
+ basicDriverConfig.serverSection.clusterName + "@" + host + "/user/receptionist")
}
//For Futures
implicit val context = system.dispatcher
var userId: String = ""
var userName: String = ""
var password: String = ""
def this() {
this(BasicDriver.getBasicDriverConfigFromFile)
}
def this(servers:Array[String]) {
this(BasicDriver.getBasicDriverConfigFromFile(servers))
}
/**
* Check if user authentication is enabled.
   * @return true if authentication is enabled, false otherwise.
*/
  def isAuthEnable(): Boolean = BasicDriver.auth
/**
   * Check if the user and password are allowed to access the Crossdata server.
   * @param user The user.
   * @param password The password.
   * @return true if the credentials are valid, false otherwise.
*/
  private def checkUser(user: String, password: String): Boolean = true
/**
   * Establish a connection to the Crossdata server.
   * @param user The user login (audit only).
   * @param pass The user password.
   * @return The DriverConnection for the new session.
*/
@throws(classOf[ConnectionException])
def connect(user: String, pass: String): DriverConnection = {
logger.info("Establishing connection with user: " + user + " to " + contactPoints)
if (!checkUser(user,pass)){
logger.info("Connection error")
throw new ConnectionException("Authentication Error. Check your user or password!")
}
val result = retryPolitics.askRetry(proxyActor, new Connect(user, pass), 5 second)
result match {
case errorResult: ErrorResult => {
logger.info("Connection error")
throw new ConnectionException(errorResult.getErrorMessage)
}
case connectResult: ConnectResult => {
logger.info("Connection established")
userId = connectResult.getSessionId
val driverConnection = new DriverConnection(connectResult.getSessionId, userId, this)
driverConnections.put(connectResult.getSessionId, driverConnection)
driverConnection
}
}
}
/**
   * Close the connection to the Crossdata server.
*/
@throws(classOf[ConnectionException])
def disconnect(): Unit = {
logger.info("Disconnecting user: " + userId + " to " + contactPoints)
val result = retryPolitics.askRetry(proxyActor, new Disconnect(userId), 5 second, retry = 2)
result match {
case errorResult: ErrorResult => {
throw new ConnectionException(errorResult.getErrorMessage)
}
case connectResult: DisconnectResult => {
//TODO disconnect session
userId = ""
driverConnections.clear()
}
}
}
/**
* Shutdown actor system.
*/
def close() {
system.shutdown()
}
//TODO review API (private and public methods). User and pass should be stored in DriverConnection
/**
   * This method gets the user name.
* @return the value of userName.
* */
def getUserName: String = userName
def getPassword: String = password
def setUserName(userName: String) {
this.userName = userName
if (userName.isEmpty) {
this.userName = DEFAULT_USER
}
}
def setPassword(password: String) {
this.password = password
if (password.isEmpty) {
this.password = DEFAULT_PASS
}
}
def getDriverConnections = driverConnections
}
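/**
 * Usage sketch (illustration only, not part of the driver sources). It relies solely on members
 * defined above: the default constructor, the user/password accessors and connect/disconnect/close.
 */
object BasicDriverUsageExample {
  def main(args: Array[String]): Unit = {
    val driver = new BasicDriver() // contact points are read from the driver configuration file
    driver.setUserName("CROSSDATA_USER")
    driver.setPassword("CROSSDATA_PASS")
    try {
      val connection = driver.connect(driver.getUserName, driver.getPassword)
      println("Connected with session: " + connection)
      driver.disconnect()
    } finally {
      driver.close() // shuts down the underlying actor system
    }
  }
}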
| ccaballe/crossdata | crossdata-driver/src/main/scala/com/stratio/crossdata/driver/BasicDriver.scala | Scala | apache-2.0 | 7,824 |
package es.pirita.techfest2015
/**
* @author Ignacio Navarro Martin
* @version 1.0
*/
object StringUtils {
  implicit class StringImprove(val s: String) {
    def plusN(n: Int): String = s"$s $n"
  }
}
object Part9 extends App {
////
///Implicits
///
//View
//implicit def strToInt(x: String): Int = x.toInt
//val y: Int = "123"
  //Implicit values; see the concrete example after this object
//class Prefixer(val prefix: String)
//def addPrefix(s: String)(implicit p: Prefixer) = p.prefix + s
//Pimp my library
//String is final
import StringUtils._
"23" plusN 25
}
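// Concrete version of the "implicit parameter" pattern sketched in the comments above
// (an added illustration, not in the original talk code).
object Part9ImplicitParams {
  class Prefixer(val prefix: String)
  def addPrefix(s: String)(implicit p: Prefixer): String = p.prefix + s
  implicit val defaultPrefixer: Prefixer = new Prefixer("TechFest: ")
  val greeting: String = addPrefix("2015") // "TechFest: 2015"
}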
| pirita/TechFest2015 | src/es/pirita/techfest2015/Part9.scala | Scala | apache-2.0 | 561 |
package org.jetbrains.plugins.scala.annotator.template
import org.jetbrains.plugins.scala.annotator.{AnnotatorTestBase, Error}
/**
* Pavel Fatin
*/
class ObjectCreationImpossibleTest extends AnnotatorTestBase(ObjectCreationImpossible) {
def testFineNew {
assertNothing(messages("class C; new C"))
assertNothing(messages("class C; new C {}"))
assertNothing(messages("class C; trait T; new C with T"))
assertNothing(messages("class C; trait T; new C with T {}"))
}
def testFineObject {
assertNothing(messages("class C; object O extends C"))
assertNothing(messages("class C; object O extends C {}"))
assertNothing(messages("class C; trait T; object O extends C with T"))
assertNothing(messages("class C; trait T; object O extends C with T {}"))
}
def testTypeSkipDeclarations {
assertNothing(messages("class C { def f }"))
}
def testSkipAbstractInstantiations {
assertNothing(messages("trait T; new T"))
}
def testSkipConcrete {
assertNothing(messages("class C { def f }; new C"))
assertNothing(messages("class C { def f }; new C {}"))
assertNothing(messages("class C { def f }; new Object with C"))
assertNothing(messages("class C { def f }; new Object with C {}"))
}
def testSkipInvalidDirect {
assertNothing(messages("new { def f }"))
assertNothing(messages("new Object { def f }"))
assertNothing(messages("object O { def f }"))
}
def testUndefinedMember {
val Message = ObjectCreationImpossible.message(("f: Unit", "Holder.T"))
assertMatches(messages("trait T { def f }; new T {}")) {
case Error("T", Message) :: Nil =>
}
}
def testUndefinedMemberObject {
val Message = ObjectCreationImpossible.message(("f: Unit", "Holder.T"))
assertMatches(messages("trait T { def f }; object O extends T {}")) {
case Error("O", Message) :: Nil =>
}
}
  def testUndefinedAndWith {
val Message = ObjectCreationImpossible.message(("f: Unit", "Holder.T"))
assertMatches(messages("trait T { def f }; new Object with T {}")) {
case Error("Object", Message) :: Nil =>
}
}
def testNeedsToBeAbstractPlaceDiffer {
val Message = ObjectCreationImpossible.message(
("b: Unit", "Holder.B"), ("a: Unit", "Holder.A"))
val ReversedMessage = ObjectCreationImpossible.message(
("a: Unit", "Holder.A"), ("b: Unit", "Holder.B"))
assertMatches(messages("trait A { def a }; trait B { def b }; new A with B {}")) {
case Error("A", Message) :: Nil =>
case Error("A", ReversedMessage) :: Nil =>
}
}
def testSkipTypeDeclarationSCL2887 {
assertMatches(messages("trait A { type a }; new A {}")) {
case Nil =>
}
}
} | LPTK/intellij-scala | test/org/jetbrains/plugins/scala/annotator/template/ObjectCreationImpossibleTest.scala | Scala | apache-2.0 | 2,708 |
/*
* Copyright (C) 2010 Lalit Pant <[email protected]>
*
* The contents of this file are subject to the GNU General Public License
* Version 3 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at http://www.gnu.org/copyleft/gpl.html
*
* Software distributed under the License is distributed on an "AS
* IS" basis, WITHOUT WARRANTY OF ANY KIND, either express or
* implied. See the License for the specific language governing
* rights and limitations under the License.
*
*/
package net.kogics.kojo
import net.kogics.kojo.util.Utils
import org.openide.windows.TopComponent
object KojoCtx extends Singleton[KojoCtx] {
protected def newInstance = new KojoCtx
}
class KojoCtx extends core.KojoCtx {
def activate(tc: TopComponent) {
if (!tc.isOpened) {
tc.open()
}
tc.requestActive()
}
def makeTurtleWorldVisible() {
Utils.runInSwingThreadAndWait {
val tc = SCanvasTopComponent.findInstance()
activate(tc)
}
activateCodeEditor()
xscala.CodeCompletionUtils.activateTw()
}
def makeStagingVisible() {
Utils.runInSwingThreadAndWait {
val tc = SCanvasTopComponent.findInstance()
activate(tc)
}
activateCodeEditor()
xscala.CodeCompletionUtils.activateStaging()
}
def makeMathWorldVisible() {
Utils.runInSwingThreadAndWait {
val tc = GeoGebraTopComponent.findInstance()
activate(tc)
}
activateCodeEditor()
xscala.CodeCompletionUtils.activateMw()
}
def makeStoryTellerVisible() {
Utils.runInSwingThreadAndWait {
val tc = story.StoryTellerTopComponent.findInstance()
activate(tc)
}
activateCodeEditor()
}
def activateCodeEditor() {
Utils.runInSwingThreadAndWait {
val tc = CodeEditorTopComponent.findInstance()
activate(tc)
}
}
def baseDir = Utils.runInSwingThreadAndWait {
CodeEditorTopComponent.findInstance().getLastLoadStoreDir() + "/"
}
def stopAnimation() = Utils.runInSwingThread {
CodeExecutionSupport.instance.stopAnimation()
}
} | dotta/kojo | KojoEnv/src/net/kogics/kojo/KojoCtx.scala | Scala | gpl-3.0 | 2,115 |
package mesosphere.marathon.upgrade
import com.wix.accord._
import mesosphere.marathon._
import mesosphere.marathon.api.v2.ValidationHelper
import mesosphere.marathon.state.AppDefinition.VersionInfo
import mesosphere.marathon.state.AppDefinition.VersionInfo.FullVersionInfo
import mesosphere.marathon.state.PathId._
import mesosphere.marathon.state._
import mesosphere.marathon.storage.TwitterZk
import mesosphere.marathon.test.{ MarathonSpec, MarathonTestHelper, Mockito }
import org.apache.mesos.{ Protos => mesos }
import org.scalatest.{ GivenWhenThen, Matchers }
import scala.collection.immutable.Seq
class DeploymentPlanTest extends MarathonSpec with Matchers with GivenWhenThen with Mockito {
protected def actionsOf(plan: DeploymentPlan): Seq[DeploymentAction] =
plan.steps.flatMap(_.actions)
test("partition a simple group's apps into concurrently deployable subsets") {
Given("a group of four apps with some simple dependencies")
val aId = "/test/database/a".toPath
val bId = "/test/service/b".toPath
val cId = "/c".toPath
val dId = "/d".toPath
val a = AppDefinition(aId)
val b = AppDefinition(bId, dependencies = Set(aId))
val c = AppDefinition(cId, dependencies = Set(aId))
val d = AppDefinition(dId, dependencies = Set(bId))
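    // Longest outbound dependency paths: a -> 1, b and c -> 2 (both depend on a), d -> 3 (d -> b -> a),
    // so the expected equivalence classes are {a}, {b, c} and {d}.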
val group = Group("/".toPath, groups = Set(Group(
id = "/test".toPath,
apps = Map(c.id -> c, d.id -> d),
groups = Set(
Group("/test/database".toPath, Map(a.id -> a)),
Group("/test/service".toPath, Map(b.id -> b))
)
)))
When("the group's apps are grouped by the longest outbound path")
val partitionedApps = DeploymentPlan.appsGroupedByLongestPath(group)
Then("three equivalence classes should be computed")
partitionedApps should have size (3)
partitionedApps.keySet should contain (1)
partitionedApps.keySet should contain (2)
partitionedApps.keySet should contain (3)
partitionedApps(2) should have size (2)
}
test("partition a complex group's apps into concurrently deployable subsets") {
Given("a group of four apps with some simple dependencies")
val aId = "/a".toPath
val bId = "/b".toPath
val cId = "/c".toPath
val dId = "/d".toPath
val eId = "/e".toPath
val fId = "/f".toPath
val a = AppDefinition(aId, dependencies = Set(bId, cId))
val b = AppDefinition(bId, dependencies = Set(cId))
val c = AppDefinition(cId, dependencies = Set(dId))
val d = AppDefinition(dId)
val e = AppDefinition(eId)
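    // Longest outbound dependency paths: d and e -> 1, c -> 2 (c -> d), b -> 3 (b -> c -> d),
    // a -> 4 (a -> b -> c -> d), giving four classes with only {d, e} sharing one.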
val group = Group(
id = "/".toPath,
apps = Map(a.id -> a, b.id -> b, c.id -> c, d.id -> d, e.id -> e)
)
When("the group's apps are grouped by the longest outbound path")
val partitionedApps = DeploymentPlan.appsGroupedByLongestPath(group)
Then("three equivalence classes should be computed")
partitionedApps should have size (4)
partitionedApps.keySet should contain (1)
partitionedApps.keySet should contain (2)
partitionedApps.keySet should contain (3)
partitionedApps.keySet should contain (4)
partitionedApps(1) should have size (2)
}
test("start from empty group") {
val app = AppDefinition("/app".toPath, instances = 2)
val from = Group("/".toPath, Group.defaultApps, groups = Set(Group("/group".toPath, Map.empty, groups = Set.empty)))
val to = Group("/".toPath, groups = Set(Group("/group".toPath, Map(app.id -> app))))
val plan = DeploymentPlan(from, to)
actionsOf(plan) should contain (StartApplication(app, 0))
actionsOf(plan) should contain (ScaleApplication(app, app.instances))
}
test("start from running group") {
val app1 = AppDefinition("/app".toPath, Some("sleep 10"))
val app2 = AppDefinition("/app2".toPath, Some("cmd2"))
val app3 = AppDefinition("/app3".toPath, Some("cmd3"))
val updatedApp1 = AppDefinition("/app".toPath, Some("sleep 30"))
val updatedApp2 = AppDefinition("/app2".toPath, Some("cmd2"), instances = 10)
val app4 = AppDefinition("/app4".toPath, Some("cmd4"))
val apps = Map(app1.id -> app1, app2.id -> app2, app3.id -> app3)
val update = Map(updatedApp1.id -> updatedApp1, updatedApp2.id -> updatedApp2, app4.id -> app4)
val from = Group("/".toPath, groups = Set(Group("/group".toPath, apps)))
val to = Group("/".toPath, groups = Set(Group("/group".toPath, update)))
val plan = DeploymentPlan(from, to)
/*
plan.toStart should have size 1
plan.toRestart should have size 1
plan.toScale should have size 1
plan.toStop should have size 1
*/
}
test("can compute affected app ids") {
val versionInfo = AppDefinition.VersionInfo.forNewConfig(Timestamp(10))
val app: AppDefinition = AppDefinition("/app".toPath, Some("sleep 10"), versionInfo = versionInfo)
val app2: AppDefinition = AppDefinition("/app2".toPath, Some("cmd2"), versionInfo = versionInfo)
val app3: AppDefinition = AppDefinition("/app3".toPath, Some("cmd3"), versionInfo = versionInfo)
val unchanged: AppDefinition = AppDefinition("/unchanged".toPath, Some("unchanged"), versionInfo = versionInfo)
val apps = Map(app.id -> app, app2.id -> app2, app3.id -> app3, unchanged.id -> unchanged)
val updatedApp = app.copy(cmd = Some("sleep 30"))
val updatedApp2 = app2.copy(instances = 10)
val updatedApp4 = AppDefinition("/app4".toPath, Some("cmd4"))
val update = Map(
updatedApp.id -> updatedApp,
updatedApp2.id -> updatedApp2,
updatedApp4.id -> updatedApp4,
unchanged.id -> unchanged
)
val from = Group("/".toPath, groups = Set(Group("/group".toPath, apps)))
val to = Group("/".toPath, groups = Set(Group("/group".toPath, update)))
val plan = DeploymentPlan(from, to)
plan.affectedApplicationIds should equal (Set("/app".toPath, "/app2".toPath, "/app3".toPath, "/app4".toPath))
plan.isAffectedBy(plan) should equal (right = true)
plan.isAffectedBy(DeploymentPlan(from, from)) should equal (right = false)
}
test("when updating a group with dependencies, the correct order is computed") {
Given("Two application updates with command and scale changes")
val mongoId = "/test/database/mongo".toPath
val serviceId = "/test/service/srv1".toPath
val strategy = UpgradeStrategy(0.75)
val versionInfo = AppDefinition.VersionInfo.forNewConfig(Timestamp(10))
val mongo: (AppDefinition, AppDefinition) =
AppDefinition(mongoId, Some("mng1"), instances = 4, upgradeStrategy = strategy, versionInfo = versionInfo) ->
AppDefinition(mongoId, Some("mng2"), instances = 8, upgradeStrategy = strategy, versionInfo = versionInfo)
val service: (AppDefinition, AppDefinition) =
AppDefinition(serviceId, Some("srv1"), instances = 4, upgradeStrategy = strategy, versionInfo = versionInfo) ->
AppDefinition(
serviceId, Some("srv2"), dependencies = Set(mongoId), instances = 10, upgradeStrategy = strategy,
versionInfo = versionInfo
)
val from = Group("/".toPath, groups = Set(Group(
id = "/test".toPath,
groups = Set(
Group("/test/database".toPath, Map(mongo._1.id -> mongo._1)),
Group("/test/service".toPath, Map(service._1.id -> service._1))
)
)
))
val to = Group("/".toPath, groups = Set(Group("/test".toPath, groups = Set(
Group("/test/database".toPath, Map(mongo._2.id -> mongo._2)),
Group("/test/service".toPath, Map(service._2.id -> service._2))
))))
When("the deployment plan is computed")
val plan = DeploymentPlan(from, to)
Then("the deployment steps are correct")
plan.steps should have size 2
plan.steps(0).actions.toSet should equal (Set(RestartApplication(mongo._2)))
plan.steps(1).actions.toSet should equal (Set(RestartApplication(service._2)))
}
test("when starting apps without dependencies, they are first started and then scaled parallely") {
Given("an empty group and the same group but now including four independent apps")
val emptyGroup = Group(id = "/test".toPath)
val instances: Int = 10
val apps: Map[AppDefinition.AppKey, AppDefinition] = (1 to 4).map { i =>
val app = AppDefinition(s"/test/$i".toPath, Some("cmd"), instances = instances)
app.id -> app
}(collection.breakOut)
val targetGroup = Group("/".toPath, groups = Set(Group(
id = "/test".toPath,
apps = apps,
groups = Set()
)))
When("the deployment plan is computed")
val plan = DeploymentPlan(emptyGroup, targetGroup)
Then("we get two deployment steps")
plan.steps should have size 2
Then("the first with all StartApplication actions")
plan.steps(0).actions.toSet should equal (apps.mapValues(StartApplication(_, 0)).values.toSet)
Then("and the second with all ScaleApplication actions")
plan.steps(1).actions.toSet should equal (apps.mapValues(ScaleApplication(_, instances)).values.toSet)
}
test("when updating apps without dependencies, the restarts are executed in the same step") {
Given("Two application updates with command and scale changes")
val mongoId = "/test/database/mongo".toPath
val serviceId = "/test/service/srv1".toPath
val strategy = UpgradeStrategy(0.75)
val versionInfo = AppDefinition.VersionInfo.forNewConfig(Timestamp(10))
val mongo =
AppDefinition(mongoId, Some("mng1"), instances = 4, upgradeStrategy = strategy, versionInfo = versionInfo) ->
AppDefinition(mongoId, Some("mng2"), instances = 8, upgradeStrategy = strategy, versionInfo = versionInfo)
val service =
AppDefinition(serviceId, Some("srv1"), instances = 4, upgradeStrategy = strategy, versionInfo = versionInfo) ->
AppDefinition(serviceId, Some("srv2"), instances = 10, upgradeStrategy = strategy, versionInfo = versionInfo)
val from: Group = Group("/".toPath, groups = Set(Group("/test".toPath, groups = Set(
Group("/test/database".toPath, Map(mongo._1.id -> mongo._1)),
Group("/test/service".toPath, Map(service._1.id -> service._1))
))))
val to: Group = Group("/".toPath, groups = Set(Group("/test".toPath, groups = Set(
Group("/test/database".toPath, Map(mongo._2.id -> mongo._2)),
Group("/test/service".toPath, Map(service._2.id -> service._2))
))))
When("the deployment plan is computed")
val plan = DeploymentPlan(from, to)
Then("the deployment steps are correct")
plan.steps should have size 1
plan.steps(0).actions.toSet should equal (Set(RestartApplication(mongo._2), RestartApplication(service._2)))
}
test("when updating a group with dependent and independent applications, the correct order is computed") {
Given("application updates with command and scale changes")
val mongoId = "/test/database/mongo".toPath
val serviceId = "/test/service/srv1".toPath
val appId = "/test/independent/app".toPath
val strategy = UpgradeStrategy(0.75)
val versionInfo = AppDefinition.VersionInfo.forNewConfig(Timestamp(10))
val mongo =
AppDefinition(mongoId, Some("mng1"), instances = 4, upgradeStrategy = strategy, versionInfo = versionInfo) ->
AppDefinition(mongoId, Some("mng2"), instances = 8, upgradeStrategy = strategy, versionInfo = versionInfo)
val service =
AppDefinition(serviceId, Some("srv1"), instances = 4, upgradeStrategy = strategy, versionInfo = versionInfo) ->
AppDefinition(serviceId, Some("srv2"), dependencies = Set(mongoId), instances = 10, upgradeStrategy = strategy,
versionInfo = versionInfo)
val independent =
AppDefinition(appId, Some("app1"), instances = 1, upgradeStrategy = strategy) ->
AppDefinition(appId, Some("app2"), instances = 3, upgradeStrategy = strategy)
val toStop = AppDefinition("/test/service/toStop".toPath, instances = 1, dependencies = Set(mongoId))
val toStart = AppDefinition("/test/service/toStart".toPath, instances = 2, dependencies = Set(serviceId))
val from: Group = Group("/".toPath, groups = Set(Group("/test".toPath, groups = Set(
Group("/test/database".toPath, Map(mongo._1.id -> mongo._1)),
Group("/test/service".toPath, Map(service._1.id -> service._1, toStop.id -> toStop)),
Group("/test/independent".toPath, Map(independent._1.id -> independent._1))
))))
val to: Group = Group("/".toPath, groups = Set(Group("/test".toPath, groups = Set(
Group("/test/database".toPath, Map(mongo._2.id -> mongo._2)),
Group("/test/service".toPath, Map(service._2.id -> service._2, toStart.id -> toStart)),
Group("/test/independent".toPath, Map(independent._2.id -> independent._2))
))))
When("the deployment plan is computed")
val plan = DeploymentPlan(from, to)
Then("the deployment contains steps for dependent and independent applications")
plan.steps should have size (5)
actionsOf(plan) should have size (6)
plan.steps(0).actions.toSet should equal (Set(StopApplication(toStop)))
plan.steps(1).actions.toSet should equal (Set(StartApplication(toStart, 0)))
plan.steps(2).actions.toSet should equal (Set(RestartApplication(mongo._2), RestartApplication(independent._2)))
plan.steps(3).actions.toSet should equal (Set(RestartApplication(service._2)))
plan.steps(4).actions.toSet should equal (Set(ScaleApplication(toStart, 2)))
}
test("when the only action is to stop an application") {
Given("application updates with only the removal of an app")
val strategy = UpgradeStrategy(0.75)
val app = AppDefinition("/test/independent/app".toPath, Some("app2"), instances = 3, upgradeStrategy = strategy) -> None
val from: Group = Group(
id = Group.empty.id,
groups = Set(Group("/test".toPath, groups = Set(
Group("/test/independent".toPath, Map(app._1.id -> app._1))
))))
val to: Group = Group(id = Group.empty.id, groups = Set(Group("/test".toPath)))
When("the deployment plan is computed")
val plan = DeploymentPlan(from, to)
Then("the deployment contains one step consisting of one stop action")
plan.steps should have size 1
plan.steps(0).actions.toSet should be(Set(StopApplication(app._1)))
}
// regression test for #765
test("Should create non-empty deployment plan when only args have changed") {
val versionInfo: FullVersionInfo = AppDefinition.VersionInfo.forNewConfig(Timestamp(10))
val app = AppDefinition(id = "/test".toPath, cmd = Some("sleep 5"), versionInfo = versionInfo)
val appNew = app.copy(args = Some(Seq("foo")))
val from = Group("/".toPath, apps = Map(app.id -> app))
val to = from.copy(apps = Map(appNew.id -> appNew))
val plan = DeploymentPlan(from, to)
plan.steps should not be empty
}
// regression test for #1007
test("Don't restart apps that have not changed") {
val app = AppDefinition(
id = "/test".toPath,
cmd = Some("sleep 5"),
instances = 1,
versionInfo = VersionInfo.forNewConfig(Timestamp(10))
)
val appNew = app.copy(instances = 1) // no change
val from = Group("/".toPath, apps = Map(app.id -> app))
val to = from.copy(apps = Map(appNew.id -> appNew))
DeploymentPlan(from, to) should be (empty)
}
test("Restart apps that have not changed but a new version") {
val app = AppDefinition(
id = "/test".toPath,
cmd = Some("sleep 5"),
versionInfo = VersionInfo.forNewConfig(Timestamp(10))
)
val appNew = app.markedForRestarting
val from = Group("/".toPath, apps = Map(app.id -> app))
val to = from.copy(apps = Map(appNew.id -> appNew))
DeploymentPlan(from, to).steps should have size (1)
DeploymentPlan(from, to).steps.head should be (DeploymentStep(Seq(RestartApplication(appNew))))
}
test("ScaleApplication step is created with TasksToKill") {
Given("a group with one app")
val aId = "/test/some/a".toPath
val oldApp = AppDefinition(aId, versionInfo = AppDefinition.VersionInfo.forNewConfig(Timestamp(10)))
When("A deployment plan is generated")
val originalGroup = Group("/".toPath, groups = Set(Group(
id = "/test".toPath,
apps = Map(oldApp.id -> oldApp),
groups = Set(
Group("/test/some".toPath, Map(oldApp.id -> oldApp))
)
)))
val newApp = oldApp.copy(instances = 5)
val targetGroup = Group("/".toPath, groups = Set(Group(
id = "/test".toPath,
apps = Map(newApp.id -> newApp),
groups = Set(
Group("/test/some".toPath, Map(newApp.id -> newApp))
)
)))
val taskToKill = MarathonTestHelper.stagedTaskForApp(aId)
val plan = DeploymentPlan(
original = originalGroup,
target = targetGroup,
resolveArtifacts = Seq.empty,
version = Timestamp.now(),
toKill = Map(aId -> Set(taskToKill)))
Then("DeploymentSteps should include ScaleApplication w/ tasksToKill")
plan.steps should not be empty
plan.steps.head.actions.head shouldEqual ScaleApplication(newApp, 5, Some(Set(taskToKill)))
}
test("Deployment plan allows valid updates for resident tasks") {
Given("All options are supplied and we have a valid group change")
val f = new Fixture()
When("We create a scale deployment")
val app = f.validResident.copy(instances = 123)
val group = f.group.copy(apps = Map(app.id -> app))
val plan = DeploymentPlan(f.group, group)
Then("The deployment is valid")
validate(plan)(f.validator).isSuccess should be(true)
}
test("Deployment plan validation fails for invalid changes in resident tasks") {
Given("All options are supplied and we have a valid group change")
val f = new Fixture()
When("We update the upgrade strategy to the default strategy")
val app2 = f.validResident.copy(upgradeStrategy = AppDefinition.DefaultUpgradeStrategy)
val group2 = f.group.copy(groupsById = Set(f.group.group(PathId("/test")).get.copy(apps = Map(app2.id -> app2)))
.map(group => group.id -> group)(collection.breakOut))
val plan2 = DeploymentPlan(f.group, group2)
Then("The deployment is not valid")
validate(plan2)(f.validator).isSuccess should be(false)
}
test("Deployment plan validation fails if the deployment plan is too big") {
Given("All options are supplied and we have a valid group change, but the deployment plan size limit is small")
val f = new Fixture()
val validator = DeploymentPlan.deploymentPlanValidator(MarathonTestHelper.defaultConfig(
internalStorageBackend = Some(TwitterZk.StoreName),
maxZkNodeSize = Some(1)))
When("We create a scale deployment")
val app = f.validResident.copy(instances = 123)
val group = f.group.copy(apps = Map(app.id -> app))
val plan = DeploymentPlan(f.group, group)
Then("The deployment is valid")
val result = validate(plan)(validator)
val violations = ValidationHelper.getAllRuleConstrains(result)
result.isFailure should be(true)
ValidationHelper.getAllRuleConstrains(result).head.message should be ("""The way we persist data in ZooKeeper would exceed the maximum ZK node size (1 bytes).
|You can adjust this value via --zk_max_node_size, but make sure this value is compatible with
|your ZooKeeper ensemble!
|See: http://zookeeper.apache.org/doc/r3.3.1/zookeeperAdmin.html#Unsafe+Options""".stripMargin)
}
class Fixture {
def persistentVolume(path: String) = PersistentVolume(path, PersistentVolumeInfo(123), mesos.Volume.Mode.RW)
val zero = UpgradeStrategy(0, 0)
def residentApp(id: String, volumes: Seq[PersistentVolume]): AppDefinition = {
AppDefinition(
id = PathId(id),
container = Some(Container.Mesos(volumes)),
residency = Some(Residency(123, Protos.ResidencyDefinition.TaskLostBehavior.RELAUNCH_AFTER_TIMEOUT))
)
}
val vol1 = persistentVolume("foo")
val vol2 = persistentVolume("bla")
val vol3 = persistentVolume("test")
val validResident = residentApp("/app1", Seq(vol1, vol2)).copy(upgradeStrategy = zero)
val group = Group("/".toPath, groups = Set(Group(PathId("/test"), apps = Map(validResident.id -> validResident))))
val marathonConf = MarathonTestHelper.defaultConfig()
val validator = DeploymentPlan.deploymentPlanValidator(marathonConf)
}
}
| timcharper/marathon | src/test/scala/mesosphere/marathon/upgrade/DeploymentPlanTest.scala | Scala | apache-2.0 | 20,454 |
package ohnosequences.tabula
case object attributes {
import ohnosequences.cosas._, typeUnions._, properties._
import scala.reflect.ClassTag
trait AnyAttribute extends AnyProperty {
// should be provieded implicitly:
val rawTag: ClassTag[Raw]
val validRaw: Raw isOneOf ValidValues
}
class Attribute[R](val label: String)
(implicit
val rawTag: ClassTag[R],
val validRaw: R isOneOf ValidValues
) extends AnyAttribute { type Raw = R }
}
| ohnosequences/tabula | src/main/scala/tabula/attributes.scala | Scala | agpl-3.0 | 483 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ui
import java.net.{BindException, ServerSocket}
import java.net.{URI, URL}
import java.util.Locale
import javax.servlet._
import javax.servlet.http.{HttpServlet, HttpServletRequest, HttpServletResponse}
import scala.io.Source
import org.eclipse.jetty.servlet.{ServletContextHandler, ServletHolder}
import org.eclipse.jetty.util.thread.QueuedThreadPool
import org.mockito.Mockito.{mock, when}
import org.scalatest.concurrent.Eventually._
import org.scalatest.time.SpanSugar._
import org.apache.spark._
import org.apache.spark.LocalSparkContext._
import org.apache.spark.internal.config.UI
import org.apache.spark.util.Utils
class UISuite extends SparkFunSuite {
/**
* Create a test SparkContext with the SparkUI enabled.
* It is safe to `get` the SparkUI directly from the SparkContext returned here.
*/
private def newSparkContext(): SparkContext = {
val conf = new SparkConf()
.setMaster("local")
.setAppName("test")
.set(UI.UI_ENABLED, true)
val sc = new SparkContext(conf)
assert(sc.ui.isDefined)
sc
}
private def sslDisabledConf(): (SparkConf, SecurityManager, SSLOptions) = {
val conf = new SparkConf
val securityMgr = new SecurityManager(conf)
(conf, securityMgr, securityMgr.getSSLOptions("ui"))
}
private def sslEnabledConf(sslPort: Option[Int] = None):
(SparkConf, SecurityManager, SSLOptions) = {
val keyStoreFilePath = getTestResourcePath("spark.keystore")
val conf = new SparkConf()
.set("spark.ssl.ui.enabled", "true")
.set("spark.ssl.ui.keyStore", keyStoreFilePath)
.set("spark.ssl.ui.keyStorePassword", "123456")
.set("spark.ssl.ui.keyPassword", "123456")
sslPort.foreach { p =>
conf.set("spark.ssl.ui.port", p.toString)
}
val securityMgr = new SecurityManager(conf)
(conf, securityMgr, securityMgr.getSSLOptions("ui"))
}
ignore("basic ui visibility") {
withSpark(newSparkContext()) { sc =>
// test if the ui is visible, and all the expected tabs are visible
eventually(timeout(10.seconds), interval(50.milliseconds)) {
val html = Utils.tryWithResource(Source.fromURL(sc.ui.get.webUrl))(_.mkString)
assert(!html.contains("random data that should not be present"))
assert(html.toLowerCase(Locale.ROOT).contains("stages"))
assert(html.toLowerCase(Locale.ROOT).contains("storage"))
assert(html.toLowerCase(Locale.ROOT).contains("environment"))
assert(html.toLowerCase(Locale.ROOT).contains("executors"))
}
}
}
ignore("visibility at localhost:4040") {
withSpark(newSparkContext()) { sc =>
// test if visible from http://localhost:4040
eventually(timeout(10.seconds), interval(50.milliseconds)) {
val html = Utils.tryWithResource(Source.fromURL("http://localhost:4040"))(_.mkString)
assert(html.toLowerCase(Locale.ROOT).contains("stages"))
}
}
}
test("jetty selects different port under contention") {
var server: ServerSocket = null
var serverInfo1: ServerInfo = null
var serverInfo2: ServerInfo = null
val (conf, _, sslOptions) = sslDisabledConf()
try {
server = new ServerSocket(0)
val startPort = server.getLocalPort
serverInfo1 = JettyUtils.startJettyServer("0.0.0.0", startPort, sslOptions, conf)
serverInfo2 = JettyUtils.startJettyServer("0.0.0.0", startPort, sslOptions, conf)
// Allow some wiggle room in case ports on the machine are under contention
val boundPort1 = serverInfo1.boundPort
val boundPort2 = serverInfo2.boundPort
assert(boundPort1 != startPort)
assert(boundPort2 != startPort)
assert(boundPort1 != boundPort2)
} finally {
stopServer(serverInfo1)
stopServer(serverInfo2)
closeSocket(server)
}
}
test("jetty with https selects different port under contention") {
var server: ServerSocket = null
var serverInfo1: ServerInfo = null
var serverInfo2: ServerInfo = null
try {
server = new ServerSocket(0)
val startPort = server.getLocalPort
val (conf, _, sslOptions) = sslEnabledConf()
serverInfo1 = JettyUtils.startJettyServer("0.0.0.0", startPort, sslOptions, conf, "server1")
serverInfo2 = JettyUtils.startJettyServer("0.0.0.0", startPort, sslOptions, conf, "server2")
// Allow some wiggle room in case ports on the machine are under contention
val boundPort1 = serverInfo1.boundPort
val boundPort2 = serverInfo2.boundPort
assert(boundPort1 != startPort)
assert(boundPort2 != startPort)
assert(boundPort1 != boundPort2)
} finally {
stopServer(serverInfo1)
stopServer(serverInfo2)
closeSocket(server)
}
}
test("jetty binds to port 0 correctly") {
var socket: ServerSocket = null
var serverInfo: ServerInfo = null
val (conf, _, sslOptions) = sslDisabledConf()
try {
serverInfo = JettyUtils.startJettyServer("0.0.0.0", 0, sslOptions, conf)
val server = serverInfo.server
val boundPort = serverInfo.boundPort
assert(server.getState === "STARTED")
assert(boundPort != 0)
intercept[BindException] {
socket = new ServerSocket(boundPort)
}
} finally {
stopServer(serverInfo)
closeSocket(socket)
}
}
test("jetty with https binds to port 0 correctly") {
var socket: ServerSocket = null
var serverInfo: ServerInfo = null
try {
val (conf, _, sslOptions) = sslEnabledConf()
serverInfo = JettyUtils.startJettyServer("0.0.0.0", 0, sslOptions, conf)
val server = serverInfo.server
val boundPort = serverInfo.boundPort
assert(server.getState === "STARTED")
assert(boundPort != 0)
assert(serverInfo.securePort.isDefined)
intercept[BindException] {
socket = new ServerSocket(boundPort)
}
} finally {
stopServer(serverInfo)
closeSocket(socket)
}
}
test("verify webUrl contains the scheme") {
withSpark(newSparkContext()) { sc =>
val ui = sc.ui.get
val uiAddress = ui.webUrl
assert(uiAddress.startsWith("http://") || uiAddress.startsWith("https://"))
}
}
test("verify webUrl contains the port") {
withSpark(newSparkContext()) { sc =>
val ui = sc.ui.get
val splitUIAddress = ui.webUrl.split(':')
val boundPort = ui.boundPort
assert(splitUIAddress(2).toInt == boundPort)
}
}
test("verify proxy rewrittenURI") {
val prefix = "/worker-id"
val target = "http://localhost:8081"
val path = "/worker-id/json"
var rewrittenURI = JettyUtils.createProxyURI(prefix, target, path, null)
assert(rewrittenURI.toString() === "http://localhost:8081/json")
rewrittenURI = JettyUtils.createProxyURI(prefix, target, path, "test=done")
assert(rewrittenURI.toString() === "http://localhost:8081/json?test=done")
rewrittenURI = JettyUtils.createProxyURI(prefix, target, "/worker-id", null)
assert(rewrittenURI.toString() === "http://localhost:8081")
rewrittenURI = JettyUtils.createProxyURI(prefix, target, "/worker-id/test%2F", null)
assert(rewrittenURI.toString() === "http://localhost:8081/test%2F")
rewrittenURI = JettyUtils.createProxyURI(prefix, target, "/worker-id/%F0%9F%98%84", null)
assert(rewrittenURI.toString() === "http://localhost:8081/%F0%9F%98%84")
rewrittenURI = JettyUtils.createProxyURI(prefix, target, "/worker-noid/json", null)
assert(rewrittenURI === null)
}
test("SPARK-33611: Avoid encoding twice on the query parameter of proxy rewrittenURI") {
val prefix = "/worker-id"
val target = "http://localhost:8081"
val path = "/worker-id/json"
val rewrittenURI =
JettyUtils.createProxyURI(prefix, target, path, "order%5B0%5D%5Bcolumn%5D=0")
assert(rewrittenURI.toString === "http://localhost:8081/json?order%5B0%5D%5Bcolumn%5D=0")
}
test("verify rewriting location header for reverse proxy") {
val clientRequest = mock(classOf[HttpServletRequest])
var headerValue = "http://localhost:4040/jobs"
val targetUri = URI.create("http://localhost:4040")
when(clientRequest.getScheme()).thenReturn("http")
when(clientRequest.getHeader("host")).thenReturn("localhost:8080")
when(clientRequest.getPathInfo()).thenReturn("/proxy/worker-id/jobs")
var newHeader = JettyUtils.createProxyLocationHeader(headerValue, clientRequest, targetUri)
assert(newHeader.toString() === "http://localhost:8080/proxy/worker-id/jobs")
headerValue = "http://localhost:4041/jobs"
newHeader = JettyUtils.createProxyLocationHeader(headerValue, clientRequest, targetUri)
assert(newHeader === null)
}
test("add and remove handlers with custom user filter") {
val (conf, securityMgr, sslOptions) = sslDisabledConf()
conf.set("spark.ui.filters", classOf[TestFilter].getName())
conf.set(s"spark.${classOf[TestFilter].getName()}.param.responseCode",
HttpServletResponse.SC_NOT_ACCEPTABLE.toString)
val serverInfo = JettyUtils.startJettyServer("0.0.0.0", 0, sslOptions, conf)
try {
val path = "/test"
val url = new URL(s"http://localhost:${serverInfo.boundPort}$path/root")
assert(TestUtils.httpResponseCode(url) === HttpServletResponse.SC_NOT_FOUND)
val (servlet, ctx) = newContext(path)
serverInfo.addHandler(ctx, securityMgr)
assert(TestUtils.httpResponseCode(url) === HttpServletResponse.SC_NOT_ACCEPTABLE)
// Try a request with bad content in a parameter to make sure the security filter
// is being added to new handlers.
val badRequest = new URL(
s"http://localhost:${serverInfo.boundPort}$path/root?bypass&invalid<=foo")
assert(TestUtils.httpResponseCode(badRequest) === HttpServletResponse.SC_OK)
assert(servlet.lastRequest.getParameter("invalid<") === null)
assert(servlet.lastRequest.getParameter("invalid<") !== null)
serverInfo.removeHandler(ctx)
assert(TestUtils.httpResponseCode(url) === HttpServletResponse.SC_NOT_FOUND)
} finally {
stopServer(serverInfo)
}
}
test("SPARK-32467: Avoid encoding URL twice on https redirect") {
val (conf, securityMgr, sslOptions) = sslEnabledConf()
val serverInfo = JettyUtils.startJettyServer("0.0.0.0", 0, sslOptions, conf)
try {
val serverAddr = s"http://localhost:${serverInfo.boundPort}"
val (_, ctx) = newContext("/ctx1")
serverInfo.addHandler(ctx, securityMgr)
TestUtils.withHttpConnection(new URL(s"$serverAddr/ctx%281%29?a%5B0%5D=b")) { conn =>
assert(conn.getResponseCode() === HttpServletResponse.SC_FOUND)
val location = Option(conn.getHeaderFields().get("Location"))
.map(_.get(0)).orNull
val expectedLocation = s"https://localhost:${serverInfo.securePort.get}/ctx(1)?a[0]=b"
assert(location == expectedLocation)
}
} finally {
stopServer(serverInfo)
}
}
test("http -> https redirect applies to all URIs") {
val (conf, securityMgr, sslOptions) = sslEnabledConf()
val serverInfo = JettyUtils.startJettyServer("0.0.0.0", 0, sslOptions, conf)
try {
Seq(newContext("/"), newContext("/test1")).foreach { case (_, ctx) =>
serverInfo.addHandler(ctx, securityMgr)
}
assert(serverInfo.server.getState === "STARTED")
val (_, testContext) = newContext("/test2")
serverInfo.addHandler(testContext, securityMgr)
val httpPort = serverInfo.boundPort
val tests = Seq(
("http", serverInfo.boundPort, HttpServletResponse.SC_FOUND),
("https", serverInfo.securePort.get, HttpServletResponse.SC_OK))
tests.foreach { case (scheme, port, expected) =>
val urls = Seq(
s"$scheme://localhost:$port/root",
s"$scheme://localhost:$port/test1/root",
s"$scheme://localhost:$port/test2/root")
urls.foreach { url =>
val rc = TestUtils.httpResponseCode(new URL(url))
assert(rc === expected, s"Unexpected status $rc for $url")
}
}
} finally {
stopServer(serverInfo)
}
}
test("specify both http and https ports separately") {
var socket: ServerSocket = null
var serverInfo: ServerInfo = null
try {
socket = new ServerSocket(0)
// Make sure the SSL port lies way outside the "http + 400" range used as the default.
val baseSslPort = Utils.userPort(socket.getLocalPort(), 10000)
val (conf, _, sslOptions) = sslEnabledConf(sslPort = Some(baseSslPort))
serverInfo = JettyUtils.startJettyServer("0.0.0.0", socket.getLocalPort() + 1,
sslOptions, conf, serverName = "server1")
val notAllowed = Utils.userPort(serverInfo.boundPort, 400)
assert(serverInfo.securePort.isDefined)
      assert(serverInfo.securePort.get != notAllowed)
} finally {
stopServer(serverInfo)
closeSocket(socket)
}
}
test("redirect with proxy server support") {
val proxyRoot = "https://proxy.example.com:443/prefix"
val (conf, securityMgr, sslOptions) = sslDisabledConf()
conf.set(UI.PROXY_REDIRECT_URI, proxyRoot)
val serverInfo = JettyUtils.startJettyServer("0.0.0.0", 0, sslOptions, conf)
try {
val serverAddr = s"http://localhost:${serverInfo.boundPort}"
val redirect = JettyUtils.createRedirectHandler("/src", "/dst")
serverInfo.addHandler(redirect, securityMgr)
// Test with a URL handled by the added redirect handler, and also including a path prefix.
val headers = Seq("X-Forwarded-Context" -> "/prefix")
TestUtils.withHttpConnection(
new URL(s"$serverAddr/src/"),
headers = headers) { conn =>
assert(conn.getResponseCode() === HttpServletResponse.SC_FOUND)
val location = Option(conn.getHeaderFields().get("Location"))
.map(_.get(0)).orNull
assert(location === s"$proxyRoot/prefix/dst")
}
// Not really used by Spark, but test with a relative redirect.
val relative = JettyUtils.createRedirectHandler("/rel", "root")
serverInfo.addHandler(relative, securityMgr)
TestUtils.withHttpConnection(new URL(s"$serverAddr/rel/")) { conn =>
assert(conn.getResponseCode() === HttpServletResponse.SC_FOUND)
val location = Option(conn.getHeaderFields().get("Location"))
.map(_.get(0)).orNull
assert(location === s"$proxyRoot/rel/root")
}
} finally {
stopServer(serverInfo)
}
}
test("SPARK-34449: Jetty 9.4.35.v20201120 and later no longer return status code 302 " +
" and handle internally when request URL ends with a context path without trailing '/'") {
val proxyRoot = "https://proxy.example.com:443/prefix"
val (conf, securityMgr, sslOptions) = sslDisabledConf()
conf.set(UI.PROXY_REDIRECT_URI, proxyRoot)
val serverInfo = JettyUtils.startJettyServer("0.0.0.0", 0, sslOptions, conf)
try {
val (_, ctx) = newContext("/ctx")
serverInfo.addHandler(ctx, securityMgr)
val urlStr = s"http://localhost:${serverInfo.boundPort}/ctx"
assert(TestUtils.httpResponseCode(new URL(urlStr + "/")) === HttpServletResponse.SC_OK)
      // If the following assertion fails after a Jetty upgrade, the new version has likely
      // changed how it handles a context path that lacks the trailing slash.
assert(TestUtils.httpResponseCode(new URL(urlStr)) === HttpServletResponse.SC_OK)
} finally {
stopServer(serverInfo)
}
}
test("SPARK-34449: default thread pool size of different jetty servers") {
val (conf, _, sslOptions) = sslDisabledConf()
Seq(10, 200, 500, 1000).foreach { poolSize =>
val serverInfo = JettyUtils.startJettyServer("0.0.0.0", 0, sslOptions, conf, "", poolSize)
try {
val pool = serverInfo.server.getThreadPool.asInstanceOf[QueuedThreadPool]
val leasedThreads = pool.getThreadPoolBudget.getLeasedThreads
assert(pool.getMaxThreads === math.max(leasedThreads + 1, poolSize),
"we shall meet the basic requirement for jetty to be responsive")
} finally {
stopServer(serverInfo)
}
}
}
test("SPARK-36237: Attach and start handler after application started in UI ") {
def newSparkContextWithoutUI(): SparkContext = {
val conf = new SparkConf()
.setMaster("local")
.setAppName("test")
.set(UI.UI_ENABLED, false)
new SparkContext(conf)
}
withSpark(newSparkContextWithoutUI()) { sc =>
assert(sc.ui.isEmpty)
val sparkUI = SparkUI.create(Some(sc), sc.statusStore, sc.conf, sc.env.securityManager,
sc.appName, "", sc.startTime)
sparkUI.bind()
assert(TestUtils.httpResponseMessage(new URL(sparkUI.webUrl + "/jobs"))
=== "Spark is starting up. Please wait a while until it's ready.")
sparkUI.attachAllHandler()
assert(TestUtils.httpResponseMessage(new URL(sparkUI.webUrl + "/jobs")).contains(sc.appName))
sparkUI.stop()
}
}
/**
* Create a new context handler for the given path, with a single servlet that responds to
* requests in `$path/root`.
*/
private def newContext(path: String): (CapturingServlet, ServletContextHandler) = {
val servlet = new CapturingServlet()
val ctx = new ServletContextHandler()
ctx.setContextPath(path)
val servletHolder = new ServletHolder(servlet)
ctx.addServlet(servletHolder, "/root")
ctx.addServlet(servletHolder, "/")
(servlet, ctx)
}
def stopServer(info: ServerInfo): Unit = {
if (info != null) info.stop()
}
def closeSocket(socket: ServerSocket): Unit = {
    if (socket != null) socket.close()
}
/** Test servlet that exposes the last request object for GET calls. */
private class CapturingServlet extends HttpServlet {
@volatile var lastRequest: HttpServletRequest = _
override def doGet(req: HttpServletRequest, res: HttpServletResponse): Unit = {
lastRequest = req
res.sendError(HttpServletResponse.SC_OK)
}
}
}
// Filter for testing; returns a configurable code for every request.
private[spark] class TestFilter extends Filter {
private var rc: Int = HttpServletResponse.SC_OK
override def destroy(): Unit = { }
override def init(config: FilterConfig): Unit = {
if (config.getInitParameter("responseCode") != null) {
rc = config.getInitParameter("responseCode").toInt
}
}
override def doFilter(req: ServletRequest, res: ServletResponse, chain: FilterChain): Unit = {
if (req.getParameter("bypass") == null) {
res.asInstanceOf[HttpServletResponse].sendError(rc, "Test.")
} else {
chain.doFilter(req, res)
}
}
}
| ueshin/apache-spark | core/src/test/scala/org/apache/spark/ui/UISuite.scala | Scala | apache-2.0 | 19,503 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.runtime.batch.sql
import org.apache.flink.api.scala.ExecutionEnvironment
import org.apache.flink.table.api.internal.TableEnvironmentInternal
import org.apache.flink.table.api.scala._
import org.apache.flink.table.runtime.utils.{CommonTestData, TableProgramsCollectionTestBase}
import org.apache.flink.table.runtime.utils.TableProgramsTestBase.TableConfigMode
import org.apache.flink.test.util.TestBaseUtils
import org.junit.Test
import org.junit.runner.RunWith
import org.junit.runners.Parameterized
import scala.collection.JavaConverters._
@RunWith(classOf[Parameterized])
class TableSourceITCase(
configMode: TableConfigMode)
extends TableProgramsCollectionTestBase(configMode) {
@Test
def testCsvTableSource(): Unit = {
val csvTable = CommonTestData.getCsvTableSource
val env = ExecutionEnvironment.getExecutionEnvironment
val tEnv = BatchTableEnvironment.create(env, config)
tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal("csvTable", csvTable)
val results = tEnv.sqlQuery(
"SELECT id, `first`, `last`, score FROM csvTable").collect()
val expected = Seq(
"1,Mike,Smith,12.3",
"2,Bob,Taylor,45.6",
"3,Sam,Miller,7.89",
"4,Peter,Smith,0.12",
"5,Liz,Williams,34.5",
"6,Sally,Miller,6.78",
"7,Alice,Smith,90.1",
"8,Kelly,Williams,2.34").mkString("\\n")
TestBaseUtils.compareResultAsText(results.asJava, expected)
}
@Test
def testCsvTableSourceWithEmptyColumn(): Unit = {
val csvTable = CommonTestData.getCsvTableSourceWithEmptyColumn
val env = ExecutionEnvironment.getExecutionEnvironment
val tEnv = BatchTableEnvironment.create(env, config)
tEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal("csvTable", csvTable)
val results = tEnv.sqlQuery(
"SELECT id, `first`, `last`, score FROM csvTable").collect()
val expected = Seq(
"1,Mike,Smith,12.3",
"2,Bob,Taylor,null",
"null,Leonard,null,null").mkString("\\n")
TestBaseUtils.compareResultAsText(results.asJava, expected)
}
@Test
def testNested(): Unit = {
val env = ExecutionEnvironment.getExecutionEnvironment
val tableEnv = BatchTableEnvironment.create(env, config)
val nestedTable = CommonTestData.getNestedTableSource
tableEnv.asInstanceOf[TableEnvironmentInternal].registerTableSourceInternal(
"NestedPersons", nestedTable)
val result = tableEnv.sqlQuery("SELECT NestedPersons.firstName, NestedPersons.lastName," +
"NestedPersons.address.street, NestedPersons.address.city AS city " +
"FROM NestedPersons " +
"WHERE NestedPersons.address.city LIKE 'Dublin'").collect()
val expected = "Bob,Taylor,Pearse Street,Dublin"
TestBaseUtils.compareResultAsText(result.asJava, expected)
}
}
| hequn8128/flink | flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/runtime/batch/sql/TableSourceITCase.scala | Scala | apache-2.0 | 3,650 |
package org.jetbrains.plugins.scala.annotator
class ScCharLiteralAnnotatorTest extends ScalaHighlightingTestBase {
def testEmptyCharLiteral(): Unit = {
val scalaText =
"""
|val test = ''
""".stripMargin
assertMatches(errorsFromScalaCode(scalaText)){
case Error("''", "Missing char value") :: Nil =>
}
}
}
| JetBrains/intellij-scala | scala/scala-impl/test/org/jetbrains/plugins/scala/annotator/ScCharLiteralAnnotatorTest.scala | Scala | apache-2.0 | 349 |
/*
* (c) Copyright 2016 Hewlett Packard Enterprise Development LP
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cogdebugger.ui
import cogdebugger.ProbeManager
import scala.swing._
/** Implements a toolbar with a combo box for controlling the rate at which
* probes refresh. Spawns a new thread to call the manager's readProbes method
* at the set rate. */
class ProbeToolbar(manager: ProbeManager) extends BoxPanel(Orientation.Horizontal) {
import ProbeToolbar._
val rateBox = new ComboBox(rates)
rateBox.selection.item = 20
rateBox.maximumSize = rateBox.preferredSize
listenTo(rateBox.selection)
reactions += {
case event.SelectionChanged(`rateBox`) =>
manager.probeDriver.updatesPerSec = rateBox.selection.item
}
contents += new Label("Frames/sec:")
contents += Swing.HStrut(10)
contents += rateBox
//contents += Swing.HGlue // Take up the extra space
}
object ProbeToolbar {
private val rates = Seq[Double](60, 30, 20, 10, 5, 2, 1, 0.5, 0.1)
} | hpe-cct/cct-core | src/main/scala/cogdebugger/ui/ProbeToolbar.scala | Scala | apache-2.0 | 1,513 |
package exercise.ex2
object SortUtil extends App {
def isSorted[A](as: Array[A], ordered: (A, A) => Boolean): Boolean = {
@annotation.tailrec
def loop(n: Int): Boolean = {
if (n >= as.length - 1) true
else if (ordered(as(n), as(n + 1))) loop(n + 1)
else false
}
loop(0)
}
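  // A minimal usage sketch (the sample arrays below are illustrative, not part of the
  // exercise): `ordered` decides whether each adjacent pair is in order, so the same
  // function handles ascending, descending, or any custom ordering.
  println(isSorted(Array(1, 2, 3), (a: Int, b: Int) => a <= b)) // true
  println(isSorted(Array(3, 1, 2), (a: Int, b: Int) => a <= b)) // false
  println(isSorted(Array("b", "a"), (a: String, b: String) => a >= b)) // true (descending)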
}
| pluselc/fpinscala | src/main/scala/exercise/ex2/SortUtil.scala | Scala | mit | 314 |
import sbt._, Keys._
import bintray.BintrayKeys._
object Publish {
val coreSettings = Seq(
bintrayOrganization := Some("iheartradio"),
bintrayPackageLabels := Seq("play-framework", "swagger", "rest-api", "API", "documentation"),
publishMavenStyle := true,
licenses := Seq("Apache-2.0" -> url("https://www.apache.org/licenses/LICENSE-2.0.html")),
homepage := Some(url("http://iheartradio.github.io/play-swagger")),
scmInfo := Some(ScmInfo(
url("https://github.com/iheartradio/play-swagger"),
"[email protected]:iheartradio/play-swagger.git")),
pomIncludeRepository := { _ ⇒ false },
publishArtifact in Test := false)
val sbtPluginSettings = Seq(
licenses := Seq("Apache-2.0" -> url("https://www.apache.org/licenses/LICENSE-2.0.html")),
publishMavenStyle := false,
bintrayOrganization := Some("iheartradio"))
}
| kbedel/play-swagger | project/Publish.scala | Scala | apache-2.0 | 871 |
package dotty.tools.dotc.config
object Printers {
class Printer {
def println(msg: => String): Unit = System.out.println(msg)
def echo[T](msg: => String, value: T): T = { println(msg + value); value }
}
object noPrinter extends Printer {
override def println(msg: => String): Unit = ()
override def echo[T](msg: => String, value: T): T = value
}
val default: Printer = new Printer
val core: Printer = noPrinter
val typr: Printer = noPrinter
val constr: Printer = noPrinter
val checks: Printer = noPrinter
val overload: Printer = noPrinter
val implicits: Printer = noPrinter
val implicitsDetailed: Printer = noPrinter
val subtyping: Printer = noPrinter
val unapp: Printer = noPrinter
val gadts = noPrinter
val hk = noPrinter
val variances = noPrinter
val incremental = noPrinter
val config = noPrinter
val transforms = noPrinter
val completions = noPrinter
val cyclicErrors = noPrinter
val pickling = noPrinter
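  // Usage sketch (the printer names are the ones above; the messages are illustrative):
  // because `msg` is passed by name, a call such as
  //   typr.println(s"inferred type: $tpe")
  // evaluates nothing when `typr` is `noPrinter`, and `echo` can trace a value while
  // returning it unchanged:
  //   val tp = typr.echo("computed type: ", computeType())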
}
| reactormonk/dotty | src/dotty/tools/dotc/config/Printers.scala | Scala | bsd-3-clause | 978 |
package bbc.locator
import scala.concurrent.duration._
import io.gatling.core.Predef._
import io.gatling.http.Predef._
class ReverseGeocode extends Simulation {
val httpProtocol = http
.baseURL("https://open.stage.bbc.co.uk")
.acceptHeader("application/xml")
val longLat = csv("locator/ds_030-points-21000-3dp.csv").circular
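  // The CSV feeder above is assumed to have a header row with `latitude` and `longitude`
  // columns; `.circular` recycles the rows so the feed never runs out, and the values fill
  // the ${latitude}/${longitude} placeholders in the request below.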
val scn = scenario("Reverse Geocode")
.feed(longLat)
.exec(http("Reverse Geocode")
.get("/locator/locations?la=${latitude}&longitude=${longitude}")
.check(status.is(200)))
setUp(scn.inject(
rampUsersPerSec(10) to(500) during(10 minutes),
constantUsersPerSec(500) during(20 minutes)
).protocols(httpProtocol))
}
| bestscarper/gatling-load-tests | src/test/scala/bbc/locator/ReverseGeocode.scala | Scala | apache-2.0 | 686 |
package scife
package enumeration
package benchmarks
import org.scalameter.api._
import org.scalameter.reporting.DsvReporter
import scife.{ enumeration => e }
import dependent._
import scife.util._
import scife.util.logging._
trait DependentMemoizedBenchmark[I, DepEnumType] extends PerformanceTest.OfflineReport
with java.io.Serializable with HasLogger {
import structures._
import memoization.MemoizationScope
import memoization.scope._
// @transient override lazy val reporter = new DsvReporter(',')
def getScope = new AccumulatingScope
val defaultContext =
suite.BenchmarkSuite.contextMinimal
//Context.empty
def fixtureRun(
benchmarkMainName: String,
name: String,
maxSize: Int,
run: String,
maxSizeWarmup: Option[Int] = None,
memScope: MemoizationScope = getScope
// ,
// constructEnumerator: MemoizationScope => DepEnumType = (ms: MemoizationScope) => this.constructEnumerator(ms),
// generator: Int => Gen[I] = this.generator,
// warmUp: (DepEnumType, Int) => Any = this.warmUp,
// measureCode: DepEnumType => I => Any = this.measureCode,
// setUp: (I, DepEnumType, MemoizationScope) => Any = this.setUpFixed
)(
implicit configArguments: org.scalameter.Context = defaultContext
) = {
require(name != null)
val warmupSize = maxSizeWarmup.getOrElse(maxSize)
performance of benchmarkMainName in {
measure method run in {
// val memScope = new AccumulatingScope
val enumerator = constructEnumerator(memScope)
using( generator(maxSize) ) config (
configArguments
) curve (name) warmUp {
System.gc()
System.gc()
warmUp(enumerator, warmupSize)
System.gc()
System.gc()
} setUp {
setUpFixed(_, enumerator, memScope)
} tearDown {
tearDownFixed(_, enumerator, memScope)
} in measureCode( enumerator )
}
}
}
def fixture(
benchmarkMainName: String,
name: String,
maxSize: Int,
maxSizeWarmup: Option[Int] = None,
memScope: MemoizationScope = getScope
// ,
// constructEnumerator: MemoizationScope => DepEnumType = (ms: MemoizationScope) => this.constructEnumerator(ms),
// generator: Int => Gen[I] = this.generator,
// warmUp: (DepEnumType, Int) => Any = this.warmUp,
// measureCode: DepEnumType => I => Any = this.measureCode,
// setUp: (I, DepEnumType, MemoizationScope) => Any = this.setUpFixed
)(
implicit configArguments: org.scalameter.Context = defaultContext
) = {
require(name != null)
val warmupSize = maxSizeWarmup.getOrElse(maxSize)
performance of benchmarkMainName in {
// val memScope = new AccumulatingScope
val enumerator = constructEnumerator(memScope)
using( generator(maxSize) ) config (
configArguments
) curve (name) warmUp {
System.gc()
System.gc()
warmUp(enumerator, warmupSize)
System.gc()
System.gc()
} setUp {
setUpFixed(_, enumerator, memScope)
} tearDown {
tearDownFixed(_, enumerator, memScope)
} in measureCode( enumerator )
}
}
// def getUsing(generator: Gen[I], enumerator: DepEnumType, memScope: MemoizationScope): super.Using[I] =
// using(generator) config (
// exec.jvmcmd -> javaCommand,
// exec.jvmflags -> JVMFlags.mkString(" ")
// ) curve (name) warmUp {
// warmUp(enumerator)
// } setUp {
// setUpFixed(_, enumerator, memScope)
// } tearDown {
// tearDownFixed(_, enumerator, memScope)
// }
def measureCode(tdEnum: DepEnumType): I => _
def generator(maxSize: Int): Gen[I]
def warmUp(tdEnum: DepEnumType, maxSize: Int): Any
def setUp(i: I, tdEnum: DepEnumType, memScope: MemoizationScope) {}
def setUpFixed(i: I, tdEnum: DepEnumType, memScope: MemoizationScope) {
setUp(i: I, tdEnum: DepEnumType, memScope: MemoizationScope)
System.gc
System.gc
System.gc
memScope.clear
System.gc
System.gc
System.gc
info("[DependentBenchmark:] Begin run")
}
def tearDown(i: I, tdEnum: DepEnumType, memScope: MemoizationScope): Unit = {}
final def tearDownFixed(i: I, tdEnum: DepEnumType, memScope: MemoizationScope) {
tearDown(i, tdEnum, memScope)
System.gc
System.gc
info("[DependentBenchmark:] End run")
}
def constructEnumerator(implicit ms: MemoizationScope): DepEnumType
// @transient override lazy val reporter = new LoggingReporter
}
| kaptoxic/SciFe | src/test/scala/scife/enumeration/benchmarks/DependentBenchmark.scala | Scala | gpl-2.0 | 4,547 |
package app
import service._
import jp.sf.amateras.scalatra.forms._
import util.Implicits._
import util.StringUtil._
import util.Keys
class SignInController extends SignInControllerBase with SystemSettingsService with AccountService
trait SignInControllerBase extends ControllerBase { self: SystemSettingsService with AccountService =>
case class SignInForm(userName: String, password: String)
val form = mapping(
"userName" -> trim(label("Username", text(required))),
"password" -> trim(label("Password", text(required)))
)(SignInForm.apply)
get("/signin"){
val redirect = params.get("redirect")
if(redirect.isDefined && redirect.get.startsWith("/")){
session.setAttribute(Keys.Session.Redirect, redirect.get)
}
html.signin(loadSystemSettings())
}
post("/signin", form){ form =>
authenticate(loadSystemSettings(), form.userName, form.password) match {
case Some(account) => signin(account)
case None => redirect("/signin")
}
}
get("/signout"){
session.invalidate
redirect("/")
}
/**
* Set account information into HttpSession and redirect.
*/
private def signin(account: model.Account) = {
session.setAttribute(Keys.Session.LoginAccount, account)
updateLastLoginDate(account.userName)
session.getAndRemove[String](Keys.Session.Redirect).map { redirectUrl =>
if(redirectUrl.replaceFirst("/$", "") == request.getContextPath){
redirect("/")
} else {
redirect(redirectUrl)
}
}.getOrElse {
redirect("/")
}
}
} | loveshell/gitbucket | src/main/scala/app/SignInController.scala | Scala | apache-2.0 | 1,636 |
object igame_stub {
def main(args: Array[String]) {
// Put code here
}
}
| LoyolaChicagoBooks/introcs-scala-examples | igame_stub/igame_stub.scala | Scala | gpl-3.0 | 81 |
package blog
import skinny.orm._, feature._
import scalikejdbc._
import org.joda.time._
case class Tag(
id: Long,
name: String,
createdAt: DateTime,
updatedAt: Option[DateTime] = None
)
object Tag extends SkinnyCRUDMapper[Tag] with TimestampsFeature[Tag] {
override val connectionPoolName = Symbol("blog")
override val tableName = "tags"
override val defaultAlias = createAlias("t")
override def extract(rs: WrappedResultSet, rn: ResultName[Tag]): Tag = autoConstruct(rs, rn)
}
| skinny-framework/skinny-framework | orm/src/test/scala/blog/Tag.scala | Scala | mit | 521 |
package com.geishatokyo.sqlgen.validator
import com.geishatokyo.sqlgen.SQLGenException
import com.geishatokyo.sqlgen.loader.CSVLoader
import com.geishatokyo.sqlgen.meta.{ColumnMeta, ExportStrategy, Metadata, SheetMeta}
import org.scalatest.{FlatSpec, Matchers}
/**
* Created by takezoux2 on 2017/07/06.
*/
class MetadataValidatorTest extends FlatSpec with Matchers {
class MyValidator extends MetadataValidator {
override def metadataName: String = "MySQL"
}
it should "throw exception if field not found" in {
val meta = Metadata("MySQL",List(
SheetMeta("User", List(
ColumnMeta("id"),
ColumnMeta("nickname"),
ColumnMeta("age"),
ColumnMeta("loginTime")
))
))
meta.columnNotFoundExportStrategy = ExportStrategy.ThrowException
    // The loginTime field is missing from the CSV below
val wb = new CSVLoader().loadFromString("User",
"""id,nickname,age
|1,hoge,23
|2,fuga,26
""".stripMargin)
wb.addMetadata(meta)
assertThrows[SQLGenException] {
new MyValidator().applyMetadata(wb)
}
}
}
| geishatokyo/sql-generator | src/test/scala/com/geishatokyo/sqlgen/validator/MetadataValidatorTest.scala | Scala | mit | 1,113 |
/* Copyright (C) 2008-2014 University of Massachusetts Amherst.
This file is part of "FACTORIE" (Factor graphs, Imperative, Extensible)
http://factorie.cs.umass.edu, http://github.com/factorie
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */
package cc.factorie.directed
import cc.factorie.model._
import cc.factorie.variable.{MutableVar, Var}
trait DirectedFactor extends Factor {
type ChildType <: Var
def child: ChildType
def parents: Seq[Var]
//def pr(s:StatisticsType): Double
def pr: Double // = pr(statistics)
//def logpr(s:StatisticsType): Double = math.log(pr(s))
def logpr: Double = math.log(pr) // logpr(statistics)
//def score: Double = logpr
//def sampledValue(s:StatisticsType): Any
def sampledValue(implicit random: scala.util.Random): Any // = sampledValue(statistics)
// TODO Consider removing these methods because we'd have specialized code in the inference recipes.
/** Update sufficient statistics in collapsed parents, using current value of child, with weight. Return false on failure. */
// TODO Consider passing a second argument which is the value of the child to use in the update
def updateCollapsedParents(weight:Double): Boolean = throw new Error(factorName+": Collapsing parent not implemented in " + this.getClass.getName)
def updateCollapsedChild(): Boolean = throw new Error(factorName+": Collapsing child not implemented.")
def resetCollapsedChild(): Boolean = throw new Error(factorName+": Resetting child not implemented.")
}
class GeneratedVarWrapper[V<:Var](val v:V) {
/** Create a new DirectedFactor, make it the "parent" generating factor for this variable,
and add this new factor to the given model. */
def ~[V2<:Var](partialFactor: V2 => DirectedFactor)(implicit model:MutableDirectedModel): V = {
model += partialFactor(v.asInstanceOf[V2])
v
}
}
class GeneratedMutableVarWrapper[V<:MutableVar](val v:V) {
/** Create a new DirectedFactor, make it the "parent" generating factor for this variable,
add this new factor to the given model,
and also assign the variable a new value randomly drawn from this factor. */
def :~[V2<:Var](partialFactor: V2 => DirectedFactor)(implicit model:MutableDirectedModel, random: scala.util.Random): V = {
model += partialFactor(v.asInstanceOf[V2])
v.set(model.parentFactor(v).sampledValue.asInstanceOf[v.Value])(null)
v
}
}
trait RealGeneratingFactor extends DirectedFactor {
def sampleDouble: Double
def pr(x:Double): Double
def logpr(x:Double): Double
}
trait IntGeneratingFactor extends DirectedFactor {
def sampleInt: Int
def pr(x:Int): Double
def logpr(x:Int): Double
}
abstract class DirectedFactorWithStatistics1[C<:Var](override val _1:C) extends FactorWithStatistics1[C](_1) with DirectedFactor {
type ChildType = C
def child = _1
def parents: Seq[Var] = Nil
def score(v1:C#Value): Double = logpr(v1:C#Value)
def pr(v1:C#Value): Double
def logpr(v1:C#Value): Double = math.log(pr(v1))
def pr: Double = pr(_1.value.asInstanceOf[C#Value])
override def sampledValue(implicit random: scala.util.Random): C#Value
}
abstract class DirectedFactorWithStatistics2[C<:Var,P1<:Var](override val _1:C, override val _2:P1) extends TupleFactorWithStatistics2[C,P1](_1, _2) with DirectedFactor {
type ChildType = C
def child = _1
def parents = Seq(_2)
def score(v1:C#Value, v2:P1#Value): Double = logpr(v1, v2)
def pr(v1:C#Value, v2:P1#Value): Double
def logpr(v1:C#Value, v2:P1#Value): Double = math.log(pr(v1, v2))
def pr: Double = pr(_1.value.asInstanceOf[C#Value], _2.value.asInstanceOf[P1#Value])
def sampledValue(p1:P1#Value)(implicit random: scala.util.Random): C#Value
def sampledValue(implicit random: scala.util.Random): C#Value = sampledValue(_2.value.asInstanceOf[P1#Value])
// TODO Consider this:
//def parents = _2 match { case vars:Vars[Parameter] => vars; case _ => Seq(_2) }
}
abstract class DirectedFactorWithStatistics3[C<:Var,P1<:Var,P2<:Var](override val _1:C, override val _2:P1, override val _3:P2) extends TupleFactorWithStatistics3[C,P1,P2](_1, _2, _3) with DirectedFactor {
type ChildType = C
def child = _1
def parents = Seq(_2, _3)
def score(v1:C#Value, v2:P1#Value, v3:P2#Value): Double = logpr(v1, v2, v3)
def pr(v1:C#Value, v2:P1#Value, v3:P2#Value): Double
def logpr(v1:C#Value, v2:P1#Value, v3:P2#Value): Double = math.log(pr(v1, v2, v3))
def pr: Double = pr(_1.value.asInstanceOf[C#Value], _2.value.asInstanceOf[P1#Value], _3.value.asInstanceOf[P2#Value])
def sampledValue(p1:P1#Value, p2:P2#Value)(implicit random: scala.util.Random): C#Value
def sampledValue(implicit random: scala.util.Random): C#Value = sampledValue(_2.value.asInstanceOf[P1#Value], _3.value.asInstanceOf[P2#Value])
}
abstract class DirectedFactorWithStatistics4[C<:Var,P1<:Var,P2<:Var,P3<:Var](override val _1:C, override val _2:P1, override val _3:P2, override val _4:P3) extends TupleFactorWithStatistics4[C,P1,P2,P3](_1, _2, _3, _4) with DirectedFactor {
type ChildType = C
def child = _1
def parents = Seq(_2, _3, _4)
def score(v1:C#Value, v2:P1#Value, v3:P2#Value, v4:P3#Value): Double = logpr(v1, v2, v3, v4)
def pr(v1:C#Value, v2:P1#Value, v3:P2#Value, v4:P3#Value): Double
def logpr(v1:C#Value, v2:P1#Value, v3:P2#Value, v4:P3#Value): Double = math.log(pr(v1, v2, v3, v4))
def pr: Double = pr(_1.value.asInstanceOf[C#Value], _2.value.asInstanceOf[P1#Value], _3.value.asInstanceOf[P2#Value], _4.value.asInstanceOf[P3#Value])
def sampledValue(p1:P1#Value, p2:P2#Value, p3:P3#Value)(implicit random: scala.util.Random): C#Value
def sampledValue(implicit random: scala.util.Random): C#Value = sampledValue(_2.value.asInstanceOf[P1#Value], _3.value.asInstanceOf[P2#Value], _4.value.asInstanceOf[P3#Value])
}
trait DirectedFamily1[Child<:Var] {
type C = Child
abstract class Factor(override val _1:Child) extends DirectedFactorWithStatistics1[C](_1)
def newFactor(c:C): Factor
def apply(): C => Factor = newFactor(_)
}
trait DirectedFamily2[Child<:Var,Parent1<:Var] {
type C = Child
type P1 = Parent1
abstract class Factor(override val _1:Child, override val _2:Parent1) extends DirectedFactorWithStatistics2[C,P1](_1, _2)
def newFactor(c:C, p1:P1): Factor
def apply(p1: P1): C => Factor = newFactor(_, p1)
}
trait DirectedFamily3[Child<:Var,Parent1<:Var,Parent2<:Var] {
type C = Child
type P1 = Parent1
type P2 = Parent2
abstract class Factor(override val _1:Child, override val _2:Parent1, override val _3:Parent2) extends DirectedFactorWithStatistics3[C,P1,P2](_1, _2, _3)
def newFactor(c:C, p1:P1, p2:P2): Factor
def apply(p1: P1, p2: P2): C => Factor = newFactor(_, p1, p2)
}
trait DirectedFamily4[Child<:Var,Parent1<:Var,Parent2<:Var,Parent3<:Var] {
type C = Child
type P1 = Parent1
type P2 = Parent2
type P3 = Parent3
abstract class Factor(override val _1:Child, override val _2:Parent1, override val _3:Parent2, override val _4:Parent3) extends DirectedFactorWithStatistics4[C,P1,P2,P3](_1, _2, _3, _4)
def newFactor(c:C, p1:P1, p2:P2, p3:P3): Factor
def apply(p1: P1, p2: P2, p3: P3): C => Factor = newFactor(_, p1, p2, p3)
}
| patverga/factorie | src/main/scala/cc/factorie/directed/DirectedFactor.scala | Scala | apache-2.0 | 7,652 |
package repositories.storage.dao.events
import com.google.inject.{Inject, Singleton}
import models.storage.event.EventTypeRegistry.TopLevelEvents
import models.storage.event.EventTypeRegistry.TopLevelEvents.{
MoveNodeType,
MoveObjectType
}
import models.storage.event.move._
import no.uio.musit.MusitResults.MusitResult
import no.uio.musit.repositories.events.EventActions
import no.uio.musit.models._
import no.uio.musit.security.AuthenticatedUser
import play.api.Logger
import play.api.db.slick.DatabaseConfigProvider
import repositories.storage.dao.LocalObjectDao
import scala.concurrent.{ExecutionContext, Future}
@Singleton
class MoveDao @Inject()(
implicit
val dbConfigProvider: DatabaseConfigProvider,
val localObjectsDao: LocalObjectDao,
val ec: ExecutionContext
) extends StorageEventTableProvider
with EventActions
with StorageFacilityEventRowMappers[MoveEvent] {
val logger = Logger(classOf[MoveDao])
import profile.api._
/**
* Writes a new MoveEvent to the database table
*
* @param mid the MuseumId associated with the event
* @param moveEvent the MoveEvent to save
* @tparam A the type of MoveEvent to save
* @return the EventId given the event
*/
def insert[A <: MoveEvent](
mid: MuseumId,
moveEvent: A
)(implicit currUsr: AuthenticatedUser): Future[MusitResult[EventId]] =
moveEvent match {
case mn: MoveNode =>
insertEvent(mid, mn)(asRow)
case mo: MoveObject =>
insertEventWithAdditional(mid, mo)(asRow) { (event, eid) =>
localObjectsDao.storeLatestMoveAction(mid, eid, event)
}
}
/**
* Add several move events in one transactional batch.
*
* @param mid the MuseumId associated with the event
* @param moveEvents the MoveNode events to save
* @return the EventIds given to the saved events
*/
def batchInsertNodes(
mid: MuseumId,
moveEvents: Seq[MoveNode]
)(implicit currUsr: AuthenticatedUser): Future[MusitResult[Seq[EventId]]] = {
insertBatch[MoveNode](mid, moveEvents)((mid, row) => asRow(mid, row))
}
def batchInsertObjects(
mid: MuseumId,
moveEvents: Seq[MoveObject]
)(implicit currUsr: AuthenticatedUser): Future[MusitResult[Seq[EventId]]] = {
insertBatchWithAdditional(mid, moveEvents)((mid, row) => asRow(mid, row)) {
case (event, eid) =>
localObjectsDao.storeLatestMoveAction(mid, eid, event)
}
}
/**
* Find the MoveEvent with the given EventId
*
* @param mid the MuseumId associated with the event
* @param id the ID to lookup
* @return the MoveEvent that might be found
*/
def findById(
mid: MuseumId,
id: EventId
)(implicit currUsr: AuthenticatedUser): Future[MusitResult[Option[MoveEvent]]] =
findEventById[MoveEvent](mid, id) { row =>
TopLevelEvents.unsafeFromId(row._2) match {
case MoveNodeType =>
fromRow(row._1, row._6, row._9.flatMap(StorageNodeId.fromString), row._12)
case MoveObjectType =>
fromRow(row._1, row._6, row._9.flatMap(ObjectUUID.fromString), row._12)
case _ =>
None
}
}
/**
* List all MoveNode events for the given nodeId.
*
* @param mid the MuseumId associated with the nodeId and MoveNode
* @param nodeId the nodeId to find MoveNode for
* @param limit the number of results to return, defaults to all.
* @return a list of MoveNode
*/
def listForNode(
mid: MuseumId,
nodeId: StorageNodeId,
limit: Option[Int] = None
)(implicit currUsr: AuthenticatedUser): Future[MusitResult[Seq[MoveNode]]] =
listEvents[MoveNode, StorageNodeId](
mid,
nodeId,
MoveNodeType.id,
limit
)(
row =>
fromRow(row._1, row._6, row._9.flatMap(StorageNodeId.fromString), row._12)
.flatMap[MoveNode] {
case mn: MoveNode => Some(mn)
case mo: MoveObject => None
}
)
/**
* List all MoveObject events for the given objectUUID.
*
* @param mid the MuseumId associated with the objectUUID and MoveObject
   * @param objectUUID the ObjectUUID to find MoveObject events for
* @param limit the number of results to return, defaults to all.
* @return a list of MoveObject
*/
def listForObject(
mid: MuseumId,
objectUUID: ObjectUUID,
limit: Option[Int] = None
)(implicit currUsr: AuthenticatedUser): Future[MusitResult[Seq[MoveObject]]] =
listEvents[MoveObject, ObjectUUID](
mid,
objectUUID,
MoveObjectType.id,
limit
)(
row =>
fromRow(row._1, row._6, row._9.flatMap(ObjectUUID.fromString), row._12)
.flatMap[MoveObject] {
case mn: MoveNode => None
case mo: MoveObject => Some(mo)
}
)
}
| MUSIT-Norway/musit | service_backend/app/repositories/storage/dao/events/MoveDao.scala | Scala | gpl-2.0 | 4,820 |
package org.gark87.yajom.macros
import org.gark87.yajom.base.BaseMapper
import language.experimental.macros
import scala.reflect.macros.Context
object Facade {
def yajomMap[T, F, M <: BaseMapper[_]](setter: (T) => _)(from: F)(implicit m: M): Unit = macro yajomMapImpl[T, F, M]
def yajom[T, M <: BaseMapper[_]](setter: (T) => _)(from: T)(implicit m: M): Unit = macro yajomImpl[T, M]
def yajomOption[T, M <: BaseMapper[_]](setter: (T) => _)(from: Option[T])(implicit m: M): Unit = macro optionImpl[T, M]
def createOnNull[F, M <: BaseMapper[_]](func: F)(implicit m: M): F = macro CreateOnNull.macroImpl[F, M]
def yajomImpl[T: c.WeakTypeTag, M <: BaseMapper[_]](c: reflect.macros.Context)(setter: c.Expr[(T) => _])(from: c.Expr[T])(m: c.Expr[M])
: c.Expr[Unit] = {
import c.universe._
val y = new YajomContext(c)
val objectFactoryType = m.actualType.asInstanceOf[TypeRef].args.head.asInstanceOf[y.c.Type]
val guards = y.c.Expr[(T) => _](y.createOnNull.process(y)(setter.asInstanceOf[y.c.Expr[(T) => _]], objectFactoryType))
c.Expr[Unit](reify {
import scala.reflect.ClassTag
guards.splice(from.splice)
}.tree)
}
def yajomMapImpl[T: c.WeakTypeTag, F: c.WeakTypeTag, M <: BaseMapper[_]](c: reflect.macros.Context)(setter: c.Expr[(T) => _])(from: c.Expr[F])(m: c.Expr[M])
: c.Expr[Unit] = {
import c.universe._
val thisRef = m match {
case Expr(Select(a, _)) => a
case _ => c.abort(c.enclosingPosition, "OOPS")
}
val fromType = from.actualType
val toType = setter.actualType match {
case TypeRef(_, _, q) => q.head
case _ => c.abort(c.enclosingPosition, "OOPS")
}
val y = new YajomContext(c)
val objectFactoryType = m.actualType.asInstanceOf[TypeRef].args.head.asInstanceOf[y.c.Type]
val freshName = newTermName(c.fresh("fromValue$"))
val nonTrivialName = newTermName(c.fresh("nonTrivial$"))
val freshIdent = c.Expr[Any](Ident(freshName))
val mapCall = c.Expr[Any](Apply(Select(thisRef, newTermName("map")), List(Ident(freshName), Ident(nonTrivialName))))
def createVal(name: TermName, value: Expr[_], tpe: Type) = ValDef(Modifiers(), name, TypeTree(tpe), value.tree)
val fromValueCalc = createVal(freshName, reify {
from.splice
}, fromType)
val defaultObject = y.creator.createDefaultObject(y)(toType.asInstanceOf[y.c.Type], objectFactoryType)
val nonTrivialCall = createVal(nonTrivialName, c.Expr[Any](defaultObject.asInstanceOf[c.Tree]), toType)
val nonTrivialIdent = c.Expr[T](Ident(nonTrivialName))
val mapCall2 = reify {
import scala.reflect.ClassTag
if (nonTrivialIdent.splice != null) {
mapCall.splice
setter.splice(nonTrivialIdent.splice)
}
}.tree
c.Expr[Unit](
Block(fromValueCalc, nonTrivialCall, mapCall2)
)
}
def optionImpl[T: c.WeakTypeTag, M <: BaseMapper[_]](c: reflect.macros.Context)(setter: c.Expr[(T) => _])(from: c.Expr[Option[T]])(m: c.Expr[M])
: c.Expr[Unit] = {
import c.universe._
val y = new YajomContext(c)
val objectFactoryType: y.c.Type = m.actualType.asInstanceOf[TypeRef].args.head.asInstanceOf[y.c.Type]
val guards = y.c.Expr[(T) => _](y.createOnNull.process(y)(setter.asInstanceOf[y.c.Expr[(T) => _]], objectFactoryType))
c.Expr[Unit](reify {
import scala.reflect.ClassTag
val option = from.splice
option match {
case Some(x) => guards.splice(x)
case None => null
}
}.tree)
}
}
| gark87/yajom | yajom-macros/src/main/scala/org/gark87/yajom/macros/Facade.scala | Scala | mit | 3,505 |
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** @author John Miller, Zhe Jin
* @version 1.2
* @date Sat Jul 2 01:27:00 EDT 2016
* @see LICENSE (MIT style license file).
*/
package scalation.analytics.classifier
import scala.collection.mutable
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `TabuFeatures` keeps track of pairs of features, so they are not
* repeatedly tried.
*/
class TabuFeatures
{
private val tabu = new mutable.Queue [Tuple2 [Int, Int]] () // the tabu list used in feature swapping
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Add the 'j'th and 'j+1'th element of 'featureOrder' into the tabu list.
* If the tabu list is too large, remove the oldest element.
* @param feature1 'j'th element of 'featureOrder'
* @param feature2 'j+1'th element of 'featureOrder'
* @param cutOff tabu list size
*/
def addTaboo(feature1: Int, feature2: Int, cutOff: Int)
{
tabu.enqueue ((feature1, feature2))
if (tabu.size > cutOff) tabu.dequeue
} // addTaboo
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Check if the 'j'th and 'j+1'th element of 'featureOrder' are in the tabu list.
* @param feature1 'j'th element of 'featureOrder'
* @param feature2 'j+1'th element of 'featureOrder'
*/
def notInTaboo(feature1: Int, feature2: Int): Boolean =
{
! (tabu contains (feature1, feature2)) && ! (tabu contains (feature2, feature1))
} // notInTaboo
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Clear all features.
*/
def clear () { tabu.clear () }
} // TabuFeatures class
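//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
// Usage sketch (feature indices below are illustrative, not from the codebase):
// val tabu = new TabuFeatures
// tabu.addTaboo (2, 5, cutOff = 10) // remember the most recent swap
// tabu.notInTaboo (5, 2) // false: argument order does not matter
// tabu.notInTaboo (1, 3) // true: this pair has not been tried yet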
| NBKlepp/fda | scalation_1.2/src/main/scala/scalation/analytics/classifier/TabuFeatures.scala | Scala | mit | 1,813 |
/*
* Copyright 2010-2014 WorldWide Conferencing, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.liftweb
package mongodb
package record
package field
import java.util.regex.Pattern
import scala.xml.NodeSeq
import net.liftweb.common.{Box, Empty, Failure, Full}
import net.liftweb.http.js.JE.{JsNull, Str}
import net.liftweb.json.JsonAST._
import net.liftweb.json.{JsonParser, Printer}
import net.liftweb.mongodb.record._
import net.liftweb.record.{Field, FieldHelpers, MandatoryTypedField}
import net.liftweb.util.Helpers.tryo
class PatternField[OwnerType <: BsonRecord[OwnerType]](rec: OwnerType)
extends Field[Pattern, OwnerType]
with MandatoryTypedField[Pattern]
{
def owner = rec
def defaultValue = Pattern.compile("")
def setFromAny(in: Any): Box[Pattern] = in match {
case p: Pattern => setBox(Full(p))
case Some(p: Pattern) => setBox(Full(p))
case Full(p: Pattern) => setBox(Full(p))
case (p: Pattern) :: _ => setBox(Full(p))
case s: String => setFromString(s)
case Some(s: String) => setFromString(s)
case Full(s: String) => setFromString(s)
case null|None|Empty => setBox(defaultValueBox)
case f: Failure => setBox(f)
case o => setFromString(o.toString)
}
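  // Shape sketch of the stored JSON (illustrative): a value such as
  // Pattern.compile("^a.*", Pattern.CASE_INSENSITIVE) is expected to round-trip as
  // { "$regex": "^a.*", "$flags": 2 }, where $flags carries the java.util.regex.Pattern flag bits.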
def setFromJValue(jvalue: JValue): Box[Pattern] = jvalue match {
case JNothing|JNull if optional_? => setBox(Empty)
case JObject(JField("$regex", JString(s)) :: JField("$flags", JInt(f)) :: Nil) =>
setBox(Full(Pattern.compile(s, f.intValue)))
case other => setBox(FieldHelpers.expectedA("JObject", other))
}
// parse String into a JObject
def setFromString(in: String): Box[Pattern] = tryo(JsonParser.parse(in)) match {
case Full(jv: JValue) => setFromJValue(jv)
case f: Failure => setBox(f)
case other => setBox(Failure("Error parsing String into a JValue: "+in))
}
def toForm: Box[NodeSeq] = Empty
def asJs = asJValue match {
case JNothing => JsNull
case jv => Str(Printer.compact(render(jv)))
}
def asJValue: JValue = valueBox.map(v => JsonRegex(v)) openOr (JNothing: JValue)
}
| sortable/framework | persistence/mongodb-record/src/main/scala/net/liftweb/mongodb/record/field/PatternField.scala | Scala | apache-2.0 | 2,573 |
// Wei Chen - K Mean Cluster Test
// 2016-06-04
import com.scalaml.TestData._
import com.scalaml.general.MatrixFunc._
import com.scalaml.algorithm.KMean
import org.scalatest.funsuite.AnyFunSuite
class KMeanSuite extends AnyFunSuite {
val km = new KMean()
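    // In the configs below, `k` is the requested number of clusters and `iter` the
    // iteration budget for the clustering run (values taken from the tests themselves).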
test("KMean Test : Clustering Tiny Data") {
assert(km.clear())
assert(km.config(Map("k" -> 2, "iter" -> 100)))
val result = km.cluster(UNLABELED_TINY_DATA)
assert(arrayequal(result, LABEL_TINY_DATA))
}
test("KMean Test : Clustering Small Data") {
assert(km.clear())
assert(km.config(Map("k" -> 2, "iter" -> 100)))
val result = km.cluster(UNLABELED_SMALL_DATA)
assert(arrayequal(result, LABEL_SMALL_DATA))
}
test("KMean Test : Clustering Large Data - WRONG") {
assert(km.clear())
assert(km.config(Map("k" -> 2, "iter" -> 100)))
val result = km.cluster(UNLABELED_LARGE_DATA)
assert(!arrayequal(result, LABEL_LARGE_DATA))
}
test("KMean Test : Invalid Config") {
assert(km.clear())
assert(!km.config(Map("k" -> "test")))
}
}
| Wei-1/Scala-Machine-Learning | src/test/scala/algorithm/clustering/KMeanTest.scala | Scala | mit | 1,130 |
/*
* Copyright 2013 Akiyoshi Sugiki, University of Tsukuba
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kumoi.shell.aaa.bill
import kumoi.shell.aaa._
import kumoi.shell.aaa.ops._
import kumoi.shell.aaa.resource._
import kumoi.shell.or._
import kumoi.shell.cache._
/**
*
* @author Akiyoshi SUGIKI
*/
@billtype
@remote trait ColdRate extends ORMapper[ColdRate] {
//@invalidate(Array("name")) @nocache @update def name_=(n: String)
@invalidate(Array("name")) @nocache @update def name_=(na: (String, AAA))
} | axi-sugiki/kumoi | src/kumoi/shell/aaa/bill/ColdRate.scala | Scala | apache-2.0 | 1,039 |
/*
* Copyright 2020 Precog Data
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar.frontend.logicalplan
import slamdata.Predef._
import quasar._
import quasar.common.{CIName, JoinType, SortDir}
import quasar.common.data.Data
import quasar.contrib.cats.stateT._
import quasar.contrib.pathy._
import quasar.fp.ski._
import quasar.frontend.{logicalplan => lp}, lp.{LogicalPlan => LP}
import scala.Symbol
import cats.data.State
import matryoshka._
import matryoshka.implicits._
import scalaz.{State => _, _}, Scalaz._
import shapeless.{nat, Nat, Sized}
import shims.monadToScalaz
final class LogicalPlanR[T](implicit TR: Recursive.Aux[T, LP], TC: Corecursive.Aux[T, LP]) {
import quasar.std.StdLib, StdLib._
import quasar.time.TemporalPart
def read(path: FPath) = lp.read[T](path).embed
def constant(data: Data) = lp.constant[T](data).embed
def invoke[N <: Nat](func: GenericFunc[N], values: Func.Input[T, N]) =
Invoke(func, values).embed
def invoke1(func: GenericFunc[nat._1], v1: T) =
invoke[nat._1](func, Func.Input1(v1))
def invoke2(func: GenericFunc[nat._2], v1: T, v2: T) =
invoke[nat._2](func, Func.Input2(v1, v2))
def invoke3(func: GenericFunc[nat._3], v1: T, v2: T, v3: T) =
invoke[nat._3](func, Func.Input3(v1, v2, v3))
def joinSideName(name: Symbol) = lp.joinSideName[T](name).embed
def join(left: T, right: T, tpe: JoinType, cond: JoinCondition[T]) =
lp.join(left, right, tpe, cond).embed
def free(name: Symbol) = lp.free[T](name).embed
def let(name: Symbol, form: T, in: T) =
lp.let(name, form, in).embed
def sort(src: T, order: NonEmptyList[(T, SortDir)]) =
lp.sort(src, order).embed
def temporalTrunc(part: TemporalPart, src: T) =
lp.temporalTrunc(part, src).embed
object ArrayInflation {
def unapply[N <: Nat](func: GenericFunc[N]): Option[UnaryFunc] =
some(func) collect {
case structural.FlattenArray => structural.FlattenArray
case structural.FlattenArrayIndices => structural.FlattenArrayIndices
case structural.ShiftArray => structural.ShiftArray
case structural.ShiftArrayIndices => structural.ShiftArrayIndices
}
}
object MapInflation {
def unapply[N <: Nat](func: GenericFunc[N]): Option[UnaryFunc] =
some(func) collect {
case structural.FlattenMap => structural.FlattenMap
case structural.FlattenMapKeys => structural.FlattenMapKeys
case structural.ShiftMap => structural.ShiftMap
case structural.ShiftMapKeys => structural.ShiftMapKeys
}
}
// NB: this can't currently be generalized to Binder, because the key type
// isn't exposed there.
def renameƒ[M[_]: Monad](f: Symbol => M[Symbol])
: CoalgebraM[M, LP, (Map[Symbol, Symbol], T)] = {
case (bound, t) =>
t.project match {
case Let(sym, expr, body) =>
f(sym).map(sym1 =>
lp.let(sym1, (bound, expr), (bound + (sym -> sym1), body)))
case Free(sym) =>
lp.free(bound.get(sym).getOrElse(sym)).point[M]
case t => t.strengthL(bound).point[M]
}
}
def rename[M[_]: Monad](f: Symbol => M[Symbol])(t: T): M[T] =
(Map[Symbol, Symbol](), t).anaM[T](renameƒ(f))
def normalizeTempNames(t: T): T =
rename(κ(freshSym[State[Long, ?]]("tmp")))(t).runA(0).value
def bindFree(vars: Map[CIName, T])(t: T): T =
t.cata[T] {
case Free(sym) => vars.get(CIName(sym.name)).getOrElse((Free(sym):LP[T]).embed)
case other => other.embed
}
/** Per the following:
* 1. Successive Lets are re-associated to the right:
* (let a = (let b = x1 in x2) in x3) becomes
* (let b = x1 in (let a = x2 in x3))
* 2. Lets are "hoisted" outside of Invoke nodes:
* (add (let a = x1 in x2) (let b = x3 in x4)) becomes
* (let a = x1 in (let b = x3 in (add x2 x4))
* Note that this is safe only if all bound names are unique; otherwise
* it could create spurious shadowing. normalizeTempNames is recommended.
* NB: at the moment, Lets are only hoisted one level.
*/
@SuppressWarnings(Array("org.wartremover.warts.Equals"))
val normalizeLetsƒ: LP[T] => Option[LP[T]] = {
case Let(b, Embed(Let(a, x1, x2)), x3) =>
lp.let(a, x1, let(b, x2, x3)).some
// TODO generalize the following three `GenericFunc` cases
case InvokeUnapply(func @ UnaryFunc(_, _, _), Sized(a1)) => a1 match {
case Embed(Let(a, x1, x2)) =>
lp.let(a, x1, invoke[nat._1](func, Func.Input1(x2))).some
case _ => None
}
case InvokeUnapply(func @ BinaryFunc(_, _, _), Sized(a1, a2)) => (a1, a2) match {
case (Embed(Let(a, x1, x2)), a2) =>
lp.let(a, x1, invoke[nat._2](func, Func.Input2(x2, a2))).some
case (a1, Embed(Let(a, x1, x2))) =>
lp.let(a, x1, invoke[nat._2](func, Func.Input2(a1, x2))).some
case _ => None
}
// NB: avoids illegally rewriting the continuation
case InvokeUnapply(relations.Cond, Sized(a1, a2, a3)) => (a1, a2, a3) match {
case (Embed(Let(a, x1, x2)), a2, a3) =>
lp.let(a, x1, invoke[nat._3](relations.Cond, Func.Input3(x2, a2, a3))).some
case _ => None
}
case InvokeUnapply(func @ TernaryFunc(_, _, _), Sized(a1, a2, a3)) => (a1, a2, a3) match {
case (Embed(Let(a, x1, x2)), a2, a3) =>
lp.let(a, x1, invoke[nat._3](func, Func.Input3(x2, a2, a3))).some
case (a1, Embed(Let(a, x1, x2)), a3) =>
lp.let(a, x1, invoke[nat._3](func, Func.Input3(a1, x2, a3))).some
case (a1, a2, Embed(Let(a, x1, x2))) =>
lp.let(a, x1, invoke[nat._3](func, Func.Input3(a1, a2, x2))).some
case _ => None
}
case Join(l, r, tpe, cond) =>
(l, r) match {
case (Embed(Let(a, x1, x2)), r) =>
lp.let(a, x1, join(x2, r, tpe, cond)).some
case (l, Embed(Let(a, x1, x2))) =>
lp.let(a, x1, join(l, x2, tpe, cond)).some
case _ => None
}
case t => None
}
def normalizeLets(t: T) = t.transAna[T](repeatedly(normalizeLetsƒ))
/** The set of paths referenced in the given plan. */
def paths(lp: T): ISet[FPath] =
lp.foldMap(_.cata[ISet[FPath]] {
case Read(p) => ISet singleton p
case other => other.fold
})
/** The set of absolute paths referenced in the given plan. */
def absolutePaths(lp: T): ISet[APath] =
paths(lp) foldMap (p => ISet fromFoldable refineTypeAbs(p).swap)
}
| djspiewak/quasar | frontend/src/main/scala/quasar/frontend/logicalplan/LogicalPlanR.scala | Scala | apache-2.0 | 6,935 |
package worker
import scala.collection.immutable.Queue
import akka.actor.Actor
import akka.actor.Terminated
import akka.actor.ActorLogging
import akka.actor.ActorRef
import akka.contrib.pattern.DistributedPubSubExtension
import akka.contrib.pattern.DistributedPubSubMediator
import akka.contrib.pattern.DistributedPubSubMediator.Put
import scala.concurrent.duration.Deadline
import scala.concurrent.duration.FiniteDuration
import akka.actor.Props
object Master {
val ResultsTopic = "results"
def props(workTimeout: FiniteDuration): Props =
Props(classOf[Master], workTimeout)
case class Ack(workId: String)
private sealed trait WorkerStatus
private case object Idle extends WorkerStatus
private case class Busy(work: Work, deadline: Deadline) extends WorkerStatus
private case class WorkerState(ref: ActorRef, status: WorkerStatus, respondTo: ActorRef)
private case object CleanupTick
private case object StatsTick
}
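// Protocol sketch (inferred from the handlers below, not a formal spec): a worker
// registers with RegisterWorker, pulls work with WorkerRequestsWork, and reports
// WorkIsDone or WorkFailed; the master acks accepted WorkRequests with Master.Ack,
// remembers who asked for each job, and sends the WorkResult to that `respondTo`
// actor when the job completes. The periodic CleanupTick re-enqueues work from
// workers that exceed the timeout.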
class Master(workTimeout: FiniteDuration) extends Actor with ActorLogging {
import Master._
import MasterWorkerProtocol._
import scala.concurrent.duration._
val mediator = DistributedPubSubExtension(context.system).mediator
mediator ! Put(self)
private var workers = Map[String, WorkerState]()
//pending work along with who asked for it to be done
private var pendingWork = Queue[Tuple2[ActorRef,Work]]()
private var workIds = Set[String]()
import context.dispatcher
val cleanupTask = context.system.scheduler.schedule(workTimeout / 2, workTimeout / 2,
self, CleanupTick)
val statsTask = context.system.scheduler.schedule(0.seconds, 5.seconds, self, StatsTick)
override def postStop(): Unit = {
cleanupTask.cancel()
statsTask.cancel()
}
def receive = {
case RegisterWorker(workerId) =>
if (workers.contains(workerId)) {
workers += (workerId -> workers(workerId).copy(ref = sender))
} else {
log.debug("Worker registered: {}", workerId)
println("Registering worker" + sender())
workers += (workerId -> WorkerState(sender, status = Idle, respondTo = null))
context.watch(sender)
if (pendingWork.nonEmpty)
sender ! WorkIsReady
}
case Terminated(worker) => {
//this is really ineffecient
//need to look at changing the keys of workers to being ActorRef of the
//worker directly.
var oldKey: String = null
workers.foreach {
case (key,value) =>
if (value.ref == worker) {
oldKey = key
}
}
if (oldKey != null) {
workers -= oldKey
}
}
case WorkerRequestsWork(workerId) =>
if (pendingWork.nonEmpty) {
workers.get(workerId) match {
case Some(s @ WorkerState(_, Idle, _)) =>
val ((respondTo, work), rest) = pendingWork.dequeue
pendingWork = rest
log.debug("Giving worker {} some work {}", workerId, work.job)
// TODO store in Eventsourced
sender ! work
workers += (workerId -> s.copy(status = Busy(work, Deadline.now + workTimeout), respondTo = respondTo))
case _ =>
}
}
case WorkIsDone(workerId, workId, result) =>
workers.get(workerId) match {
case Some(s @ WorkerState(_, Busy(work, _), _)) if work.workId == workId =>
log.debug("Work is done: {} => {} by worker {}", work, result, workerId)
// TODO store in Eventsourced
val respondTo = s.respondTo
workers += (workerId -> s.copy(status = Idle, respondTo = null))
//mediator ! DistributedPubSubMediator.Publish(ResultsTopic, WorkResult(workId, result))
respondTo ! WorkResult(workId, result)
sender ! MasterWorkerProtocol.Ack(workId)
case _ =>
if (workIds.contains(workId)) {
// previous Ack was lost, confirm again that this is done
sender ! MasterWorkerProtocol.Ack(workId)
}
}
case WorkFailed(workerId, workId) =>
workers.get(workerId) match {
case Some(s @ WorkerState(_, Busy(work, _), _)) if work.workId == workId =>
log.info("Work failed: {}", work)
// TODO store in Eventsourced
val respondTo = s.respondTo
workers += (workerId -> s.copy(status = Idle, respondTo = null))
pendingWork = pendingWork enqueue(respondTo->work)
notifyWorkers()
case _ =>
}
case WorkRequest(work, respondTo) =>
// idempotent
if (workIds.contains(work.workId)) {
sender ! Master.Ack(work.workId)
} else {
log.debug("Accepted work: {}", work)
// TODO store in Eventsourced
pendingWork = pendingWork enqueue(respondTo -> work)
workIds += work.workId
sender ! Master.Ack(work.workId)
notifyWorkers()
}
case CleanupTick =>
for ((workerId, s @ WorkerState(_, Busy(work, timeout),_)) <- workers) {
if (timeout.isOverdue) {
log.info("Work timed out: {}", work)
// TODO store in Eventsourced
workers -= workerId
val respondTo = s.respondTo
pendingWork = pendingWork enqueue(respondTo -> work)
notifyWorkers()
}
}
case StatsTick => {
println("Registered workers:" + workers.keySet.size)
println("Pending work size:" + pendingWork.length)
}
}
def notifyWorkers(): Unit =
if (pendingWork.nonEmpty) {
// could pick a few random instead of all
workers.foreach {
case (_, WorkerState(ref, Idle, _)) => ref ! WorkIsReady
case _ => // busy
}
}
// TODO cleanup old workers
// TODO cleanup old workIds
} | sriddell/akka-distributed-workers | src/main/scala/worker/Master.scala | Scala | cc0-1.0 | 5,740 |