code | repo_name | path | language | license | size
---|---|---|---|---|---|
/*
* Copyright (C) 2016 Lightbend Inc. <http://www.lightbend.com>
*/
package controllers
import play.api.{Configuration, Environment}
import play.api.mvc._
import uk.co.turingatemyhamster.shoppinglist.user.api.UserService
import scala.concurrent.{ExecutionContext, Future}
class Application(userService: UserService
)(implicit val config: Configuration,
env: Environment,
executionContext: ExecutionContext)
extends Controller
{
def redirectToLogin = Action {
Redirect("/login")
}
def index = Action.async { implicit request =>
request.body.asFormUrlEncoded.get.get("userId") match {
case None =>
println("Attempted to view main page with no userId")
Future.successful(Redirect("/login"))
case Some(userIds) if userIds.length == 1 =>
val userId = userIds.head
println(s"Viewing main page with one userId `$userId`")
userService.getUser(userId).invoke().map { u =>
println(s"Fetched user `$u`. Loading main page.")
Ok(views.html.index.render(u.userId, "ShoplistR", config, env))
}.recoverWith {
case t =>
println(s"No user found for `$userId`. Caught: $t")
Future.successful(Redirect("/login"))
}
// views generated code is merging parameter lists
case Some(userIds) =>
println("Attempting to view main page with multiple userIDs")
Future.successful(Redirect("/login"))
}
}
def signUp = Action {
Ok(views.html.signUp.render("ShoplistR", config, env))
}
def login = Action {
Ok(views.html.login.render("ShoplistR", config, env))
}
}
| drdozer/shoppinglist | web-ui/app/controllers/Application.scala | Scala | apache-2.0 | 1,686 |
/*                     __                                               *\
**     ________ ___   / /  ___      __ ____  Scala.js Test Suite        **
**    / __/ __// _ | / /  / _ | __ / // __/ (c) 2013, LAMP/EPFL        **
**  __\ \/ /__/ __ |/ /__/ __ |/_// /_\ \    http://scala-js.org/       **
** /____/\___/_/ |_/____/_/ | |__/ /____/                               **
**                          |/____/                                     **
\*                                                                      */
package org.scalajs.testsuite.javalib.lang
import org.junit.Test
import org.junit.Assert._
import org.junit.Assume._
import java.lang.Math
import org.scalajs.testsuite.utils.Platform._
class MathTest {
@Test def abs(): Unit = {
assertEquals(0, Math.abs(0))
assertEquals(42, Math.abs(42))
assertEquals(42, Math.abs(-42))
assertTrue(Math.abs(0.0).equals(0.0))
assertTrue(Math.abs(-0.0).equals(0.0))
assertEquals(42.0, Math.abs(42.0), 0.0)
assertEquals(42.0, Math.abs(-42.0), 0.0)
assertEquals(Double.PositiveInfinity, Math.abs(Double.PositiveInfinity), 0.0)
assertEquals(Double.PositiveInfinity, Math.abs(Double.NegativeInfinity), 0.0)
assertTrue(Math.abs(Double.NaN).isNaN)
assertEquals(Long.MaxValue, Math.abs(Long.MaxValue))
assertEquals(Long.MinValue, Math.abs(Long.MinValue))
}
@Test def max(): Unit = {
assertEquals(0, Math.max(0, 0))
assertEquals(2, Math.max(0, 2))
assertEquals(2, Math.max(2, 0))
assertEquals(2.0, Math.max(0.0, 2.0), 0.0)
assertEquals(2.0, Math.max(2.0, 0.0), 0.0)
assertTrue(Math.max(0.0, 0.0).equals(0.0))
assertTrue(Math.max(-0.0, 0.0).equals(0.0))
assertTrue(Math.max(0.0, -0.0).equals(0.0))
assertTrue(Math.max(-0.0, -0.0).equals(-0.0))
assertEquals(Double.PositiveInfinity, Math.max(Double.PositiveInfinity, 0.0), 0.0)
assertEquals(0.0, Math.max(Double.NegativeInfinity, 0.0), 0.0)
assertTrue(Math.max(Double.NaN, 0.0).isNaN)
assertTrue(Math.max(0.0, Double.NaN).isNaN)
assertEquals(Long.MaxValue, Math.max(Long.MaxValue, 0))
assertEquals(0L, Math.max(Long.MinValue, 0))
}
@Test def min(): Unit = {
assertEquals(0, Math.min(0, 0))
assertEquals(0, Math.min(0, 2))
assertEquals(0, Math.min(2, 0))
assertEquals(0.0, Math.min(0.0, 2.0), 0.0)
assertEquals(0.0, Math.min(2.0, 0.0), 0.0)
assertTrue(Math.min(0.0, 0.0).equals(0.0))
assertTrue(Math.min(-0.0, 0.0).equals(-0.0))
assertTrue(Math.min(0.0, -0.0).equals(-0.0))
assertTrue(Math.min(-0.0, -0.0).equals(-0.0))
assertEquals(0.0, Math.min(Double.PositiveInfinity, 0.0), 0.0)
assertEquals(Double.NegativeInfinity, Math.min(Double.NegativeInfinity, 0.0), 0.0)
assertTrue(Math.min(Double.NaN, 0.0).isNaN)
assertTrue(Math.min(0.0, Double.NaN).isNaN)
assertEquals(0L, Math.min(Long.MaxValue, 0))
assertEquals(Long.MinValue, Math.min(Long.MinValue, 0))
}
@Test def cbrt(): Unit = {
assertTrue(1 / Math.cbrt(-0.0) < 0)
assertEquals(3.0, Math.cbrt(27.0), 0.0)
assertEquals(100.0, Math.cbrt(1000000.0), 0.0)
assertEquals(1000.0, Math.cbrt(1000000000.0), 0.0)
assertEquals(-100000000.0, Math.cbrt(-1.0E24), 0.0)
assertEquals(-4039.0E8, Math.cbrt(-65890311319.0E24), 0.0)
}
@Test def log1p(): Unit = {
assertTrue(Math.log1p(-2.0).isNaN)
assertTrue(Math.log1p(Double.NaN).isNaN)
assertEquals(0.0, Math.log1p(0.0), 0.0)
}
@Test def log10(): Unit = {
assertTrue(Math.log10(-230.0).isNaN)
assertTrue(Math.log10(Double.NaN).isNaN)
}
@Test def signum_for_Double(): Unit = {
assertEquals(1.0, Math.signum(234394.2198273), 0.0)
assertEquals(-1.0, Math.signum(-124937498.58), 0.0)
assertEquals(0.0, Math.signum(+0.0), 0.0)
assertTrue(1 / Math.signum(+0.0) > 0)
assertEquals(-0.0, Math.signum(-0.0), 0.0)
assertTrue(1 / Math.signum(-0.0) < 0)
assertTrue(Math.signum(Double.NaN).isNaN)
}
@Test def signum_for_Float(): Unit = {
assertEquals(1.0f, Math.signum(234394.2198273f), 0.0f)
assertEquals(-1.0f, Math.signum(-124937498.58f), 0.0f)
assertEquals(0.0f, Math.signum(+0.0f), 0.0f)
assertTrue(1 / Math.signum(+0.0f) > 0)
assertEquals(-0.0f, Math.signum(-0.0f), 0.0f)
assertTrue(1 / Math.signum(-0.0f) < 0)
assertTrue(Math.signum(Float.NaN).isNaN)
}
@Test def nextUp_for_Double(): Unit = {
assertEquals(Double.PositiveInfinity, Math.nextUp(Double.PositiveInfinity), 0.0)
assertEquals(Double.MinValue, Math.nextUp(Double.NegativeInfinity), 0.0)
assertEquals(Double.PositiveInfinity, Math.nextUp(Double.MaxValue), 0.0)
assertEquals(-1.7976931348623155e+308, Math.nextUp(-Double.MaxValue), 0.0)
assertEquals(Double.PositiveInfinity, Math.nextUp(-Double.MinValue), 0.0)
assertEquals(Double.MinPositiveValue, Math.nextUp(0.0), 0.0)
assertEquals(Double.MinPositiveValue, Math.nextUp(-0.0), 0.0)
assertEquals(9007199254740992.0, Math.nextUp(9007199254740991.0), 0.0)
assertEquals(9007199254740994.0, Math.nextUp(9007199254740992.0), 0.0)
assertEquals(1 + 2.2204460492503130808472633361816E-16, Math.nextUp(1.0), 0.0)
}
@Test def nextAfter_for_Double(): Unit = {
assertTrue(Math.nextAfter(1.0, Double.NaN).isNaN)
assertTrue(Math.nextAfter(Double.NaN, 1.0).isNaN)
assertEquals(0.0, Math.nextAfter(0.0, 0.0), 0.0)
assertEquals(-0.0, Math.nextAfter(0.0, -0.0), 0.0)
assertEquals(0.0, Math.nextAfter(-0.0, 0.0), 0.0)
assertEquals(-0.0, Math.nextAfter(-0.0, -0.0), 0.0)
assertEquals(Double.NegativeInfinity, Math.nextAfter(Double.MinValue, Double.NegativeInfinity), 0.0)
assertEquals(Double.PositiveInfinity, Math.nextAfter(-Double.MinValue, Double.PositiveInfinity), 0.0)
assertEquals(Double.MaxValue, Math.nextAfter(Double.PositiveInfinity, Double.NegativeInfinity), 0.0)
assertEquals(Double.MinValue, Math.nextAfter(Double.NegativeInfinity, Double.PositiveInfinity), 0.0)
assertEquals(Double.PositiveInfinity, Math.nextAfter(Double.MaxValue, Double.PositiveInfinity), 0.0)
assertEquals(Double.NegativeInfinity, Math.nextAfter(-Double.MaxValue, Double.NegativeInfinity), 0.0)
assertEquals(1.0, Math.nextAfter(1.0, 1.0), 0.0)
}
@Test def ulp_for_Double(): Unit = {
assertEquals(4.440892098500626E-16, Math.ulp(3.4), 0.0)
assertEquals(4.1718496795330275E93, Math.ulp(3.423E109), 0.0)
assertEquals(Double.MinPositiveValue, Math.ulp(0.0), 0.0)
}
@Test def hypot(): Unit = {
assertEquals(0.0, Math.hypot(0.0, 0.0), 0.01)
assertEquals(5.0, Math.hypot(3.0, 4.0), 0.01)
assertTrue(Math.hypot(3.0, Double.NaN).isNaN)
assertEquals(Double.PositiveInfinity, Math.hypot(Double.NegativeInfinity, 4.0), 0.0)
}
@Test def expm1(): Unit = {
assertTrue(1 / Math.expm1(-0.0) < 0)
assertEquals(0.0, Math.expm1(-0.0), 0.01)
assertEquals(19.085536923187668, Math.expm1(3.0), 0.01)
assertEquals(3269016.3724721107, Math.expm1(15.0), 0.01)
assertEquals(Double.PositiveInfinity, Math.expm1(1.8E10), 0.0)
assertEquals(Double.PositiveInfinity, Math.expm1(Double.PositiveInfinity), 0.0)
assertEquals(-1.0, Math.expm1(Double.NegativeInfinity), 0.01)
assertEquals(4.9E-324, Math.expm1(4.9E-324), 0.01)
}
@Test def sinh(): Unit = {
assertEquals(Double.NegativeInfinity, Math.sinh(-1234.56), 0.0)
assertEquals(Double.PositiveInfinity, Math.sinh(1234.56), 0.0)
assertEquals(0.0, Math.sinh(0.0), 0.01)
assertEquals(Double.PositiveInfinity, Math.sinh(Double.PositiveInfinity), 0.0)
}
@Test def cosh(): Unit = {
assertEquals(Double.PositiveInfinity, Math.cosh(-1234.56), 0.0)
assertEquals(Double.PositiveInfinity, Math.cosh(1234.56), 0.0)
assertEquals(1.0, Math.cosh(-0.0), 0.01)
assertEquals(Double.PositiveInfinity, Math.cosh(Double.PositiveInfinity), 0.0)
}
@Test def tanh(): Unit = {
assertEquals(-1.0, Math.tanh(-1234.56), 0.01)
assertEquals(-1.0, Math.tanh(-120.56), 0.01)
assertEquals(1.0, Math.tanh(1234.56), 0.01)
assertEquals(0.0, Math.tanh(0.0), 0.01)
assertEquals(1.0, Math.tanh(Double.PositiveInfinity), 0.01)
assertEquals(-1.0, Math.tanh(Double.NegativeInfinity), 0.01)
}
@Test def rint_for_Double(): Unit = {
// js.Math.round() is buggy on Rhino
assumeFalse("Assumed not executing in Rhino", executingInRhino)
import Math.rint
def isPosZero(x: Double): Boolean =
x == 0.0 && (1.0 / x) == Double.PositiveInfinity
def isNegZero(x: Double): Boolean =
x == 0.0 && (1.0 / x) == Double.NegativeInfinity
// Specials
assertTrue(isPosZero(rint(+0.0)))
assertTrue(isNegZero(rint(-0.0)))
assertEquals(Double.PositiveInfinity, rint(Double.PositiveInfinity), 0.0)
assertEquals(Double.NegativeInfinity, rint(Double.NegativeInfinity), 0.0)
assertTrue(rint(Double.NaN).isNaN)
// Positive values
assertTrue(isPosZero(rint(0.1)))
assertTrue(isPosZero(rint(0.5)))
assertEquals(1.0, rint(0.5000000000000001), 0.0)
assertEquals(1.0, rint(0.999), 0.0)
assertEquals(1.0, rint(1.4999999999999998), 0.0)
assertEquals(2.0, rint(1.5), 0.0)
assertEquals(2.0, rint(2.0), 0.0)
assertEquals(2.0, rint(2.1), 0.0)
assertEquals(2.0, rint(2.5), 0.0)
assertEquals(Double.MaxValue, rint(Double.MaxValue), 0.0)
assertEquals(4503599627370496.0, rint(4503599627370495.5), 0.0) // MaxSafeInt / 2
// Negative values
assertTrue(isNegZero(rint(-0.1)))
assertTrue(isNegZero(rint(-0.5)))
assertEquals(-1.0, rint(-0.5000000000000001), 0.0)
assertEquals(-1.0, rint(-0.999), 0.0)
assertEquals(-1.0, rint(-1.4999999999999998), 0.0)
assertEquals(-2.0, rint(-1.5), 0.0)
assertEquals(-2.0, rint(-2.0), 0.0)
assertEquals(-2.0, rint(-2.1), 0.0)
assertEquals(-2.0, rint(-2.5), 0.0)
assertEquals(Double.MinValue, rint(Double.MinValue), 0.0)
assertEquals(-4503599627370496.0, rint(-4503599627370495.5), 0.0) // -MaxSafeInt / 2
}
}
| lrytz/scala-js | test-suite/shared/src/test/scala/org/scalajs/testsuite/javalib/lang/MathTest.scala | Scala | bsd-3-clause | 9,949 |
package unfiltered.response
/** Tells the binding implementation to treat the request as non-matching */
object Pass extends ResponseFunction[Any] {
type RF = ResponseFunction[Any]
def apply[T](res: HttpResponse[T]) = res
/** Promote given intent to one defined for all requests. Returns
* Pass where given intent is not defined. */
def lift[A, B >: RF](intent: PartialFunction[A,B]) =
onPass(intent, Function.const(Pass) _)
/**
* Similar to PartialFunction#orElse, but is Pass-aware. If intent
* is not defined or returns Pass, the onPass function is attempted.
*
* This function is also implicitly defined as a method of
* PartialFunction when unfiltered.response._ is imported. */
def onPass[A, B >: RF, A1 <: A, B1 >: B](
intent: PartialFunction[A,B],
onPass: PartialFunction[A1, B1]
): PartialFunction[A1, B1] =
new OnPassAttempt(asAttempt(intent), asAttempt(onPass))
/**
* Similar to onPass for partial functions, but for an onPass
* handler that is defined for all requests.
*
* This function is also implicitly defined as a method of
* PartialFunction when unfiltered.response._ is imported. */
def onPass[A, B >: RF, A1 <: A, B1 >: B](
intent: PartialFunction[A,B],
onPass: Function1[A1, B1]
): PartialFunction[A1, B1] =
new OnPassAttempt(asAttempt(intent), new FunctionAttempt(onPass))
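  // Illustrative composition (`usersIntent` and `fallback` are hypothetical
  // intents, not part of this file): try `usersIntent` first; where it is
  // undefined or returns Pass, `fallback` is attempted instead.
  // {{{
  // val combined = Pass.onPass(usersIntent, fallback)
  // }}}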
/**
* Handle the passing and the matching case in new function.
*
* This function is also implicitly defined as a method of
* PartialFunction when unfiltered.response._ is imported. */
def fold[A, B, C](
intent: PartialFunction[A,B],
onPass: A => C,
andThen: (A, B) => C
): PartialFunction[A, C] = new FunctionAttempt(
(a: A) =>
asAttempt(intent).attempt(a).map { b =>
andThen(a, b)
}.getOrElse {
onPass(a)
}
)
private def asAttempt[A,B](pf: PartialFunction[A,B]): Attempt[A,B] =
pf match {
case pa: Attempt[_,_] => pa
case pf: PartialFunction[_,_] => new PartialAttempt(pf)
}
private trait Attempt[-A,+B] extends PartialFunction[A,B]{
def attempt(x: A): Option[B]
}
private trait PassingAttempt[-A,+B] extends Attempt[A,B]{
def attemptWithPass(x: A): Option[B]
def attempt(x: A) = attemptWithPass(x).filter { _ != Pass }
}
private class PartialAttempt[-A,+B](underlying: PartialFunction[A,B])
extends PassingAttempt[A,B] {
val lifted = underlying.lift
def isDefinedAt(x: A) = underlying.isDefinedAt(x)
def apply(x: A) = underlying(x)
def attemptWithPass(x: A) = lifted(x)
}
private class FunctionAttempt[-A,+B](underlying: A => B)
extends PassingAttempt[A,B] {
def isDefinedAt(x: A) = true
def apply(x: A) = underlying(x)
def attemptWithPass(x: A) = Some(underlying(x))
}
private class OnPassAttempt[A,B >: RF,A1 <: A, B1 >: B](
left: Attempt[A,B],
right: Attempt[A1,B1]
) extends Attempt[A1,B1] {
def isDefinedAt(x: A1): Boolean = {
left.isDefinedAt(x) || right.isDefinedAt(x)
}
def apply(x: A1): B1 = {
left.attempt(x) orElse {
right.attempt(x)
} getOrElse {
Pass
}
}
def attempt(x: A1): Option[B1] = {
left.attempt(x).orElse {
right.attempt(x)
}
}
}
}
| omarkilani/unfiltered | library/src/main/scala/response/pass.scala | Scala | mit | 3,295 |
package ipfix
import java.nio.ByteOrder
import akka.util.{ByteString, ByteIterator}
class ByteIterCounter(byteIter: ByteIterator) {
implicit val order = ByteOrder.BIG_ENDIAN
private var cnt = 0
def count = cnt
def hasNext: Boolean = byteIter.hasNext
def next() = { cnt += 1; byteIter.next() }
  def getBytes(num: Int): Array[Byte] = { cnt += num; Array.fill(num)(byteIter.next()) }
def getByte = { cnt += 1; byteIter.getByte }
def getShort = { cnt += 2; byteIter.getShort }
def getInt = { cnt += 4; byteIter.getInt }
def getLong = { cnt += 8; byteIter.getLong }
def getFloat = { cnt += 4; byteIter.getFloat }
  def getDouble = { cnt += 8; byteIter.getDouble }
def getU8AsInt = u8ToInt(getByte)
def getU16AsInt = u16ToInt(getShort)
def getU32AsInt = u32ToInt(getInt)
def getU32AsLong = u32ToLong(getInt)
def getU64AsLong = u64ToLong(getLong)
def u8ToInt(u8: Byte): Int = {
if (u8 < 0) 255 + u8 + 1
else u8
}
def u16ToInt(u16: Short): Int = {
if (u16 < 0) 65535 + u16 + 1
else u16
}
def u32ToInt(u32: Int): Int = {
if (u32 < 0) 2147483647
else u32
}
def u32ToLong(u32: Int): Long = {
if (u32 < 0) 4294967295L + u32 + 1
else u32
}
def u64ToLong(u64: Long): Long = {
if (u64 < 0L) 9223372036854775807L
else u64
}
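  // Illustrative sketch (the two-byte payload is hypothetical): reading an
  // unsigned 16-bit field without sign damage while the counter tracks the
  // number of bytes consumed.
  // {{{
  // val iter = ByteIterCounter(ByteString(0xFF.toByte, 0xFF.toByte))
  // iter.getU16AsInt // 65535 rather than -1
  // iter.count       // 2
  // }}}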
}
object ByteIterCounter {
def apply(byteString: ByteString) = new ByteIterCounter(byteString.iterator)
} | ConnorDillon/ipfix | src/main/scala/ipfix/ByteIterCounter.scala | Scala | gpl-3.0 | 1,410 |
// Copyright: 2010 - 2016 https://github.com/ensime/ensime-server/graphs
// Licence: http://www.gnu.org/licenses/gpl-3.0.en.html
package org.ensime.indexer
import com.google.common.io.ByteStreams
import org.apache.commons.vfs2.FileObject
import scala.tools.scalap.scalax.rules.scalasig._
class ClassfileDepickler(file: FileObject) {
val scalasig: Option[ScalaSig] = depickle
/** Uses scalap to produce a scala reflective view of the classfile */
private def depickle: Option[ScalaSig] = {
val in = file.getContent.getInputStream
try {
val bytes = ByteStreams.toByteArray(in)
val byteCode = ByteCode(bytes)
val classFile = ClassFileParser.parse(byteCode)
ScalaSigParser.parse(classFile)
} catch {
// ClassFileParser fails to parse some JDK class files
case e: Exception => None
} finally in.close()
}
def getTypeAliases: Seq[RawType] = {
scalasig match {
case Some(sig: ScalaSig) =>
sig.symbols.flatMap {
case s: AliasSymbol => Some(RawType(symbolName(s), access(s)))
case _ => None
}
case None => Nil
}
}
private def access(sym: Symbol): Access = {
if (sym.isPrivate) Private
else if (sym.isProtected) Protected
else Public
}
private def symbolName(a: Symbol): String = {
a.parent match {
case Some(s: SymbolInfoSymbol) => symbolName(s) + "$" + a.name
case Some(s: Symbol) => s.toString + "." + a.name
case None => a.name
}
}
}
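// Hedged usage sketch: the path and the Commons VFS wiring below are
// assumptions, not part of this file. Resolve a classfile and list the type
// aliases it declares.
// {{{
// import org.apache.commons.vfs2.VFS
// val classfile = VFS.getManager.resolveFile("file:///tmp/Example.class")
// new ClassfileDepickler(classfile).getTypeAliases.foreach(println)
// }}}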
| j-mckitrick/ensime-sbt | src/sbt-test/ensime-sbt/ensime-server/core/src/main/scala/org/ensime/indexer/ClassfileDepickler.scala | Scala | apache-2.0 | 1,502 |
package se.gigurra.leavu3.datamodel
import com.github.gigurra.heisenberg.MapData._
import com.github.gigurra.heisenberg.{Schema, Parsed}
case class SensorAngles(source: SourceData = Map.empty) extends SafeParsed[SensorAngles.type] {
val azimuth = parse(schema.azimuth).toDegrees
val elevation = parse(schema.elevation).toDegrees
}
object SensorAngles extends Schema[SensorAngles] {
val azimuth = required[Float]("azimuth", default = 0)
val elevation = required[Float]("elevation", default = 0)
}
| GiGurra/leavu3 | src/main/scala/se/gigurra/leavu3/datamodel/SensorAngles.scala | Scala | mit | 512 |
package com.twitter.finagle
import com.twitter.util.{NonFatal, Future, Time}
/**
* A [[Filter]] acts as a decorator/transformer of a [[Service service]].
* It may apply transformations to the input and output of that service:
* {{{
* (* MyService *)
* [ReqIn -> (ReqOut -> RepIn) -> RepOut]
* }}}
* For example, you may have a POJO service that takes Strings and
* parses them as Ints. If you want to expose this as a Network
* Service via Thrift, it is nice to isolate the protocol handling
* from the business rules. Hence you might have a Filter that
* converts back and forth between Thrift structs. Again, your service
* deals with POJOs:
* {{{
* [ThriftIn -> (String -> Int) -> ThriftOut]
* }}}
*
* Thus, a `Filter[A, B, C, D]` converts a `Service[C, D]` to a `Service[A, B]`.
* In other words, it converts a `Service[ReqOut, RepIn]` to a
* `Service[ReqIn, RepOut]`.
*
*/
abstract class Filter[-ReqIn, +RepOut, +ReqOut, -RepIn]
extends ((ReqIn, Service[ReqOut, RepIn]) => Future[RepOut])
{
/**
* This is the method to override/implement to create your own Filter.
*
* @param request the input request type
* @param service a service that takes the output request type and the input response type
*
*/
def apply(request: ReqIn, service: Service[ReqOut, RepIn]): Future[RepOut]
/**
* Chains a series of filters together:
*
* {{{
* myModularService = handleExceptions.andThen(thrift2Pojo.andThen(parseString))
* }}}
*
* '''Note:''' synchronously thrown exceptions in the underlying service are automatically
* lifted into Future.exception.
*
* @param next another filter to follow after this one
*/
def andThen[Req2, Rep2](next: Filter[ReqOut, RepIn, Req2, Rep2]) =
new Filter[ReqIn, RepOut, Req2, Rep2] {
def apply(request: ReqIn, service: Service[Req2, Rep2]) = {
val svc: Service[ReqOut, RepIn] = new Service[ReqOut, RepIn] with Proxy {
// note that while `Service.rescue` could be used here it would
// entail an extra allocation.
def apply(request: ReqOut): Future[RepIn] = {
try {
next(request, service)
} catch {
case NonFatal(e) => Future.exception(e)
}
}
def self = service
override def close(deadline: Time) = service.close(deadline)
override def status = service.status
override def toString() = service.toString()
}
Filter.this.apply(request, svc)
}
}
/**
* Terminates a filter chain in a service. For example,
*
* {{{
* myFilter.andThen(myService)
* }}}
* @param service a service that takes the output request type and the input response type.
*/
def andThen(service: Service[ReqOut, RepIn]): Service[ReqIn, RepOut] = {
val svc = Service.rescue(service)
new Service[ReqIn, RepOut] {
def apply(request: ReqIn) = Filter.this.apply(request, svc)
override def close(deadline: Time) = service.close(deadline)
override def status = service.status
}
}
def andThen(f: ReqOut => Future[RepIn]): ReqIn => Future[RepOut] = {
val service = Service.mk(f)
req => Filter.this.apply(req, service)
}
def andThen(factory: ServiceFactory[ReqOut, RepIn]): ServiceFactory[ReqIn, RepOut] =
new ServiceFactory[ReqIn, RepOut] {
val fn: Service[ReqOut, RepIn] => Service[ReqIn, RepOut] =
svc => Filter.this.andThen(svc)
def apply(conn: ClientConnection): Future[Service[ReqIn, RepOut]] =
factory(conn).map(fn)
def close(deadline: Time) = factory.close(deadline)
override def status = factory.status
override def toString() = factory.toString()
}
/**
* Conditionally propagates requests down the filter chain. This may
* useful if you are statically wiring together filter chains based
* on a configuration file, for instance.
*
* @param condAndFilter a tuple of boolean and filter.
*/
def andThenIf[Req2 >: ReqOut, Rep2 <: RepIn](
condAndFilter: (Boolean, Filter[ReqOut, RepIn, Req2, Rep2])
): Filter[ReqIn, RepOut, Req2, Rep2] =
condAndFilter match {
case (true, filter) => andThen(filter)
case (false, _) => this
}
}
/**
* A [[Filter]] where the request and reply types are the same.
*/
abstract class SimpleFilter[Req, Rep] extends Filter[Req, Rep, Req, Rep]
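// A minimal sketch of a concrete SimpleFilter (illustrative; the class name
// and the printed message are assumptions): time each request and pass the
// response through unchanged.
// {{{
// class TimingFilter[Req, Rep] extends SimpleFilter[Req, Rep] {
//   def apply(request: Req, service: Service[Req, Rep]): Future[Rep] = {
//     val start = Time.now
//     service(request).ensure {
//       println(s"request completed in ${Time.now - start}")
//     }
//   }
// }
// }}}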
object Filter {
implicit def canStackFromSvc[Req, Rep]
: CanStackFrom[Filter[Req, Rep, Req, Rep], Service[Req, Rep]] =
new CanStackFrom[Filter[Req, Rep, Req, Rep], Service[Req, Rep]] {
def toStackable(_role: Stack.Role, filter: Filter[Req, Rep, Req, Rep]) =
new Stack.Module0[Service[Req, Rep]] {
val role = _role
val description = role.name
def make(next: Service[Req, Rep]) = filter andThen next
}
}
implicit def canStackFromFac[Req, Rep]
: CanStackFrom[Filter[Req, Rep, Req, Rep], ServiceFactory[Req, Rep]] =
new CanStackFrom[Filter[Req, Rep, Req, Rep], ServiceFactory[Req, Rep]] {
def toStackable(_role: Stack.Role, filter: Filter[Req, Rep, Req, Rep]) =
new Stack.Module0[ServiceFactory[Req, Rep]] {
val role = _role
val description = role.name
def make(next: ServiceFactory[Req, Rep]) = filter andThen next
}
}
/**
* TypeAgnostic filters are like SimpleFilters but they leave the Rep and Req types unspecified
* until `toFilter` is called.
*/
trait TypeAgnostic {
def toFilter[Req, Rep]: Filter[Req, Rep, Req, Rep]
def andThen(next: TypeAgnostic): TypeAgnostic = new TypeAgnostic {
def toFilter[Req, Rep] = toFilter[Req, Rep].andThen(next.toFilter[Req, Rep])
}
}
def identity[Req, Rep] = new SimpleFilter[Req, Rep] {
override def andThen[Req2, Rep2](next: Filter[Req, Rep, Req2, Rep2]) = next
override def andThen(service: Service[Req, Rep]) = service
override def andThen(factory: ServiceFactory[Req, Rep]) = factory
def apply(request: Req, service: Service[Req, Rep]) = service(request)
}
def mk[ReqIn, RepOut, ReqOut, RepIn](
f: (ReqIn, ReqOut => Future[RepIn]) => Future[RepOut]
): Filter[ReqIn, RepOut, ReqOut, RepIn] = new Filter[ReqIn, RepOut, ReqOut, RepIn] {
def apply(request: ReqIn, service: Service[ReqOut, RepIn]) = f(request, service)
}
/**
* Chooses a filter to apply based on incoming requests. If the given partial
* function is not defined at the request, then the request goes directly to
* the next service.
*
* @param pf a partial function mapping requests to Filters that should
* be applied
*/
def choose[Req, Rep](
pf: PartialFunction[Req, Filter[Req, Rep, Req, Rep]]
): Filter[Req, Rep, Req, Rep] = new Filter[Req, Rep, Req, Rep] {
private[this] val const: (Req => SimpleFilter[Req, Rep]) =
Function.const(Filter.identity[Req, Rep])
def apply(request: Req, service: Service[Req, Rep]): Future[Rep] =
pf.applyOrElse(request, const)(request, service)
}
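  // Hedged usage sketch (`Request`, `Response`, `isAdminPath` and `authFilter`
  // are hypothetical): apply an auth filter only to matching requests; all
  // other requests go straight to the underlying service via Filter.identity.
  // {{{
  // val routed = Filter.choose[Request, Response] {
  //   case req if isAdminPath(req) => authFilter
  // }
  // }}}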
}
| jay-johnson/finagle | finagle-core/src/main/scala/com/twitter/finagle/Filter.scala | Scala | apache-2.0 | 7,083 |
package com.alvinalexander.macspeechserver
object Resources {
// male voices
val ALEX_VOICE = "Alex"
val DANIEL_VOICE = "Daniel"
val LEE_VOICE = "Lee"
val OLIVER_VOICE = "Oliver"
// female voices
val FIONA_VOICE = "Fiona"
val MOIRA_VOICE = "Moira"
val KAREN_VOICE = "Karen"
val KATE_VOICE = "Kate"
val SAMANTHA_VOICE = "Samantha"
val SUSAN_VOICE = "Susan"
val VICKI_VOICE = "Vicki"
// speaking delays (ms)
val END_OF_PARAGRAPH_DELAY = 1000 // was 1000
val END_OF_SENTENCE_DELAY = 150 // was 250
val SEMI_COLON_PAUSE = 400 // was 400
}
/**
* in theory everything below here is something that both the Server and Clients need
*/
case class SpeakSentence(sentence: String, voice: String = "Alex")
//case class SpeakParagraphs(paragrahs: Seq[String], voice: String = "Alex")
//case class SpeakParagraphsWithMultipleVoices(paragrahs: Seq[String], voices: Seq[String])
/**
* a client can/will get this message back from the server when it's finished speaking
*/
case object SentenceSpeakerFinishedSpeaking
/**
* TODO: send this message to the server to get it to stop speaking
*/
//case object StopSpeaking
| alvinj/MacSpeechServer | src/main/scala/com/alvinalexander/macspeechserver/Resources.scala | Scala | gpl-3.0 | 1,234 |
package com.eevolution.context.dictionary.infrastructure.repository
import java.util.UUID
import com.eevolution.context.dictionary.domain._
import com.eevolution.context.dictionary.domain.model.Color
import com.eevolution.context.dictionary.infrastructure.db.DbContext._
import com.eevolution.utils.PaginatedSequence
import com.lightbend.lagom.scaladsl.persistence.jdbc.JdbcSession
import scala.concurrent.{ExecutionContext, Future}
/**
* Copyright (C) 2003-2017, e-Evolution Consultants S.A. , http://www.e-evolution.com
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
* Email: [email protected], http://www.e-evolution.com , http://github.com/EmerisScala
* Created by [email protected] , www.e-evolution.com on 07/11/17.
*/
/**
* Color Repository
* @param session
* @param executionContext
*/
class ColorRepository (session: JdbcSession)(implicit executionContext: ExecutionContext)
extends api.repository.ColorRepository[Color , Int]
with ColorMapping {
def getById(id: Int): Future[Color] = {
Future(run(queryColor.filter(_.colorId == lift(id))).headOption.get)
}
def getByUUID(uuid: UUID): Future[Color] = {
Future(run(queryColor.filter(_.uuid == lift(uuid.toString))).headOption.get)
}
  def getByColorId(id : Int) : Future[List[Color]] = {
    Future(run(queryColor.filter(_.colorId == lift(id))))
  }
def getAll() : Future[List[Color]] = {
Future(run(queryColor))
}
def getAllByPage(page: Int, pageSize: Int): Future[PaginatedSequence[Color]] = {
val offset = page * pageSize
val limit = (page + 1) * pageSize
for {
count <- countColor()
elements <- if (offset > count) Future.successful(Nil)
else selectColor(offset, limit)
} yield {
PaginatedSequence(elements, page, pageSize, count)
}
}
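  // Illustrative call on a hypothetical `colorRepository` instance: fetch the
  // second page of 50 colors (pages are zero-based; the field names follow
  // the constructor call above).
  // {{{
  // colorRepository.getAllByPage(page = 1, pageSize = 50).foreach { p =>
  //   println(s"page ${p.page}: ${p.elements.size} of ${p.count} rows")
  // }
  // }}}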
private def countColor() = {
Future(run(queryColor.size).toInt)
}
  private def selectColor(offset: Int, limit: Int): Future[Seq[Color]] = {
    // `limit` is the exclusive end index, so a page holds `limit - offset` rows
    Future(run(queryColor).drop(offset).take(limit - offset).toSeq)
  }
}
| adempiere/ADReactiveSystem | dictionary-impl/src/main/scala/com/eevolution/context/dictionary/infrastructure/repository/ColorRepository.scala | Scala | gpl-3.0 | 2,621 |
/**
* Copyright 2011-2016 GatlingCorp (http://gatling.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gatling.core.action
import io.gatling.core.stats.StatsEngine
import io.gatling.core.session.{ Expression, Session }
import io.gatling.core.util.NameGen
/**
* A conditional Action
*
* @constructor create an IfAction
* @param condition the condition that decides whether to execute thenNext or elseNext
* @param thenNext the action executed if condition evaluates to true
* @param elseNext the action executed if condition evaluates to false
* @param statsEngine the StatsEngine
* @param next the action executed if condition evaluates to false and elseNext equals None
*/
class If(condition: Expression[Boolean], thenNext: Action, elseNext: Action, val statsEngine: StatsEngine, val next: Action) extends ExitableAction with NameGen {
override val name: String = genName("if")
/**
* Evaluates the condition and decides what to do next
*
* @param session the session of the virtual user
*/
override def execute(session: Session): Unit = recover(session) {
condition(session).map { condition =>
val n = if (condition) thenNext else elseNext
n ! session
}
}
}
| GabrielPlassard/gatling | gatling-core/src/main/scala/io/gatling/core/action/If.scala | Scala | apache-2.0 | 1,738 |
/*
* sbt
* Copyright 2011 - 2018, Lightbend, Inc.
* Copyright 2008 - 2010, Mark Harrah
* Licensed under Apache License 2.0 (see LICENSE)
*/
package sbt
import java.io.{ File, IOException, PrintWriter, StringWriter }
import java.net.InetAddress
import java.time.LocalDateTime
import java.time.format.DateTimeFormatter
import java.time.temporal.ChronoUnit
import java.util.Hashtable
import java.util.concurrent.TimeUnit.NANOSECONDS
import scala.collection.mutable.ListBuffer
import scala.util.Properties
import scala.xml.{ Elem, Node => XNode, XML }
import testing.{
Event => TEvent,
NestedTestSelector,
Status => TStatus,
OptionalThrowable,
TestSelector
}
import util.Logger
import sbt.protocol.testing.TestResult
/**
* A tests listener that outputs the results it receives in junit xml
* report format.
* @param targetDir directory in which test reports are generated
*/
class JUnitXmlTestsListener(val targetDir: File, legacyTestReport: Boolean, logger: Logger)
extends TestsListener {
// These constructors are for binary compatibility with older versions of sbt
// Use old hard-coded behaviour for constructing `targetDir` from `outputDir`
def this(outputDir: String, legacyTestReport: Boolean, logger: Logger) =
this(new File(outputDir, "test-reports"), legacyTestReport, logger)
def this(outputDir: String, logger: Logger) = this(outputDir, false, logger)
def this(outputDir: String) = this(outputDir, false, null)
@deprecated("Provided for binary compatibility: please use `targetDir` instead", "1.6.0")
def outputDir: String = targetDir.getParent
/**Current hostname so we know which machine executed the tests*/
val hostname: String = {
val start = System.nanoTime
val name = try InetAddress.getLocalHost.getHostName
catch {
case _: IOException => "localhost"
}
val elapsed = System.nanoTime - start
if ((NANOSECONDS.toSeconds(elapsed) >= 4) && Properties.isMac && logger != null) {
logger.warn(
s"Getting the hostname $name was slow (${elapsed / 1.0e6} ms). " +
"This is likely because the computer's hostname is not set. You can set the " +
"""hostname with the command: scutil --set HostName "$(scutil --get LocalHostName).local"."""
)
}
name
}
/**all system properties as XML*/
val properties: Elem =
<properties>
{
// create a clone, defending against [[ConcurrentModificationException]]
val clonedProperties = System.getProperties.clone.asInstanceOf[Hashtable[AnyRef, AnyRef]]
val iter = clonedProperties.entrySet.iterator
val props: ListBuffer[XNode] = new ListBuffer()
while (iter.hasNext) {
val next = iter.next
props += <property name={next.getKey.toString} value={next.getValue.toString}/>
}
props
}
</properties>
/**
* Gathers data for one Test Suite. We map test groups to TestSuites.
* Each TestSuite gets its own output file.
*/
class TestSuite(val name: String, timestamp: LocalDateTime) {
def this(name: String) = this(name, LocalDateTime.now())
val events: ListBuffer[TEvent] = new ListBuffer()
/**Adds one test result to this suite.*/
def addEvent(e: TEvent): ListBuffer[TEvent] = events += e
/** Returns the number of tests of each state for the specified. */
def count(status: TStatus) = events.count(_.status == status)
/**
* Stops the time measuring and emits the XML for
* All tests collected so far.
*/
def stop(): Elem = {
val duration = events.map(_.duration()).sum
val (errors, failures, tests) = (count(TStatus.Error), count(TStatus.Failure), events.size)
// Junit XML reports don't differentiate between ignored, skipped or pending tests
val ignoredSkippedPending = count(TStatus.Ignored) + count(TStatus.Skipped) + count(
TStatus.Pending
)
// for sbt/junit-interface version 0.11 (in future versions this should be done there)
    val classnameRegex = s"^($name|${name.split('.').last})\\.?".r
val result =
<testsuite hostname={hostname} name={name} tests={tests.toString} errors={errors.toString} failures={
failures + ""
} skipped={ignoredSkippedPending.toString} time={(duration / 1000.0).toString} timestamp={
formatISO8601DateTime(timestamp)
}>
{properties}
{
for (e <- events)
yield <testcase classname={
e.selector match {
case nested: NestedTestSelector => nested.suiteId()
case _ => name
}
} name={
e.selector match {
case selector: TestSelector =>
val matchEnd =
classnameRegex.findFirstMatchIn(selector.testName).map(_.end).getOrElse(0)
selector.testName.substring(matchEnd)
case nested: NestedTestSelector => nested.testName()
case other => s"(It is not a test it is a ${other.getClass.getCanonicalName})"
}
} time={(e.duration() / 1000.0).toString}>
{
val trace: String = if (e.throwable.isDefined) {
val stringWriter = new StringWriter()
val writer = new PrintWriter(stringWriter)
e.throwable.get.printStackTrace(writer)
writer.flush()
stringWriter.toString
} else {
""
}
e.status match {
case TStatus.Error if (e.throwable.isDefined) =>
<error message={e.throwable.get.getMessage} type={
e.throwable.get.getClass.getName
}>{trace}</error>
case TStatus.Error => <error message={"No Exception or message provided"}/>
case TStatus.Failure if (e.throwable.isDefined) =>
<failure message={e.throwable.get.getMessage} type={
e.throwable.get.getClass.getName
}>{trace}</failure>
case TStatus.Failure => <failure message={"No Exception or message provided"}/>
case TStatus.Ignored | TStatus.Skipped | TStatus.Pending => <skipped/>
case _ => {}
}
}
</testcase>
}
<system-out><![CDATA[]]></system-out>
<system-err><![CDATA[]]></system-err>
</testsuite>
result
}
}
/**The currently running test suite*/
private val testSuite = new InheritableThreadLocal[Option[TestSuite]] {
override def initialValue(): Option[TestSuite] = None
}
private def withTestSuite[T](f: TestSuite => T): T =
testSuite.get().map(f).getOrElse(sys.error("no test suite"))
/**Creates the output Dir*/
override def doInit(): Unit = {
val _ = targetDir.mkdirs()
}
/**
* Starts a new, initially empty Suite with the given name.
*/
override def startGroup(name: String): Unit = testSuite.set(Some(new TestSuite(name)))
/**
* Adds all details for the given even to the current suite.
*/
override def testEvent(event: TestEvent): Unit = for (e <- event.detail) {
withTestSuite(_.addEvent(e))
}
/**
* called for each class or equivalent grouping
* We map one group to one Testsuite, so for each Group
* we create [[https://github.com/windyroad/JUnit-Schema/blob/master/JUnit.xsd JUnit XML file]], and looks like this:
*
* <?xml version="1.0" encoding="UTF-8" ?>
* <testsuite skipped="w" errors="x" failures="y" tests="z" hostname="example.com" name="eu.henkelmann.bla.SomeTest" time="0.23" timestamp="2018-01-01T10:00:00">
* <properties>
* <property name="os.name" value="Linux" />
* ...
* </properties>
* <testcase classname="eu.henkelmann.bla.SomeTest" name="testFooWorks" time="0.0" >
* <error message="the foo did not work" type="java.lang.NullPointerException">... stack ...</error>
* </testcase>
* <testcase classname="eu.henkelmann.bla.SomeTest" name="testBarThrowsException" time="0.0" />
* <testcase classname="eu.henkelmann.bla.SomeTest" name="testBaz" time="0.0">
* <failure message="the baz was no bar" type="junit.framework.AssertionFailedError">...stack...</failure>
* </testcase>
* <system-out><![CDATA[]]></system-out>
* <system-err><![CDATA[]]></system-err>
* </testsuite>
*/
override def endGroup(name: String, t: Throwable): Unit = {
// create our own event to record the error
val event: TEvent = new TEvent {
def fullyQualifiedName = name
//def description =
//"Throwable escaped the test run of '%s'".format(name)
def duration = -1
def status = TStatus.Error
def fingerprint = null
def selector = null
def throwable = new OptionalThrowable(t)
}
withTestSuite(_.addEvent(event))
writeSuite()
}
/**
* Ends the current suite, wraps up the result and writes it to an XML file
* in the output folder that is named after the suite.
*/
override def endGroup(name: String, result: TestResult): Unit = {
writeSuite()
}
// Here we normalize the name to ensure that it's a nicer filename, rather than
// contort the user into not using spaces.
  private[this] def normalizeName(s: String) = s.replaceAll("""\s+""", "-")
/**
* Format the date, without milliseconds or the timezone, per the JUnit spec.
*/
private[this] def formatISO8601DateTime(d: LocalDateTime): String =
d.truncatedTo(ChronoUnit.SECONDS).format(DateTimeFormatter.ISO_LOCAL_DATE_TIME)
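  // For example, formatISO8601DateTime(LocalDateTime.of(2018, 1, 1, 10, 0, 5, 123000000))
  // yields "2018-01-01T10:00:05": the nanoseconds are truncated away and
  // ISO_LOCAL_DATE_TIME carries no zone offset.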
private def writeSuite(): Unit = {
val file = if (legacyTestReport) {
new File(targetDir, s"${normalizeName(withTestSuite(_.name))}.xml").getAbsolutePath
} else {
new File(targetDir, s"TEST-${normalizeName(withTestSuite(_.name))}.xml").getAbsolutePath
}
// TODO would be nice to have a logger and log this with level debug
// System.err.println("Writing JUnit XML test report: " + file)
val testSuiteResult = withTestSuite(_.stop())
XML.save(file, testSuiteResult, "UTF-8", xmlDecl = true, null)
testSuite.remove()
}
/**Does nothing, as we write each file after a suite is done.*/
override def doComplete(finalResult: TestResult): Unit = {}
/**Returns None*/
override def contentLogger(test: TestDefinition): Option[ContentLogger] = None
}
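// Hedged wiring sketch (build.sbt): register the listener so each test group
// produces a JUnit XML report. Whether `Test / testListeners` accepts this
// exact form depends on the sbt version, and the report directory is an
// assumption.
// {{{
// Test / testListeners += new JUnitXmlTestsListener(
//   target.value / "junit-reports",
//   legacyTestReport = false,
//   streams.value.log
// )
// }}}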
| xuwei-k/xsbt | testing/src/main/scala/sbt/JUnitXmlTestsListener.scala | Scala | apache-2.0 | 10,651 |
/*
* Copyright 2015 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.accounts
import uk.gov.hmrc.ct.box.{CtBoxIdentifier, CtOptionalInteger, Input, MustBeNoneOrZeroOrPositive}
case class AC15(value: Option[Int]) extends CtBoxIdentifier(name = "Previous Cost of sales")
with CtOptionalInteger with MustBeNoneOrZeroOrPositive with Input
| keithhall/ct-calculations | src/main/scala/uk/gov/hmrc/ct/accounts/AC15.scala | Scala | apache-2.0 | 932 |
package controllers
import gov.dwp.carers.CADSHealthCheck
import monitoring.ProdHealthMonitor
import play.api.libs.json.{Json, Writes}
import play.api.mvc.{Action, Controller}
trait HealthController {
this: Controller =>
val healthMonitor = ProdHealthMonitor
implicit val healthWrites = new Writes[(String, CADSHealthCheck.Result)] {
def writes(healthCheck: (String, CADSHealthCheck.Result)) = Json.obj(
"application name" -> healthCheck._2.getApplication,
"version" -> healthCheck._2.getVersion,
"name" -> healthCheck._1,
"isHealthy" -> healthCheck._2.isHealthy
)
}
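  // A single serialized entry produced by the writer above looks roughly like
  // this (all values are illustrative):
  // {
  //   "application name": "renderingservice",
  //   "version": "1.0.0",
  //   "name": "database",
  //   "isHealthy": true
  // }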
def healthReport = Action {
request =>
Ok(Json.prettyPrint(Json.toJson(healthMonitor.runHealthChecks()))).as("application/json").withHeaders("Cache-Control" -> "must-revalidate,no-cache,no-store")
}
def ping = Action {
request => Ok
}
}
object HealthController extends Controller with HealthController
| Department-for-Work-and-Pensions/RenderingService | app/controllers/HealthController.scala | Scala | mit | 938 |
package io.youi.path
case class LineTo(x: Double, y: Double) extends PathAction {
override def draw(context: Context, x: Double, y: Double, scaleX: Double, scaleY: Double): Unit = {
context.lineTo(x + (this.x * scaleX), y + (this.y * scaleY))
}
override def toString: String = s"LineTo(x: $x, y: $y)"
}
| outr/youi | ui/js/src/main/scala/io/youi/path/LineTo.scala | Scala | mit | 315 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.datasources.oap.index
import java.io._
import java.nio.ByteBuffer
import java.nio.channels.FileChannel
import org.apache.commons.io.IOUtils
import org.roaringbitmap.RoaringBitmap
import org.roaringbitmap.buffer.{ImmutableRoaringBitmap, MutableRoaringBitmap}
import org.scalatest.BeforeAndAfterEach
import org.apache.spark.sql.QueryTest
import org.apache.spark.sql.test.oap.SharedOapContext
import org.apache.spark.util.Utils
/**
* The usage for RoaringBitmap.
*/
class BitmapUsageSuite extends QueryTest with SharedOapContext with BeforeAndAfterEach {
private var dir: File = _
private var path: String = _
override def beforeEach(): Unit = {
dir = Utils.createTempDir()
path = dir.getAbsolutePath
}
override def afterEach(): Unit = {
dir.delete()
}
test("test how to serialize roaring bitmap to file and deserialize back") {
val rb1 = new RoaringBitmap()
val rb2 = new RoaringBitmap()
val rb3 = new RoaringBitmap()
(0 until 100000).foreach(rb1.add)
(100000 until 200000).foreach(element => rb2.add(3 * element))
(700000 until 800000).foreach(rb3.add)
val file = path + "roaringbitmaps.bin"
val out = new DataOutputStream(new FileOutputStream(file))
val headerLength = 4
out.writeInt(headerLength)
rb1.runOptimize()
rb2.runOptimize()
rb3.runOptimize()
rb1.serialize(out)
rb2.serialize(out)
rb3.serialize(out)
out.close()
// verify:
val int = new DataInputStream(new FileInputStream(file))
// The 4 is the four bytes for header length.
val headerLengthRead = int.readInt()
int.skip(rb1.serializedSizeInBytes + rb2.serializedSizeInBytes)
val rbtest3 = new RoaringBitmap()
rbtest3.deserialize(int)
if (!rbtest3.equals(rb3)) {
throw new RuntimeException("bug!")
}
}
test("test to use MutableRoaringBitmap and ImmutableRoarigBitmap " +
"to serialize to file and deserialize back") {
val rr1 = MutableRoaringBitmap.bitmapOf(1, 2, 3, 1000)
val rr2 = MutableRoaringBitmap.bitmapOf( 2, 3, 1010)
val file = path + "mutableroaringbitmaps.bin"
val dos = new DataOutputStream(new FileOutputStream(file))
val headerLength = 4
dos.writeInt(headerLength)
rr1.runOptimize()
rr2.runOptimize()
rr1.serialize(dos)
rr2.serialize(dos)
dos.close()
val bb = ByteBuffer.wrap(IOUtils.toByteArray(new FileInputStream(file)))
bb.position(4 + rr1.serializedSizeInBytes())
val rrback2 = new ImmutableRoaringBitmap(bb)
assert(rrback2 == rr2)
}
test("test to use memory map for roaring bitmaps") {
val tmpfile = File.createTempFile("roaring", "bin")
tmpfile.deleteOnExit()
val fos = new FileOutputStream(tmpfile)
val Bitmap1 = MutableRoaringBitmap.bitmapOf(0, 2, 55, 64, 1 << 30)
val Bitmap2 = MutableRoaringBitmap.bitmapOf(0, 2, 55, 654, 1 << 35)
val pos1 = 0 // bitmap 1 is at offset 0
Bitmap1.runOptimize()
Bitmap1.serialize(new DataOutputStream(fos))
val pos2 = Bitmap1.serializedSizeInBytes() // bitmap 2 will be right after it
Bitmap2.runOptimize()
Bitmap2.serialize(new DataOutputStream(fos))
val totalcount = fos.getChannel.position()
if (totalcount != Bitmap1.serializedSizeInBytes() + Bitmap2.serializedSizeInBytes()) {
throw new RuntimeException("This will not happen.")
}
fos.close()
val memoryMappedFile = new RandomAccessFile(tmpfile, "r")
val bb = memoryMappedFile.getChannel.map(FileChannel.MapMode.READ_ONLY, 0, totalcount)
memoryMappedFile.close() // we can safely close
bb.position(pos1)
val mapped1 = new ImmutableRoaringBitmap(bb)
bb.position(pos2)
val mapped2 = new ImmutableRoaringBitmap(bb)
assert(mapped1 == Bitmap1)
assert(mapped2 == Bitmap2)
}
}
| Intel-bigdata/OAP | oap-cache/oap/src/test/scala/org/apache/spark/sql/execution/datasources/oap/index/BitmapUsageSuite.scala | Scala | apache-2.0 | 4,605 |
package org.excala
import com.github.nscala_time.time.Imports._
object Implicits {
implicit val defaultTimeout = 200.millis
}
| edmundnoble/Excala | src/org/excala/Implicits.scala | Scala | bsd-3-clause | 130 |
package com.dragisak.typelevel
import org.scalatest.WordSpec
import org.scalatest.Matchers._
import NatToInt._
import Nat._
class NatSpec extends WordSpec {
"toInt converts Nat to Int" in {
toInt[Nat9] should be(9)
}
}
| dragisak/type-level | src/test/scala/com/dragisak/typelevel/NatSpec.scala | Scala | apache-2.0 | 233 |
import quoted.*
import scala.quoted.staging.*
object Test {
given Compiler = Compiler.make(getClass.getClassLoader)
def main(args: Array[String]): Unit = withQuotes {
val q = '{(q: Quotes) ?=>
val a = '{4}
${'{(q2: Quotes) ?=>
'{${a}}
}}
}
println(q.show)
}
}
| dotty-staging/dotty | tests/run-staging/quote-nested-5.scala | Scala | apache-2.0 | 308 |
package sisdn.admission.service
import akka.actor.ActorSystem
import akka.http.scaladsl.server._
import akka.stream.ActorMaterializer
import sisdn.admission.model.Student
import sisdn.admission.utils.JsonProtocol
object Main {
}
| mhashimm/sisdn-admission-service | src/main/scala/sisdn/admission/service/Main.scala | Scala | agpl-3.0 | 234 |
package edu.usc.irds.sparkler.storage.solr
import edu.usc.irds.sparkler.Constants
import edu.usc.irds.sparkler.storage.{StorageRDD, SparklerGroupPartition}
import edu.usc.irds.sparkler.model.{Resource, ResourceStatus, SparklerJob}
import org.apache.solr.client.solrj.{SolrClient, SolrQuery}
import org.apache.solr.client.solrj.util.ClientUtils.escapeQueryChars
import org.apache.spark.rdd.RDD
import org.apache.spark.{Partition, SparkContext, TaskContext}
import java.net.URL
class SolrDeepRDD(sc: SparkContext,
job: SparklerJob,
sortBy: String = SolrDeepRDD.DEFAULT_ORDER,
generateQry: String = SolrDeepRDD.DEFAULT_FILTER_QRY,
maxGroups: Int = SolrDeepRDD.DEFAULT_GROUPS,
topN: Int = SolrDeepRDD.DEFAULT_TOPN,
deepCrawlHosts: Array[String] = new Array[String](0))
extends RDD[Resource](sc, Seq.empty) {
assert(topN > 0)
assert(maxGroups > 0)
val storageFactory = job.getStorageFactory
override def compute(split: Partition, context: TaskContext): Iterator[Resource] = {
val partition: SparklerGroupPartition = split.asInstanceOf[SparklerGroupPartition]
val batchSize = 100
val query = new SolrQuery(generateQry)
var hostnameFilter = "hostname:''"
for(url <- deepCrawlHosts) {
try {
val hostname = new URL(url).getHost
hostnameFilter += s" OR hostname:$hostname"
} catch {
        case e: Exception => print(s"Exception occurred while getting host from $url")
}
}
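    // e.g. with deepCrawlHosts = Array("http://a.com/x", "https://b.org/")
    // the filter becomes: hostname:'' OR hostname:a.com OR hostname:b.org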
query.addFilterQuery(hostnameFilter)
query.addFilterQuery(s"""${Constants.storage.PARENT}:"${escapeQueryChars(partition.group)}"""")
query.addFilterQuery(s"${Constants.storage.CRAWL_ID}:${job.id}")
query.set("sort", sortBy)
query.setRows(batchSize)
val proxy = storageFactory.getProxy
var client : SolrClient = null
try {
client = proxy.getClient().asInstanceOf[SolrClient]
} catch {
case e: ClassCastException => println("client is not SolrClient.")
}
new SolrResultIterator[Resource](client, query,
batchSize, classOf[Resource], closeClient = true, limit = topN)
}
override protected def getPartitions: Array[Partition] = {
val qry = new SolrQuery(generateQry)
qry.addFilterQuery(s"${Constants.storage.CRAWL_ID}:${job.id}")
qry.set("sort", sortBy)
qry.set("group", true)
qry.set("group.ngroups", true)
qry.set("group.field", Constants.storage.PARENT)
qry.set("group.limit", 0)
qry.setRows(maxGroups)
val proxy = storageFactory.getProxy
var client : SolrClient = null
try {
client = proxy.getClient().asInstanceOf[SolrClient]
} catch {
case e: ClassCastException => println("client is not SolrClient.")
}
val groupRes = client.query(qry).getGroupResponse.getValues.get(0)
val grps = groupRes.getValues
SolrDeepRDD.LOG.info(s"selecting ${grps.size()} out of ${groupRes.getNGroups}")
val res = new Array[Partition](grps.size())
for (i <- 0 until grps.size()) {
//TODO: improve partitioning : (1) club smaller domains, (2) support for multiple partitions for larger domains
res(i) = new SparklerGroupPartition(i, grps.get(i).getGroupValue)
}
proxy.close()
res
}
}
object SolrDeepRDD extends StorageRDD {
override val DEFAULT_ORDER = Constants.storage.DISCOVER_DEPTH + " asc," + Constants.storage.SCORE + " desc"
override val DEFAULT_FILTER_QRY = Constants.storage.STATUS + ":" + ResourceStatus.UNFETCHED
override val DEFAULT_GROUPS = 10
override val DEFAULT_TOPN = 1000
}
| USCDataScience/sparkler | sparkler-app/src/main/scala/edu/usc/irds/sparkler/storage/solr/SolrDeepRDD.scala | Scala | apache-2.0 | 3,658 |
/*
* Copyright 2012 Twitter Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.twitter.zipkin.query
import com.twitter.zipkin.common.BinaryAnnotation
case class QueryRequest(
serviceName: String,
spanName: Option[String],
annotations: Option[Seq[String]],
binaryAnnotations: Option[Seq[BinaryAnnotation]],
endTs: Long,
limit: Int,
order: Order
) {
def hasAnnotations = {
(annotations.map { _.length } getOrElse 0) +
(binaryAnnotations.map { _.length } getOrElse 0) > 0
}
}
| pteichman/zipkin | zipkin-common/src/main/scala/com/twitter/zipkin/query/QueryRequest.scala | Scala | apache-2.0 | 1,039 |
/*
* Copyright 2017 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.ct600j.v2
import uk.gov.hmrc.ct.box.CtBoxIdentifier
abstract class J7A extends CtBoxIdentifier(name = "Tax Avoidance 7 Date")
| pncampbell/ct-calculations | src/main/scala/uk/gov/hmrc/ct/ct600j/v2/J7A.scala | Scala | apache-2.0 | 757 |
package org.apache.mesos.chronos.scheduler.mesos
import java.util.logging.Logger
import org.apache.mesos.chronos.scheduler.config.SchedulerConfiguration
import org.apache.mesos.chronos.scheduler.jobs._
import org.apache.mesos.chronos.scheduler.jobs.constraints.Constraint
import org.apache.mesos.chronos.utils.JobDeserializer
import com.google.inject.Inject
import mesosphere.mesos.util.FrameworkIdUtil
import org.apache.mesos.Protos._
import org.apache.mesos.{Protos, Scheduler, SchedulerDriver}
import org.joda.time.DateTime
import scala.annotation.tailrec
import scala.collection.JavaConverters._
import scala.collection.mutable
import scala.collection.mutable.{Buffer, HashMap, HashSet}
/**
 * Provides the interface to mesos. Receives callbacks from mesos when resources are offered, declined etc.
* @author Florian Leibert ([email protected])
*/
class MesosJobFramework @Inject()(
val mesosDriver: MesosDriverFactory,
val scheduler: JobScheduler,
val taskManager: TaskManager,
val config: SchedulerConfiguration,
val frameworkIdUtil: FrameworkIdUtil,
val taskBuilder: MesosTaskBuilder)
extends Scheduler {
val frameworkName = "chronos"
private[this] val log = Logger.getLogger(getClass.getName)
private var lastReconciliation = DateTime.now.plusSeconds(config.reconciliationInterval())
private var runningTasks = new mutable.HashMap[String, ChronosTask]
/* Overridden methods from MesosScheduler */
@Override
def registered(schedulerDriver: SchedulerDriver, frameworkID: FrameworkID, masterInfo: MasterInfo) {
import mesosphere.util.BackToTheFuture.Implicits.defaultTimeout
import scala.concurrent.ExecutionContext.Implicits.global
log.info("Registered")
log.info("Master info:" + masterInfo.toString)
frameworkIdUtil.store(frameworkID)
}
/* Overridden methods from MesosScheduler */
@Override
def reregistered(schedulerDriver: SchedulerDriver, masterInfo: MasterInfo) {
log.warning("Reregistered")
}
JobDeserializer.config = config
@Override
def disconnected(schedulerDriver: SchedulerDriver) {
log.warning("Disconnected")
}
def getReservedResources(offer: Offer): (Double, Double) = {
val resources = offer.getResourcesList.asScala
val reservedResources = resources.filter({ x => x.hasRole && x.getRole != "*"})
(
getScalarValueOrElse(reservedResources.find(x => x.getName == "cpus"), 0),
getScalarValueOrElse(reservedResources.find(x => x.getName == "mem"), 0)
)
}
def getScalarValueOrElse(opt: Option[Resource], value: Double): Double = {
opt.map(x => x.getScalar.getValue).getOrElse(value)
}
//TODO(FL): Persist the UPDATED task or job into ZK such that on failover / reload, we don't have to step through the
// entire task stream.
@Override
def resourceOffers(schedulerDriver: SchedulerDriver, receivedOffers: java.util.List[Offer]) {
log.info("Received resource offers\\n")
import scala.collection.JavaConverters._
val offers = receivedOffers.asScala.toList
val offerResources = mutable.HashMap(offers.map(o => (o, Resources(o))).toSeq: _*)
val tasksToLaunch = generateLaunchableTasks(offerResources)
log.info("Declining unused offers.\\n")
val usedOffers = mutable.HashSet(tasksToLaunch.map(_._3.getId.getValue): _*)
offers.foreach(o => {
if (!usedOffers.contains(o.getId.getValue))
mesosDriver.get().declineOffer(o.getId)
})
launchTasks(tasksToLaunch)
// Perform a reconciliation, if needed.
reconcile(schedulerDriver)
}
def generateLaunchableTasks(offerResources: mutable.HashMap[Offer, Resources]): mutable.Buffer[(String, BaseJob, Offer)] = {
val tasks = mutable.Buffer[(String, BaseJob, Offer)]()
def checkConstraints(attributes: Seq[Protos.Attribute], constraints: Seq[Constraint]): Boolean = {
constraints.foreach { c =>
if (!c.matches(attributes)) {
return false
}
}
true
}
@tailrec
def generate() {
taskManager.getTask match {
case None => log.info("No tasks scheduled or next task has been disabled.\\n")
case Some((taskId, job)) =>
if (runningTasks.contains(job.name)) {
val deleted = taskManager.removeTask(taskId)
log.warning("The head of the task queue appears to already be running: " + job.name + "\\n")
generate()
} else {
tasks.find(_._2.name == job.name) match {
case Some((subtaskId, subJob, offer)) =>
val deleted = taskManager.removeTask(subtaskId)
log.warning("Found job in queue that is already scheduled for launch with this offer set: " + subJob.name + "\\n")
generate()
case None =>
val neededResources = new Resources(job)
offerResources.toIterator.find { ors =>
ors._2.canSatisfy(neededResources) && checkConstraints(ors._1.getAttributesList.asScala, job.constraints)
} match {
case Some((offer, resources)) =>
// Subtract this job's resource requirements from the remaining available resources in this offer.
resources -= neededResources
tasks.append((taskId, job, offer))
generate()
case None =>
val foundResources = offerResources.toIterator.map(_._2.toString()).mkString(",")
log.warning(
"Insufficient resources remaining for task '%s', will append to queue. (Needed: [%s], Found: [%s])"
.stripMargin.format(taskId, neededResources, foundResources)
)
taskManager.enqueue(taskId, job.highPriority)
}
}
}
}
}
generate()
tasks
}
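  // A minimal, self-contained sketch (illustrative only, not Chronos code) of
  // the first-fit matching idea generateLaunchableTasks implements above: walk
  // the available offers, take the first one whose remaining resources cover
  // the job, and subtract the job's needs from it. `Need` and `Avail` are
  // hypothetical stand-ins for BaseJob and Resources.
  private object FirstFitSketch {
    final case class Need(name: String, cpus: Double, mem: Double)
    final case class Avail(var cpus: Double, var mem: Double) {
      def canSatisfy(n: Need): Boolean = cpus >= n.cpus && mem >= n.mem
      def take(n: Need): Unit = { cpus -= n.cpus; mem -= n.mem }
    }
    /** Pairs each need with the index of the offer it was assigned to, if any. */
    def assign(needs: Seq[Need], offers: IndexedSeq[Avail]): Seq[(Need, Option[Int])] =
      needs.map { n =>
        val idx = offers.indexWhere(_.canSatisfy(n))
        if (idx >= 0) offers(idx).take(n)
        n -> (if (idx >= 0) Some(idx) else None)
      }
  }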
def reconcile(schedulerDriver: SchedulerDriver): Unit = {
if (DateTime.now().isAfter(lastReconciliation.plusSeconds(config.reconciliationInterval()))) {
lastReconciliation = DateTime.now()
      val taskStatuses = runningTasks.values
log.info("Performing task reconciliation with the Mesos master")
schedulerDriver.reconcileTasks(taskStatuses.flatMap(task => task.taskStatus).asJavaCollection)
}
}
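  // Sketch of the time-gating pattern reconcile() uses above: run a side
  // effect at most once per interval. Plain JDK time stands in for Joda's
  // DateTime; all names here are illustrative.
  private object ThrottleSketch {
    private var lastRunMillis = 0L
    def runAtMostEvery(intervalMillis: Long)(action: => Unit): Unit = {
      val now = System.currentTimeMillis()
      if (now - lastRunMillis >= intervalMillis) {
        lastRunMillis = now
        action
      }
    }
  }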
def launchTasks(tasks: mutable.Buffer[(String, BaseJob, Offer)]) {
import scala.collection.JavaConverters._
tasks.groupBy(_._3).toIterable.foreach({ case (offer, subTasks) =>
val mesosTasks = subTasks.map(task => {
taskBuilder.getMesosTaskInfoBuilder(task._1, task._2, task._3).setSlaveId(task._3.getSlaveId).build()
})
log.info("Launching tasks from offer: " + offer + " with tasks: " + mesosTasks)
val status: Protos.Status = mesosDriver.get().launchTasks(
List(offer.getId).asJava,
mesosTasks.asJava
)
if (status == Protos.Status.DRIVER_RUNNING) {
for (task <- tasks) {
val deleted = taskManager.removeTask(task._1)
log.fine("Successfully launched task '%s' via chronos, task records successfully deleted: '%b'"
.format(task._1, deleted))
runningTasks.put(task._2.name, new ChronosTask(task._3.getSlaveId.getValue))
//TODO(FL): Handle case if chronos can't launch the task.
log.info("Task '%s' launched, status: '%s'".format(task._1, status.toString))
}
} else {
log.warning("Other status returned.")
}
})
}
@Override
def offerRescinded(schedulerDriver: SchedulerDriver, offerID: OfferID) {
//TODO(FL): Handle this case! In practice this isn't a problem as we have retries.
log.warning("Offer rescinded for offer:" + offerID.getValue)
}
@Override
def statusUpdate(schedulerDriver: SchedulerDriver, taskStatus: TaskStatus) {
taskManager.taskCache.put(taskStatus.getTaskId.getValue, taskStatus.getState)
val (jobName, _, _, _) = TaskUtils.parseTaskId(taskStatus.getTaskId.getValue)
taskStatus.getState match {
case TaskState.TASK_RUNNING =>
scheduler.handleStartedTask(taskStatus)
updateRunningTask(jobName, taskStatus)
case TaskState.TASK_STAGING =>
scheduler.handleStartedTask(taskStatus)
updateRunningTask(jobName, taskStatus)
case _ =>
runningTasks.remove(jobName)
}
    //TODO(FL): Add statistics for jobs
taskStatus.getState match {
case TaskState.TASK_FINISHED =>
log.info("Task with id '%s' FINISHED".format(taskStatus.getTaskId.getValue))
//This is a workaround to support async jobs without having to keep yet more state.
if (scheduler.isTaskAsync(taskStatus.getTaskId.getValue)) {
log.info("Asynchronous task: '%s', not updating job-graph.".format(taskStatus.getTaskId.getValue))
} else {
scheduler.handleFinishedTask(taskStatus)
}
case TaskState.TASK_FAILED =>
log.info("Task with id '%s' FAILED".format(taskStatus.getTaskId.getValue))
scheduler.handleFailedTask(taskStatus)
case TaskState.TASK_LOST =>
log.info("Task with id '%s' LOST".format(taskStatus.getTaskId.getValue))
scheduler.handleFailedTask(taskStatus)
case TaskState.TASK_RUNNING =>
log.info("Task with id '%s' RUNNING.".format(taskStatus.getTaskId.getValue))
case TaskState.TASK_KILLED =>
log.info("Task with id '%s' KILLED.".format(taskStatus.getTaskId.getValue))
scheduler.handleKilledTask(taskStatus)
case _ =>
log.warning("Unknown TaskState:" + taskStatus.getState + " for task: " + taskStatus.getTaskId.getValue)
}
// Perform a reconciliation, if needed.
reconcile(schedulerDriver)
}
def updateRunningTask(jobName: String, taskStatus: TaskStatus): Unit = {
runningTasks.get(jobName) match {
case Some(chronosTask) =>
chronosTask.taskStatus = Some(taskStatus)
case _ =>
runningTasks.put(jobName, new ChronosTask(taskStatus.getSlaveId.getValue, Some(taskStatus)))
log.warning(s"Received status update for untracked jobName=$jobName")
}
}
@Override
def frameworkMessage(schedulerDriver: SchedulerDriver, executorID: ExecutorID, slaveID: SlaveID, bytes: Array[Byte]) {
log.info("Framework message received")
}
@Override
def slaveLost(schedulerDriver: SchedulerDriver, slaveID: SlaveID) {
log.warning("Slave lost")
// Remove any running jobs from this slave
val jobs = runningTasks.filter {
case (k, v) =>
slaveID.getValue == v.slaveId
}
runningTasks --= jobs.keys
}
@Override
def executorLost(schedulerDriver: SchedulerDriver, executorID: ExecutorID, slaveID: SlaveID, status: Int) {
log.info("Executor lost")
}
@Override
def error(schedulerDriver: SchedulerDriver, message: String) {
log.severe(message)
scheduler.shutDown()
System.exit(1)
}
  private def logOffer(offer: Offer) {
    import scala.collection.JavaConversions._
    val s = new StringBuilder
    offer.getResourcesList.foreach({ x =>
      s.append(s"Name: ${x.getName}")
      if (x.hasScalar && x.getScalar.hasValue) {
        s.append(s" Scalar: ${x.getScalar.getValue}")
      }
    })
    // Previously the summary was built but never emitted; log it.
    log.fine(s.toString())
  }
class Resources(var cpus: Double,
var mem: Double,
var disk: Double) {
def this(job: BaseJob) {
this(
if (job.cpus > 0) job.cpus else config.mesosTaskCpu(),
if (job.mem > 0) job.mem else config.mesosTaskMem(),
if (job.disk > 0) job.disk else config.mesosTaskDisk()
)
}
def canSatisfy(needed: Resources): Boolean = {
(this.cpus >= needed.cpus) &&
(this.mem >= needed.mem) &&
(this.disk >= needed.disk)
}
def -=(that: Resources) {
this.cpus -= that.cpus
this.mem -= that.mem
this.disk -= that.disk
}
override def toString: String = {
"cpus: " + this.cpus + " mem: " + this.mem + " disk: " + this.disk
}
}
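  // Illustrative only: the mutable accounting Resources supports. After
  // `avail -= needed`, the same offer can be tested against the next job in
  // the queue, which is exactly how generateLaunchableTasks reuses offers.
  private def resourcesAccountingSketch(): Boolean = {
    val avail = new Resources(cpus = 4.0, mem = 4096.0, disk = 10240.0)
    val needed = new Resources(cpus = 1.0, mem = 512.0, disk = 1024.0)
    val fits = avail.canSatisfy(needed)
    if (fits) avail -= needed
    fits
  }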
private class ChronosTask(val slaveId: String,
var taskStatus: Option[TaskStatus] = None) {
override def toString: String = {
s"slaveId=$slaveId, taskStatus=${taskStatus.getOrElse("none").toString}"
}
}
object Resources {
def apply(offer: Offer): Resources = {
val resources = offer.getResourcesList.asScala.filter(r => !r.hasRole || r.getRole == "*" || r.getRole == config.mesosRole())
new Resources(
getScalarValueOrElse(resources.find(_.getName == "cpus"), 0),
getScalarValueOrElse(resources.find(_.getName == "mem"), 0),
getScalarValueOrElse(resources.find(_.getName == "disk"), 0)
)
}
}
}
| wndhydrnt/chronos | src/main/scala/org/apache/mesos/chronos/scheduler/mesos/MesosJobFramework.scala | Scala | apache-2.0 | 12,934 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.utils.tf.loaders
import java.nio.ByteOrder
import com.intel.analytics.bigdl.Module
import com.intel.analytics.bigdl.nn.{Identity => IdentityOps}
import com.intel.analytics.bigdl.tensor.TensorNumericMath.TensorNumeric
import com.intel.analytics.bigdl.utils.tf.Context
import org.tensorflow.framework.NodeDef
import scala.reflect.ClassTag
class Placeholder extends TensorflowOpsLoader {
override def build[T: ClassTag](nodeDef: NodeDef, byteOrder: ByteOrder,
context: Context[T])(implicit ev: TensorNumeric[T]): Module[T] = {
IdentityOps[T]
}
}
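// Hedged sketch (not part of BigDL): any other TensorFlow op that should act
// as a simple pass-through could be loaded the same way as Placeholder above.
// `NoOpPassThrough` is a hypothetical loader name used purely for illustration.
class NoOpPassThrough extends TensorflowOpsLoader {
  override def build[T: ClassTag](nodeDef: NodeDef, byteOrder: ByteOrder,
    context: Context[T])(implicit ev: TensorNumeric[T]): Module[T] = {
    // Exactly like Placeholder: map the node to an identity module.
    IdentityOps[T]
  }
}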
| wzhongyuan/BigDL | spark/dl/src/main/scala/com/intel/analytics/bigdl/utils/tf/loaders/Placeholder.scala | Scala | apache-2.0 | 1,196 |
package com.scalegray.test
import org.scalatest._
import com.scalegray._
import io.jvm.uuid._
class NodeSpec extends FlatSpec {
"Node" should "return a UID" in {
val uid: Long = 9223372036854775807L
val stack = new Node(uid)
    // Smoke test: only verifies that getUID() can be invoked without throwing.
    stack.getUID()
}
}
| scalegray/concorde | src/test/scala/io/scalegray/test/NodeSpec.scala | Scala | mit | 287 |
package org.crashstars.common
import java.util.Properties
import org.springframework.core.io.support.PathMatchingResourcePatternResolver
/**
* Created by anavidad on 8/10/15.
*/
object ProjectProperties extends Properties with Logging {
private val patternResolver = new PathMatchingResourcePatternResolver()
private val mappingLocations = patternResolver.getResources("classpath*:properties/**/*.properties")
private def fillProperties(): Unit = {
logDebug("Initializing properties...")
mappingLocations.foreach(x => {
logDebug(s"File $x")
load(x.getInputStream)
})
}
fillProperties()
}
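// Usage sketch (illustrative): values load once at object initialization, so
// lookups are plain java.util.Properties calls. The key "app.name" is a
// hypothetical example, not a key this project necessarily defines.
object ProjectPropertiesExample {
  def appName: String = ProjectProperties.getProperty("app.name", "unknown-app")
}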
| anavidad3/PoC-spark-scala-maven | src/main/scala/org/crashstars/common/ProjectProperties.scala | Scala | apache-2.0 | 631 |
/*
* Scala (https://www.scala-lang.org)
*
* Copyright EPFL and Lightbend, Inc.
*
* Licensed under Apache License 2.0
* (http://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package scala
package reflect
package internal
import scala.language.implicitConversions
import java.lang.annotation.{ Annotation => jAnnotation }
import java.lang.reflect.{
Constructor => jConstructor, Method => jMethod,
Type => jType, TypeVariable => jTypeVariable
}
/** This class tries to abstract over some of the duplication
* in java.lang.reflect.{ Method, Constructor }.
*/
sealed abstract class JMethodOrConstructor {
def isVarArgs: Boolean = this match {
case JMethod(m) => m.isVarArgs
case JConstructor(m) => m.isVarArgs
}
def typeParams: Array[_ <: jTypeVariable[_]] = this match {
case JMethod(m) => m.getTypeParameters
case JConstructor(m) => m.getTypeParameters
}
def paramTypes: Array[jType] = this match {
case JMethod(m) => m.getGenericParameterTypes
case JConstructor(m) => m.getGenericParameterTypes
}
def paramAnnotations: Array[Array[jAnnotation]] = this match {
case JMethod(m) => m.getParameterAnnotations
case JConstructor(m) => m.getParameterAnnotations
}
def resultType: jType = this match {
case JMethod(m) => m.getGenericReturnType
case JConstructor(_) => classOf[Unit]
}
}
object JMethodOrConstructor {
implicit def liftMethodToJmoc(m: jMethod): JMethodOrConstructor = JMethod(m)
implicit def liftConstructorToJmoc(m: jConstructor[_]): JMethodOrConstructor = JConstructor(m)
}
final case class JMethod(m: jMethod) extends JMethodOrConstructor
final case class JConstructor(m: jConstructor[_]) extends JMethodOrConstructor
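// Usage sketch: the implicit conversions above let raw reflection values be
// handled uniformly. Illustrative only; java.lang.String is used simply
// because it is always present.
object JMethodOrConstructorExample {
  import JMethodOrConstructor._
  def describe(): (Boolean, Int) = {
    val m: JMethodOrConstructor = classOf[String].getMethods.head
    val c: JMethodOrConstructor = classOf[String].getConstructors.head
    (m.isVarArgs, c.paramTypes.length)
  }
}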
| scala/scala | src/reflect/scala/reflect/internal/JMethodOrConstructor.scala | Scala | apache-2.0 | 1,869 |
package mesosphere.marathon
package core.matcher.manager.impl
import java.util
import java.util.concurrent.TimeUnit
import akka.pattern.ask
import akka.testkit.TestActorRef
import akka.util.Timeout
import mesosphere.AkkaUnitTest
import mesosphere.marathon.core.instance.LocalVolumeId
import mesosphere.marathon.test.SettableClock
import mesosphere.marathon.core.matcher.base.OfferMatcher
import mesosphere.marathon.core.matcher.base.util.ActorOfferMatcher
import mesosphere.marathon.core.matcher.manager.OfferMatcherManagerConfig
import mesosphere.marathon.state.PathId
import mesosphere.marathon.test.MarathonTestHelper
import org.apache.mesos.Protos.Offer
import org.rogach.scallop.ScallopConf
import org.scalatest.concurrent.Eventually
import rx.lang.scala.Observer
import scala.concurrent.{ Future, Promise }
import scala.util.{ Random, Success }
import scala.concurrent.duration._
class OfferMatcherManagerActorTest extends AkkaUnitTest with Eventually {
"OfferMatcherManagerActor" should {
"The list of OfferMatchers is random without precedence" in new Fixture {
Given("OfferMatcher with num normal matchers")
val num = 5
val appId = PathId("/some/app")
val manager = offerMatcherManager
val matchers = 1.to(num).map(_ => matcher())
matchers.map { matcher => manager ? OfferMatcherManagerDelegate.AddOrUpdateMatcher(matcher) }
When("The list of offer matchers is fetched")
val orderedMatchers = manager.underlyingActor.offerMatchers(reservedOffer(appId))
Then("The list is sorted in the correct order")
orderedMatchers should have size num.toLong
orderedMatchers should contain theSameElementsAs matchers
}
"The list of OfferMatchers is sorted by precedence" in new Fixture {
Given("OfferMatcher with num precedence and num normal matchers, registered in mixed order")
val num = 5
val appId = PathId("/some/app")
val manager = offerMatcherManager
1.to(num).flatMap(_ => Seq(matcher(), matcher(Some(appId)))).map { matcher =>
manager ? OfferMatcherManagerDelegate.AddOrUpdateMatcher(matcher)
}
When("The list of offer matchers is fetched")
val sortedMatchers = manager.underlyingActor.offerMatchers(reservedOffer(appId))
Then("The list is sorted in the correct order")
sortedMatchers should have size 2 * num.toLong
val (left, right) = sortedMatchers.splitAt(num)
left.count(_.precedenceFor.isDefined) should be(num)
right.count(_.precedenceFor.isDefined) should be(0)
}
"queue offers, if the maximum number of offer matchers is busy" in new Fixture {
Given("OfferMatcher with one matcher")
val offerMatch1 = Promise[OfferMatcher.MatchedInstanceOps]
val offerMatch2 = Promise[OfferMatcher.MatchedInstanceOps]
val offer1 = offer()
val offer2 = offer()
val offerPass1 = Promise[OfferMatcher.MatchedInstanceOps]
val offerPass2 = Promise[OfferMatcher.MatchedInstanceOps]
offerMatcherManager.underlyingActor.launchTokens = 100
val offerPass = Map(offer1 -> offerPass1.future, offer2 -> offerPass2.future)
offerMatcherManager ? OfferMatcherManagerDelegate.AddOrUpdateMatcher(matcherWith(offerPass))
When("one offer is send to the manager")
offerMatcherManager ! ActorOfferMatcher.MatchOffer(offer1, offerMatch1)
Then("The offer is not queued")
offerMatcherManager.underlyingActor.unprocessedOffers should have size 0
When("another offer is send to the manager")
offerMatcherManager ! ActorOfferMatcher.MatchOffer(offer2, offerMatch2)
Then("One offer should be queued, since all matchers are busy")
eventually(offerMatcherManager.underlyingActor.unprocessedOffers should have size 1)
When("The offer is matched")
offerPass1.complete(Success(OfferMatcher.MatchedInstanceOps(offer1.getId, Seq.empty)))
offerPass2.complete(Success(OfferMatcher.MatchedInstanceOps(offer2.getId, Seq.empty)))
Then("The queued offer is taken")
eventually(offerMatcherManager.underlyingActor.unprocessedOffers should have size 0)
And("The promise should be fullfilled")
offerMatch1.future.futureValue.opsWithSource should be('empty)
offerMatch2.future.futureValue.opsWithSource should be('empty)
}
"decline offers immediately, if nobody is interested in offers" in new Fixture {
Given("OfferMatcher with one matcher")
val offerMatch1 = Promise[OfferMatcher.MatchedInstanceOps]
val offerMatch2 = Promise[OfferMatcher.MatchedInstanceOps]
val offer1 = offer()
val offer2 = offer()
offerMatcherManager.underlyingActor.launchTokens = 100
When("2 offers are send to the manager")
offerMatcherManager ! ActorOfferMatcher.MatchOffer(offer1, offerMatch1)
offerMatcherManager ! ActorOfferMatcher.MatchOffer(offer2, offerMatch2)
Then("One offer is declined immediately")
offerMatch1.future.futureValue.resendThisOffer should be(false)
offerMatch2.future.futureValue.resendThisOffer should be(false)
}
"decline offers immediately, if the maximum number of offer matchers is busy and the offers queue is full" in new Fixture {
Given("OfferMatcher with one matcher")
val offerMatch1 = Promise[OfferMatcher.MatchedInstanceOps]
val offerMatch2 = Promise[OfferMatcher.MatchedInstanceOps]
val offerMatch3 = Promise[OfferMatcher.MatchedInstanceOps]
val offer1 = offer()
val offer2 = offer()
val offer3 = offer()
val offerPass1 = Promise[OfferMatcher.MatchedInstanceOps]
val offerPass2 = Promise[OfferMatcher.MatchedInstanceOps]
offerMatcherManager.underlyingActor.launchTokens = 100
val offerPass = Map(offer1 -> offerPass1.future, offer2 -> offerPass2.future)
offerMatcherManager ? OfferMatcherManagerDelegate.AddOrUpdateMatcher(matcherWith(offerPass))
When("2 offers are send to the manager")
offerMatcherManager ! ActorOfferMatcher.MatchOffer(offer1, offerMatch1)
offerMatcherManager ! ActorOfferMatcher.MatchOffer(offer2, offerMatch2)
Then("One offer is matched and one should be queued")
eventually(offerMatcherManager.underlyingActor.unprocessedOffers should have size 1)
When("Another offer is send")
offerMatcherManager ! ActorOfferMatcher.MatchOffer(offer3, offerMatch3)
Then("The offer is declined immediately")
offerMatch3.future.futureValue.resendThisOffer should be(true)
offerMatch1.isCompleted should be(false)
offerMatch2.isCompleted should be(false)
And("If the matcher passes matching, the resulting promise should be fulfilled")
offerPass1.complete(Success(OfferMatcher.MatchedInstanceOps(offer1.getId, Seq.empty)))
offerPass2.complete(Success(OfferMatcher.MatchedInstanceOps(offer2.getId, Seq.empty)))
offerMatch1.future.futureValue.opsWithSource should be('empty)
offerMatch2.future.futureValue.opsWithSource should be('empty)
}
"overdue offers are rejected after the deadline" in new Fixture(Seq("--max_parallel_offers", "1", "--max_queued_offers", "100", "--offer_matching_timeout", "10")) {
Given("OfferMatcher with one matcher")
val offer1 = offer()
val offer2 = offer()
val offer3 = offer()
val offerMatch1 = Promise[OfferMatcher.MatchedInstanceOps]
val offerMatch2 = Promise[OfferMatcher.MatchedInstanceOps]
val offerMatch3 = Promise[OfferMatcher.MatchedInstanceOps]
offerMatcherManager.underlyingActor.launchTokens = 100
offerMatcherManager.underlyingActor.matchers += matcher()
When("1 offer is send, which is passed to the matcher, 2 offers are send and queued with a 10 millis gap")
offerMatcherManager ! ActorOfferMatcher.MatchOffer(offer1, offerMatch1)
offerMatcherManager ! ActorOfferMatcher.MatchOffer(offer2, offerMatch2)
clock += 10.millis
offerMatcherManager ! ActorOfferMatcher.MatchOffer(offer3, offerMatch3)
Then("offer-2 is declined, due to timeout but not offer-3")
offerMatch2.future.futureValue.opsWithSource should be('empty)
offerMatch3.isCompleted should be(false)
offerMatch1.isCompleted should be(false)
And("After 10 millis also offer-2 is declined")
clock += 10.millis
offerMatch3.future.futureValue.opsWithSource should be('empty)
offerMatch1.isCompleted should be(false)
}
"offers are rejected if the matcher does not respond in time" in new Fixture(Seq("--max_parallel_offers", "1", "--max_queued_offers", "100", "--offer_matching_timeout", "10")) {
Given("OfferMatcher with one matcher")
val offer1 = offer()
val offerMatch1 = Promise[OfferMatcher.MatchedInstanceOps]
offerMatcherManager.underlyingActor.launchTokens = 100
offerMatcherManager.underlyingActor.matchers += matcher()
When("1 offer is send, which is passed to the matcher, but the matcher does not respond")
offerMatcherManager ! ActorOfferMatcher.MatchOffer(offer1, offerMatch1)
clock += 30.millis
Then("offer-1 is declined, since the actor did not respond in time")
offerMatch1.future.futureValue.opsWithSource should be('empty)
}
}
implicit val timeout = Timeout(3, TimeUnit.SECONDS)
class Fixture(config: Seq[String] = Seq("--max_parallel_offers", "1", "--max_queued_offers", "1")) {
val metrics = new OfferMatcherManagerActorMetrics()
val random = new Random(new util.Random())
val idGen = 1.to(Int.MaxValue).iterator
val clock = new SettableClock()
val observer = Observer.apply[Boolean]((a: Boolean) => ())
object Config extends ScallopConf(config) with OfferMatcherManagerConfig {
verify()
}
val offerMatcherManager = TestActorRef[OfferMatcherManagerActor](OfferMatcherManagerActor.props(metrics, random, clock, Config, observer))
def matcher(precedence: Option[PathId] = None): OfferMatcher = {
val matcher = mock[OfferMatcher]
val promise = Promise[OfferMatcher.MatchedInstanceOps]
matcher.precedenceFor returns precedence
matcher.matchOffer(any) returns promise.future
matcher
}
def matcherWith(fn: Offer => Future[OfferMatcher.MatchedInstanceOps]): OfferMatcher = {
val matcher = mock[OfferMatcher]
matcher.precedenceFor returns None
matcher.matchOffer(any) answers {
case Array(offer: Offer) => fn(offer)
}
matcher
}
def offer(): Offer = MarathonTestHelper.makeBasicOffer().setId(org.apache.mesos.Protos.OfferID.newBuilder().setValue("offer-" + idGen.next())).build()
def reservedOffer(appId: PathId, path: String = "test"): Offer = {
import MarathonTestHelper._
makeBasicOffer().addResources(reservedDisk(LocalVolumeId(appId, path, "uuid").idString, containerPath = path)).build()
}
}
}
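// A minimal sketch (plain Scala, no actors) of the promise-completion pattern
// the tests above exercise: the caller hands a Promise to the matcher, the
// matcher completes it, and the caller observes the resulting Future.
object PromisePatternSketch {
  import scala.concurrent.{ Future, Promise }
  def matchOnce(complete: Promise[String] => Unit): Future[String] = {
    val p = Promise[String]()
    complete(p)
    p.future
  }
  // e.g. matchOnce(_.success("matched")) yields an already-completed Future.
}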
| guenter/marathon | src/test/scala/mesosphere/marathon/core/matcher/manager/impl/OfferMatcherManagerActorTest.scala | Scala | apache-2.0 | 10,866 |
package scabot
package typesafe
import spray.json.{RootJsonFormat, DefaultJsonProtocol}
trait TypesafeApi extends TypesafeApiTypes with DefaultJsonProtocol with TypesafeApiActions { self: core.Core with core.HttpClient with core.Configuration => }
trait TypesafeApiTypes { self: core.Core with core.Configuration =>
case class CLARecord(user: String, signed: Boolean, version: Option[String], currentVersion: String)
}
trait TypesafeJsonProtocol extends TypesafeApiTypes with DefaultJsonProtocol { self: core.Core with core.Configuration =>
private type RJF[x] = RootJsonFormat[x]
implicit lazy val _fmtCLARecord: RJF[CLARecord] = jsonFormat4(CLARecord)
}
trait TypesafeApiActions extends TypesafeJsonProtocol { self: core.Core with core.Configuration with core.HttpClient =>
class TypesafeConnection {
import spray.http.{GenericHttpCredentials, Uri}
import spray.httpx.SprayJsonSupport._
import spray.client.pipelining._
private implicit def connection = setupConnection("www.typesafe.com")
import spray.json._
def checkCla(user: String) = pWithStatus[CLARecord](Get(Uri("/contribute/cla/scala/check" / user)))
}
}
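// Self-contained sketch of the cake pattern used above (illustrative names,
// not scabot's real core traits): each layer declares its dependencies via a
// self-type, and the final object mixes every layer together.
object CakePatternSketch {
  trait Config { def baseUrl: String = "https://example.invalid" }
  trait Api { self: Config =>
    def claCheckUrl(user: String): String = s"$baseUrl/contribute/cla/scala/check/$user"
  }
  object Component extends Api with Config
  // Component.claCheckUrl("someUser") draws on both layers at once.
}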
| SethTisue/scabot | typesafe/src/main/scala/scabot/typesafe/TypesafeApi.scala | Scala | bsd-3-clause | 1,157 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.samza.storage
import java.io.File
import org.junit.{After, Before, Test}
import org.junit.Assert._
import org.scalatest.mock.MockitoSugar
import org.mockito.Mockito._
import org.mockito.Matchers._
import scala.collection.JavaConversions
import org.apache.samza.container.TaskName
import org.apache.samza.util.Util
import org.apache.samza.system._
import org.apache.samza.system.SystemStreamMetadata.SystemStreamPartitionMetadata
import org.apache.samza.Partition
class TestTaskStorageManager extends MockitoSugar {
val store = "store1"
val loggedStore = "loggedStore1"
val taskName = new TaskName("testTask")
@Before
def setupTestDirs() {
TaskStorageManager.getStorePartitionDir(TaskStorageManagerBuilder.defaultStoreBaseDir, store , taskName)
.mkdirs()
TaskStorageManager.getStorePartitionDir(TaskStorageManagerBuilder.defaultLoggedStoreBaseDir, loggedStore, taskName)
.mkdirs()
}
@After
def tearDownTestDirs() {
Util.rm(TaskStorageManagerBuilder.defaultStoreBaseDir)
Util.rm(TaskStorageManagerBuilder.defaultLoggedStoreBaseDir)
}
@Test
def testCleanBaseDirs() {
val checkFilePath1 = new File(TaskStorageManager.getStorePartitionDir(TaskStorageManagerBuilder.defaultStoreBaseDir, store, taskName), "check")
checkFilePath1.createNewFile()
val checkFilePath2 = new File(TaskStorageManager.getStorePartitionDir(TaskStorageManagerBuilder.defaultLoggedStoreBaseDir, loggedStore, taskName), "check")
checkFilePath2.createNewFile()
val taskStorageManager = new TaskStorageManagerBuilder()
.addStore(store)
.addStore(loggedStore)
.build
//Invoke test method
val cleanDirMethod = taskStorageManager
.getClass
.getDeclaredMethod("cleanBaseDirs",
new Array[java.lang.Class[_]](0):_*)
cleanDirMethod.setAccessible(true)
cleanDirMethod.invoke(taskStorageManager, new Array[Object](0):_*)
assertTrue("check file was found in store partition directory. Clean up failed!", !checkFilePath1.exists())
assertTrue("check file was found in logged store partition directory. Clean up failed!", !checkFilePath2.exists())
}
@Test
def testCleanBaseDirsWithOffsetFileForLoggedStore() {
val offsetFilePath = new File(TaskStorageManager.getStorePartitionDir(TaskStorageManagerBuilder.defaultLoggedStoreBaseDir, loggedStore, taskName), "OFFSET")
Util.writeDataToFile(offsetFilePath, "100")
val taskStorageManager = new TaskStorageManagerBuilder()
.addStore(loggedStore)
.build
val cleanDirMethod = taskStorageManager.getClass.getDeclaredMethod("cleanBaseDirs",
new Array[java.lang.Class[_]](0):_*)
cleanDirMethod.setAccessible(true)
cleanDirMethod.invoke(taskStorageManager, new Array[Object](0):_*)
assertTrue("Offset file was removed. Clean up failed!", offsetFilePath.exists())
assertEquals("Offset read does not match what was in the file", "100", taskStorageManager.fileOffset.get(new SystemStreamPartition("kafka", "testStream", new Partition(0))))
}
@Test
def testStopCreatesOffsetFileForLoggedStore() {
val partition = new Partition(0)
val offsetFilePath = new File(TaskStorageManager.getStorePartitionDir(TaskStorageManagerBuilder.defaultLoggedStoreBaseDir, loggedStore, taskName) + File.separator + "OFFSET")
val mockSystemAdmin = mock[SystemAdmin]
val mockSspMetadata = Map("testStream" -> new SystemStreamMetadata("testStream" , JavaConversions.mapAsJavaMap[Partition, SystemStreamPartitionMetadata](Map(partition -> new SystemStreamPartitionMetadata("20", "100", "101")))))
val myMap = JavaConversions.mapAsJavaMap[String, SystemStreamMetadata](mockSspMetadata)
when(mockSystemAdmin.getSystemStreamMetadata(any(JavaConversions.setAsJavaSet(Set("")).getClass))).thenReturn(myMap)
//Build TaskStorageManager
val taskStorageManager = new TaskStorageManagerBuilder()
.addStore(loggedStore)
.setSystemAdmin("kafka", mockSystemAdmin)
.setPartition(partition)
.build
//Invoke test method
taskStorageManager.stop()
//Check conditions
assertTrue("Offset file doesn't exist!", offsetFilePath.exists())
assertEquals("Found incorrect value in offset file!", "100", Util.readDataFromFile(offsetFilePath))
}
@Test
def testFlushCreatesOffsetFileForLoggedStore() {
val partition = new Partition(0)
val offsetFilePath = new File(TaskStorageManager.getStorePartitionDir(TaskStorageManagerBuilder.defaultLoggedStoreBaseDir, loggedStore, taskName) + File.separator + "OFFSET")
val mockSystemAdmin = mock[SystemAdmin]
val mockSspMetadata = Map("testStream" -> new SystemStreamMetadata("testStream" , JavaConversions.mapAsJavaMap[Partition, SystemStreamPartitionMetadata](Map(partition -> new SystemStreamPartitionMetadata("20", "100", "101")))))
val myMap = JavaConversions.mapAsJavaMap[String, SystemStreamMetadata](mockSspMetadata)
when(mockSystemAdmin.getSystemStreamMetadata(any(JavaConversions.setAsJavaSet(Set("")).getClass))).thenReturn(myMap)
//Build TaskStorageManager
val taskStorageManager = new TaskStorageManagerBuilder()
.addStore(loggedStore)
.setSystemAdmin("kafka", mockSystemAdmin)
.setPartition(partition)
.build
//Invoke test method
taskStorageManager.flush()
//Check conditions
assertTrue("Offset file doesn't exist!", offsetFilePath.exists())
assertEquals("Found incorrect value in offset file!", "100", Util.readDataFromFile(offsetFilePath))
}
@Test
def testFlushOverwritesOffsetFileForLoggedStore() {
val partition = new Partition(0)
val offsetFilePath = new File(TaskStorageManager.getStorePartitionDir(TaskStorageManagerBuilder.defaultLoggedStoreBaseDir, loggedStore, taskName) + File.separator + "OFFSET")
Util.writeDataToFile(offsetFilePath, "100")
val mockSystemAdmin = mock[SystemAdmin]
var mockSspMetadata = Map("testStream" -> new SystemStreamMetadata("testStream" , JavaConversions.mapAsJavaMap[Partition, SystemStreamPartitionMetadata](Map(partition -> new SystemStreamPartitionMetadata("20", "139", "140")))))
var myMap = JavaConversions.mapAsJavaMap[String, SystemStreamMetadata](mockSspMetadata)
when(mockSystemAdmin.getSystemStreamMetadata(any(JavaConversions.setAsJavaSet(Set("")).getClass))).thenReturn(myMap)
//Build TaskStorageManager
val taskStorageManager = new TaskStorageManagerBuilder()
.addStore(loggedStore)
.setSystemAdmin("kafka", mockSystemAdmin)
.setPartition(partition)
.build
//Invoke test method
taskStorageManager.flush()
//Check conditions
assertTrue("Offset file doesn't exist!", offsetFilePath.exists())
assertEquals("Found incorrect value in offset file!", "139", Util.readDataFromFile(offsetFilePath))
// Flush again
mockSspMetadata = Map("testStream" -> new SystemStreamMetadata("testStream" , JavaConversions.mapAsJavaMap[Partition, SystemStreamPartitionMetadata](Map(partition -> new SystemStreamPartitionMetadata("20", "193", "194")))))
myMap = JavaConversions.mapAsJavaMap[String, SystemStreamMetadata](mockSspMetadata)
when(mockSystemAdmin.getSystemStreamMetadata(any(JavaConversions.setAsJavaSet(Set("")).getClass))).thenReturn(myMap)
//Invoke test method
taskStorageManager.flush()
//Check conditions
assertTrue("Offset file doesn't exist!", offsetFilePath.exists())
assertEquals("Found incorrect value in offset file!", "193", Util.readDataFromFile(offsetFilePath))
}
@Test
  def testFlushOffsetFileExceptionsHandledGracefully(): Unit = {
    // Intentionally empty placeholder: graceful handling of offset-file flush
    // failures is not yet exercised by this suite.
  }
@Test
def testStopShouldNotCreateOffsetFileForEmptyStore() {
val partition = new Partition(0)
val offsetFilePath = new File(TaskStorageManager.getStorePartitionDir(TaskStorageManagerBuilder.defaultLoggedStoreBaseDir, loggedStore, taskName) + File.separator + "OFFSET")
val mockSystemAdmin = mock[SystemAdmin]
val mockSspMetadata = Map("testStream" -> new SystemStreamMetadata("testStream" , JavaConversions.mapAsJavaMap[Partition, SystemStreamPartitionMetadata](Map(partition -> new SystemStreamPartitionMetadata("20", null, null)))))
val myMap = JavaConversions.mapAsJavaMap[String, SystemStreamMetadata](mockSspMetadata)
when(mockSystemAdmin.getSystemStreamMetadata(any(JavaConversions.setAsJavaSet(Set("")).getClass))).thenReturn(myMap)
//Build TaskStorageManager
val taskStorageManager = new TaskStorageManagerBuilder()
.addStore(loggedStore)
.setSystemAdmin("kafka", mockSystemAdmin)
.setPartition(partition)
.build
//Invoke test method
taskStorageManager.stop()
//Check conditions
assertTrue("Offset file should not exist!", !offsetFilePath.exists())
}
}
object TaskStorageManagerBuilder {
val defaultStoreBaseDir = new File(System.getProperty("java.io.tmpdir") + File.separator + "store")
val defaultLoggedStoreBaseDir = new File(System.getProperty("java.io.tmpdir") + File.separator + "loggedStore")
}
class TaskStorageManagerBuilder extends MockitoSugar {
var taskStores: Map[String, StorageEngine] = Map()
var storeConsumers: Map[String, SystemConsumer] = Map()
var changeLogSystemStreams: Map[String, SystemStream] = Map()
val streamMetadataCache = mock[StreamMetadataCache]
var partition: Partition = new Partition(0)
var systemAdmins: Map[String, SystemAdmin] = Map("kafka" -> mock[SystemAdmin])
var taskName: TaskName = new TaskName("testTask")
var storeBaseDir: File = TaskStorageManagerBuilder.defaultStoreBaseDir
var loggedStoreBaseDir: File = TaskStorageManagerBuilder.defaultLoggedStoreBaseDir
var changeLogStreamPartitions: Int = 1
def addStore(storeName: String): TaskStorageManagerBuilder = {
taskStores = taskStores ++ Map(storeName -> mock[StorageEngine])
storeConsumers = storeConsumers ++ Map(storeName -> mock[SystemConsumer])
changeLogSystemStreams = changeLogSystemStreams ++ Map(storeName -> new SystemStream("kafka", "testStream"))
this
}
def setPartition(p: Partition) = {
partition = p
this
}
def setChangeLogSystemStreams(storeName: String, systemStream: SystemStream) = {
changeLogSystemStreams = changeLogSystemStreams ++ Map(storeName -> systemStream)
this
}
def setSystemAdmin(system: String, systemAdmin: SystemAdmin) = {
systemAdmins = systemAdmins ++ Map(system -> systemAdmin)
this
}
def setTaskName(tn: TaskName) = {
taskName = tn
this
}
def build: TaskStorageManager = {
new TaskStorageManager(
taskName = taskName,
taskStores = taskStores,
storeConsumers = storeConsumers,
changeLogSystemStreams = changeLogSystemStreams,
changeLogStreamPartitions = changeLogStreamPartitions,
streamMetadataCache = streamMetadataCache,
storeBaseDir = storeBaseDir,
loggedStoreBaseDir = loggedStoreBaseDir,
partition = partition,
systemAdmins = systemAdmins
)
}
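  // Illustrative only: how this builder is typically composed (mirrors the
  // tests above; not an additional test case).
  private def exampleUsage(): TaskStorageManager =
    new TaskStorageManagerBuilder()
      .addStore("store1")
      .setPartition(new Partition(1))
      .setTaskName(new TaskName("exampleTask"))
      .build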
} | vjagadish/samza-clone | samza-core/src/test/scala/org/apache/samza/storage/TestTaskStorageManager.scala | Scala | apache-2.0 | 11,929 |
/**
* Copyright 2015 Thomson Reuters
*
* Licensed under the Apache License, Version 2.0 (the “License”); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an “AS IS” BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cmwell.imp
import collection.mutable
import java.io.File
import org.joda.time.DateTime
import org.scalatest.{BeforeAndAfterAll, FlatSpec, Matchers, Suite}
import cmwell.tlog.{TLog, TLogState}
import cmwell.domain._
import cmwell.common._
import cmwell.driver.{Dao, DaoExecution}
import cmwell.common.DeleteAttributesCommand
import cmwell.common.WriteCommand
import cmwell.domain.FString
import cmwell.domain.FInt
import cmwell.irw.IRWService
import cmwell.util.{BoxedFailure, EmptyBox, FullBox}
import k.grid._
import scala.util.{Failure, Success, Try}
import scala.concurrent.Future
import scala.concurrent.Await
import scala.concurrent.duration._
import scala.concurrent.ExecutionContext.Implicits.global
/**
* Created with IntelliJ IDEA.
* User: markz
* Date: 12/10/12
* Time: 8:05 PM
*
*/
trait IMPServiceTest extends BeforeAndAfterAll { this:Suite =>
var tlogCass:TLog = _
var uuidLogCass: TLog = _
var dao : Dao = _
var irw : IRWService = _
var imp : IMPServiceImpl = _
var impState : TLogState = _
override protected def beforeAll() {
super.beforeAll()
def init = {
System.setProperty("cmwell.home", new File(".").getCanonicalPath + File.separator + "target")
System.setProperty("dataCenter.id", "dc_test")
dao = Dao("Test", "data")
// in order for irw and imp to invoke new ZStore, "data2" must exist:
implicit val daoProxy = dao
new DaoExecution {
def addKeyspaceData2() = {
val stmt = dao.getSession.prepare("CREATE KEYSPACE IF NOT EXISTS data2 WITH REPLICATION = {'class': 'SimpleStrategy', 'replication_factor': 1 };").bind
executeAsyncInternal(stmt)
}
}.addKeyspaceData2
tlogCass = TLog("TLog", "w1")
uuidLogCass = TLog("UuidLog", "w1")
tlogCass.init()
uuidLogCass.init()
// create infoton read write service
irw = IRWService(dao, 25, true, 120.seconds)
impState = TLogState("imp", "TLog", "w1")
// create imp service
imp = IMPService(tlogCass, uuidLogCass, irw, impState)
Grid.setGridConnection(GridConnection(memberName = "imp", hostName = "127.0.0.1", port = 7777, seeds = Set("127.0.0.1:7777"), clusterName = "localTest"))
Grid.join
}
while(Try(init).isFailure) {
Thread.sleep(250)
}
}
override protected def afterAll() {
tlogCass.shutdown()
uuidLogCass.shutdown()
dao.shutdown()
Grid.shutdown
super.afterAll()
}
}
class IMPCassSpec extends FlatSpec with Matchers with IMPServiceTest {
val logger = {
import org.slf4j.LoggerFactory
com.typesafe.scalalogging.Logger(LoggerFactory.getLogger("ROOT"))
}
val timeToWait = 1000
val waitDuration = timeToWait.millis
"update command on a fresh directory" should "be successful" in {
val m : Map[String , Set[FieldValue]]= Map("name" -> Set(FString("gal"), FString("yoav")), "types" -> Set(FString("123"), FInt(123)))
val cmdUpdate = UpdatePathCommand("/cmd/p1/p2/p3/update" , Map.empty[String , Set[FieldValue]], m , new DateTime )
val payload : Array[Byte] = CommandSerializer.encode(cmdUpdate)
tlogCass.write(payload)
Thread.sleep(timeToWait)
// do the merge and persist
imp.stopAfterChunks = 1
imp.process
// check parents
val p = Vector("/cmd/p1/p2/p3/update","/cmd/p1/p2/p3","/cmd/p1/p2","/cmd/p1","/cmd")
val ps = Await.result(irw.readPathsAsync(p), waitDuration)
ps.size should equal (p.size)
}
"ignore alredy deleted infoton" should "be successful" in {
val m : Map[String , Set[FieldValue]]= Map("name" -> Set(FString("gal"), FString("yoav")), "types" -> Set(FString("123"), FInt(123)))
val obj = ObjectInfoton("/cmd/delete/dupdeltest","dc_test",None,m)
val cmdWrite = WriteCommand(obj)
val payload : Array[Byte] = CommandSerializer.encode(cmdWrite)
tlogCass.write(payload)
Thread.sleep(timeToWait * 10)
// do the merge and persist
imp.stopAfterChunks = 1
imp.process
val cmdDelete1 = DeletePathCommand("/cmd/delete/dupdeltest" , new DateTime())
tlogCass.write(CommandSerializer.encode(cmdDelete1))
Thread.sleep(timeToWait * 10)
// do the merge and persist
imp.stopAfterChunks = 1
imp.process
val cmdDelete2 = DeletePathCommand("/cmd/delete/dupdeltest" , new DateTime())
tlogCass.write(CommandSerializer.encode(cmdDelete2))
Thread.sleep(timeToWait * 10)
// do the merge and persist
imp.stopAfterChunks = 1
imp.process
val paths = irw.history("/cmd/delete/dupdeltest",100000)
paths.size should equal (2)
}
"write and merge" should "be successful" in {
var data : mutable.Buffer[Infoton] = new mutable.ListBuffer[Infoton]()
// create 1000 object infotons
for ( i <- 0 until 10 ) {
val m : Map[String , Set[FieldValue]]= Map("name" -> Set(FString("gal"), FString("yoav")), "types" -> Set(FString("123"), FInt(123)))
val objInfo = ObjectInfoton("/cmt/cm/command-test/objinfo_" + i,"dc_test", None, m)
data += objInfo
}
// iterate buffer infoton and write them to TLog
for ( item <- data ) {
// create write command
val cmdWrite = WriteCommand(item)
val payload : Array[Byte] = CommandSerializer.encode(cmdWrite)
tlogCass.write(payload)
}
    Thread.sleep(timeToWait)
// do the merge and persist
imp.stopAfterChunks = 1
imp.process
// check if can real all infotons
for ( item <- data ) {
val fields = item.fields.get
val typeCheck1 = fields("types").exists(_.isInstanceOf[FString])
val typeCheck2 = fields("types").exists(_.isInstanceOf[FInt])
val info = Await.result(irw.readUUIDAsync(item.uuid),waitDuration)
info match {
case FullBox(i) => { item.path should equal (i.path);
item.uuid should equal (i.uuid);
item.lastModified.getMillis should equal (i.lastModified.getMillis);
typeCheck1 should equal (true)
typeCheck2 should equal (true)
}
case EmptyBox => fail("empty box")
case BoxedFailure(e) => fail(e)
}
}
    // now let's check the read path method
for ( item <- data ) {
val info = Await.result(irw.readPathAsync(item.path),waitDuration)
info match {
case FullBox(i) => { item.path should equal (i.path);item.uuid should equal (i.uuid);item.lastModified.getMillis should equal (i.lastModified.getMillis); }
case EmptyBox => fail("empty box")
case BoxedFailure(e) => fail(e)
}
}
data = new mutable.ListBuffer[Infoton]()
// add a new kid name
for ( i <- 0 until 10 ) {
val m : Map[String , Set[FieldValue]] = Map("name" -> Set(FString("roni")))
val objInfo = ObjectInfoton("/cmt/cm/command-test/objinfo_" + i ,"dc_test", None, m)
data += objInfo
}
// iterate buffer infoton and write them to TLog
for ( item <- data ) {
// create write command
val cmdWrite = WriteCommand(item)
val payload : Array[Byte] = CommandSerializer.encode(cmdWrite)
tlogCass.write(payload)
}
Thread.sleep(timeToWait)
// do the merge and persist
imp.stopAfterChunks = 1
imp.process
    // now let's check the read path method
for ( item <- data ) {
val info = Await.result(irw.readPathAsync(item.path),waitDuration)
info match {
case FullBox(i) => { item.path should equal (i.path);item.lastModified.getMillis should equal (i.lastModified.getMillis); 3 should equal (i.fields.get("name").size) }
case EmptyBox => fail("empty box")
case BoxedFailure(e) => fail(e)
}
}
data = new mutable.ListBuffer[Infoton]()
// add a new kid name
for ( i <- 0 until 10 ) {
val m : Map[String , Set[FieldValue]] = Map("last_name" -> Set(FString("smith")))
val objInfo = ObjectInfoton("/cmt/cm/command-test/objinfo_" + i,"dc_test", None, m)
data += objInfo
}
// iterate buffer infoton and write them to TLog
for ( item <- data ) {
// create write command
val cmdWrite = WriteCommand(item)
val payload : Array[Byte] = CommandSerializer.encode(cmdWrite)
tlogCass.write(payload)
}
Thread.sleep(timeToWait)
// do the merge and persist
imp.stopAfterChunks = 1
imp.process
    // now let's check the read path method
for ( item <- data ) {
val info = Await.result(irw.readPathAsync(item.path),waitDuration)
info match {
case FullBox(i) => { item.path should equal (i.path);item.lastModified.getMillis should equal (i.lastModified.getMillis); 3 should equal (i.fields.get("name").size);1 should equal (i.fields.get("last_name").size) }
case EmptyBox => fail("empty box")
case BoxedFailure(e) => fail(e)
}
}
    // now let's delete
data = new mutable.ListBuffer[Infoton]()
for ( i <- 0 until 10 ) {
val m : Map[String , Set[FieldValue]] = Map("name" -> Set(FString("gal"), FString("yoav")))
val objInfo = ObjectInfoton( "/cmt/cm/command-test/objinfo_" + i,"dc_test", None, m)
data += objInfo
}
for ( item <- data ) {
// create write command
val cmdDeleteAtrib = DeleteAttributesCommand(item.path,item.fields.get , new DateTime)
val payload : Array[Byte] = CommandSerializer.encode(cmdDeleteAtrib)
tlogCass.write(payload)
}
Thread.sleep(timeToWait)
// do the merge and persist
imp.stopAfterChunks = 1
imp.process
for ( item <- data ) {
val info = Await.result(irw.readPathAsync(item.path),waitDuration)
info match {
case FullBox(i) => { item.path should equal (i.path);1 should equal (i.fields.get("name").size);1 should equal (i.fields.get("last_name").size) }
case EmptyBox => fail("empty box")
case BoxedFailure(e) => fail(e)
}
}
    // now let's delete
data = new mutable.ListBuffer[Infoton]()
for ( i <- 0 until 10 ) {
val m : Map[String , Set[FieldValue]] = Map("last_name" -> Set(FString("smith")))
val objInfo = ObjectInfoton("/cmt/cm/command-test/objinfo_" + i ,"dc_test", None, m)
data += objInfo
}
for ( item <- data ) {
// create write command
val cmdDeleteAttributes = DeleteAttributesCommand(item.path,item.fields.get, new DateTime)
val payload : Array[Byte] = CommandSerializer.encode(cmdDeleteAttributes)
tlogCass.write(payload)
}
Thread.sleep(timeToWait)
// do the merge and persist
imp.stopAfterChunks = 1
imp.process
for ( item <- data ) {
val info = Await.result(irw.readPathAsync(item.path),waitDuration)
info match {
case FullBox(i) => {
item.path should equal(i.path)
1 should equal(i.fields.get("name").size)
1 should equal(i.fields.size)
}
case EmptyBox => fail("empty box")
case BoxedFailure(e) => fail(e)
}
}
data = new mutable.ListBuffer[Infoton]()
for ( i <- 0 until 10 ) {
val m : Map[String , Set[FieldValue]] = Map("L" -> Set(FString("U"),FString("I")),"Location" -> Set(FString("USA")),"COMPANY" -> Set(FString("IBM"),FString("FACEBOOK"),FString("GOOGLE")))
val objInfo = ObjectInfoton("/cmt/cm/update/command-test/objinfo_" + i,"dc_test", None, m)
data += objInfo
}
for ( item <- data ) {
// create write command
val cmdUpdate = UpdatePathCommand(item.path,Map.empty, item.fields.get, new DateTime)
val payload : Array[Byte] = CommandSerializer.encode(cmdUpdate)
tlogCass.write(payload)
}
Thread.sleep(timeToWait)
// do the merge and persist
imp.stopAfterChunks = 1
imp.process
for ( item <- data ) {
val info = Await.result(irw.readPathAsync(item.path),waitDuration)
info match {
case FullBox(i) => {
item.path should equal (i.path)
val data = i.fields.get
1 should equal ( data("Location").size )
3 should equal ( data("COMPANY").size )
2 should equal ( data("L").size )
}
case EmptyBox => fail(s"Option was empty - infoton could not be read from IRW for path ${item.path}")
case BoxedFailure(e) => fail(e)
}
}
val m = Await.result(Future.traverse(data){ item =>
irw.readPathAsync(item.path).map(item.path -> _.toOption)
}.map(_.toMap.withDefaultValue(None)),waitDuration*10)
logger.info(s"infotons before ingesting: $m")
for ( item <- data ) {
// create write command
val d_f : Map[String , Set[FieldValue]] = Map("L" -> Set.empty[FieldValue], "Location" -> Set(FString("USA")),"COMPANY" -> Set(FString("GOOGLE"),FString("FACEBOOK")) )
val u_f : Map[String , Set[FieldValue]] = Map("COMPANY" -> Set(FString("WAZE")))
val cmdUpdate = UpdatePathCommand(item.path,d_f, u_f, new DateTime)
val payload : Array[Byte] = CommandSerializer.encode(cmdUpdate)
tlogCass.write(payload)
}
// do the merge and persist
imp.stopAfterChunks = 1
imp.process
for ( item <- data ) {
//spin-check data is persisted.
var flag = true
var wait = waitDuration
var increasingWait = 50.millis
var cntr = 51
do {
logger.info(s"waiting for ${item.path} to be persisted.")
val f = cmwell.util.concurrent.SimpleScheduler.scheduleFuture(increasingWait){
irw.readPathAsync(item.path, cmwell.irw.QUORUM)
}
Try(Await.result(f,wait)) match {
case Success(opt) => opt.foreach{ i =>
m(i.path).foreach{ j =>
logger.info(s"new infoton read from irw [${i.uuid}]: $i")
logger.info(s"infoton stored in map [${j.uuid}]: $j")
flag = i.fields.fold(true)(m => m.size != 1 && m.get("COMPANY").fold(true)(_.size != 2))
}
}
case Failure(err) => logger.error(s"Await for irw.readPathAsync(${item.path}) failed",err)
}
cntr -= 1
wait += waitDuration
increasingWait += 50.millis
} while(flag && cntr > 0)
val info = Await.result(irw.readPathAsync(item.path, cmwell.irw.QUORUM),waitDuration)
info match {
case FullBox(i) => {
logger.info(s"received infoton for path='${item.path}': $i")
item.path should equal (i.path)
val data = i.fields.get
withClue(i) {
2 should equal ( data("COMPANY").size )
1 should equal ( data.size )
}
}
case EmptyBox => fail(s"did not receive infoton for path='${item.path}'")
case BoxedFailure(e) => fail(e)
}
}
data = new mutable.ListBuffer[Infoton]()
for ( i <- 0 until 10 ) {
val m : Map[String , Set[FieldValue]]= Map("name" -> Set(FString("gal"), FString("yoav")), "types" -> Set(FString("123"), FInt(123)))
val objInfo = ObjectInfoton("/cmt/cm/command-test/delete/objinfo_" + i,"dc_test", None, m)
data += objInfo
}
// iterate buffer infoton and write them to TLog
for ( item <- data ) {
// create write command
val cmdWrite = WriteCommand(item)
val payload : Array[Byte] = CommandSerializer.encode(cmdWrite)
tlogCass.write(payload)
}
Thread.sleep(timeToWait)
// do the merge and persist
imp.stopAfterChunks = 1
imp.process
// check if can real all infotons
for ( item <- data ) {
val fields = item.fields
val typeCheck1 = fields.get("types").toList(0).isInstanceOf[FString]
val typeCheck2 = fields.get("types").toList(1).isInstanceOf[FInt]
val info = Await.result(irw.readUUIDAsync(item.uuid),waitDuration)
info match {
case FullBox(i) => { item.path should equal (i.path)
item.uuid should equal (i.uuid)
item.lastModified.getMillis should equal (i.lastModified.getMillis)
typeCheck1 should equal (true)
typeCheck2 should equal (true)
}
case EmptyBox => fail("empty box")
case BoxedFailure(e) => fail(e)
}
}
    // now let's check the read path method
for ( item <- data ) {
val info = Await.result(irw.readPathAsync(item.path),waitDuration)
info match {
case FullBox(i) => { item.path should equal (i.path);item.uuid should equal (i.uuid);item.lastModified.getMillis should equal (i.lastModified.getMillis) }
case EmptyBox => fail("empty box")
case BoxedFailure(e) => fail(e)
}
}
    // now let's delete all infotons
for ( item <- data ) {
val cmdDeletePath = DeletePathCommand(item.path , new DateTime)
val payload : Array[Byte] = CommandSerializer.encode(cmdDeletePath)
tlogCass.write(payload)
}
Thread.sleep(timeToWait)
// do the merge and persist
imp.stopAfterChunks = 1
imp.process
for ( item <- data ) {
val info = Await.result(irw.readPathAsync(item.path),waitDuration)
info match {
case FullBox(i) => {
item.path should equal (i.path)
i match {
case d:DeletedInfoton =>
true should equal (true)
case _ =>
true should equal (false)
}
}
case EmptyBox => fail("empty box")
case BoxedFailure(e) => fail(e)
}
}
// let's write
for ( item <- data ) {
// create write command
val cmdWrite = WriteCommand(item)
val payload : Array[Byte] = CommandSerializer.encode(cmdWrite)
tlogCass.write(payload)
}
Thread.sleep(timeToWait)
// do the merge and persist
imp.stopAfterChunks = 1
imp.process
    // now let's check the read path method
for ( item <- data ) {
val info = Await.result(irw.readPathAsync(item.path),waitDuration)
info match {
case FullBox(i) => item.path should equal (i.path)
case EmptyBox => fail("empty box")
case BoxedFailure(e) => fail(e)
}
}
}
}
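// A small, self-contained sketch of the spin-wait the last test above uses:
// poll until a predicate holds or the attempts run out. Names are
// illustrative; production code would prefer a non-blocking scheduler.
object SpinWaitSketch {
  import scala.concurrent.duration._
  def awaitCondition(maxAttempts: Int, pause: FiniteDuration)(cond: => Boolean): Boolean = {
    var attempts = maxAttempts
    while (attempts > 0 && !cond) {
      Thread.sleep(pause.toMillis)
      attempts -= 1
    }
    cond
  }
}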
| nruppin/CM-Well | server/cmwell-imp/src/test/scala/cmwell/imp/IMPCassSpec.scala | Scala | apache-2.0 | 18,552 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.mxnet
import java.nio.{ByteBuffer, ByteOrder}
import org.apache.mxnet.Base._
import org.apache.mxnet.DType.DType
import org.apache.mxnet.MX_PRIMITIVES.MX_PRIMITIVE_TYPE
import org.slf4j.LoggerFactory
import scala.collection.mutable
import scala.collection.mutable.{ArrayBuffer, ListBuffer}
import scala.language.implicitConversions
import scala.ref.WeakReference
import scala.util.Try
/**
* NDArray Object extends from NDArrayBase for abstract function signatures
* Main code will be generated during compile time through Macros
*/
@AddNDArrayFunctions(false)
object NDArray extends NDArrayBase {
implicit def getFirstResult(ret: NDArrayFuncReturn): NDArray = ret(0)
private val logger = LoggerFactory.getLogger(classOf[NDArray])
private val functions: Map[String, NDArrayFunction] = initNDArrayModule()
val api = NDArrayAPI
val random = NDArrayRandomAPI
private def addDependency(froms: Array[NDArray], tos: Array[NDArray]): Unit = {
froms.foreach { from =>
val weakRef = new WeakReference(from)
tos.foreach { to =>
to.dependencies.put(from.handle, weakRef)
// we add all dep's dep to prevent (recursively) recomputing at runtime.
to.dependencies ++= from.dependencies
}
}
}
  /**
   * Used by NDArrayMacro.
   * Invoke this function by passing in parameters.
   *
   * @param args Positional arguments of input scalars and NDArray
   * @param kwargs Key-value arguments of input scalars
   * @return The resulting NDArray(s) of the computation.
   */
private[mxnet] def genericNDArrayFunctionInvoke(
funcName: String, args: Seq[Any], kwargs: Map[String, Any] = null): NDArrayFuncReturn = {
val function = functions(funcName)
val ndArgs = ArrayBuffer.empty[NDArray]
val posArgs = ArrayBuffer.empty[String]
args.foreach {
case arr: NDArray =>
ndArgs.append(arr)
case arrFunRet: NDArrayFuncReturn =>
arrFunRet.arr.foreach(ndArgs.append(_))
case arg =>
posArgs.append(arg.toString)
}
require(posArgs.length <= function.arguments.length,
s"len(posArgs) = ${posArgs.length}, should be less or equal to len(arguments) " +
s"= ${function.arguments.length}")
val updatedKwargs: Map[String, String] =
(Option(kwargs).getOrElse(Map.empty[String, String])
++ function.arguments.slice(0, posArgs.length).zip(posArgs) - "out"
).map { case (k, v) => k -> v.toString }
val (oriOutputs, outputVars) =
if (kwargs != null && kwargs.contains("out")) {
val output = kwargs("out")
output match {
case nd: NDArray => (Array(nd), Array(nd.handle))
case ndFuncRet: NDArrayFuncReturn => (ndFuncRet.arr, ndFuncRet.arr.map(_.handle))
// Seq[NDArray] erasure problem explained here https://stackoverflow.com/questions/1094173/
case ndArr: Seq[NDArray @unchecked] =>
if (ndArr.head.isInstanceOf[NDArray]) (ndArr.toArray, ndArr.toArray.map(_.handle))
else throw new IllegalArgumentException(
s"""Unsupported out ${output.getClass} type,
| should be NDArray or subclass of Seq[NDArray]""".stripMargin)
case _ => throw new IllegalArgumentException(
s"""Unsupported out ${output.getClass} type,
| should be NDArray or subclass of Seq[NDArray]""".stripMargin)
}
} else {
(null, null)
}
val outputs = ArrayBuffer.empty[NDArrayHandle]
checkCall(_LIB.mxImperativeInvoke(function.handle, ndArgs.map(_.handle).toArray, outputVars,
outputs, updatedKwargs.size, updatedKwargs.keys.toArray, updatedKwargs.values.toArray))
new NDArrayFuncReturn(Option(oriOutputs).getOrElse {
val outputArrs = outputs.map(new NDArray(_)).toArray
addDependency(ndArgs.toArray, outputArrs)
outputArrs
})
}
/**
* Return a new empty handle.
   * An empty handle can be used to hold the result of a computation.
*
* @return a new empty ndarray handle
*/
private def newEmptyHandle(): NDArrayHandle = {
val hdl = new NDArrayHandleRef
checkCall(_LIB.mxNDArrayCreateNone(hdl))
hdl.value
}
/**
   * Return a new handle with the specified shape and context.
   * The handle is only used to hold results; allocation may be delayed.
*
* @return a new empty ndarray handle
*/
private def newAllocHandle(shape: Shape,
ctx: Context,
delayAlloc: Boolean,
dtype: DType = DType.Float32): NDArrayHandle = {
val hdl = new NDArrayHandleRef
checkCall(_LIB.mxNDArrayCreateEx(
shape.toArray,
shape.length,
ctx.deviceTypeid,
ctx.deviceId,
if (delayAlloc) 1 else 0,
dtype.id,
hdl))
hdl.value
}
/**
   * Wait for all pending asynchronous operations in MXNet to finish.
   * This function is intended for benchmarking only.
*/
def waitall(): Unit = {
checkCall(_LIB.mxNDArrayWaitAll())
}
// List and add all the atomic symbol functions to current module.
private def initNDArrayModule(): Map[String, NDArrayFunction] = {
val opNames = ListBuffer.empty[String]
checkCall(_LIB.mxListAllOpNames(opNames))
opNames.map(opName => {
val opHandle = new RefLong
checkCall(_LIB.nnGetOpHandle(opName, opHandle))
makeNDArrayFunction(opHandle.value, opName)
}).toMap
}
// Create an atomic symbol function by handle and function name.
private def makeNDArrayFunction(handle: NDArrayHandle, aliasName: String)
: (String, NDArrayFunction) = {
val name = new RefString
val desc = new RefString
val keyVarNumArgs = new RefString
val numArgs = new RefInt
val argNames = ListBuffer.empty[String]
val argTypes = ListBuffer.empty[String]
val argDescs = ListBuffer.empty[String]
checkCall(_LIB.mxSymbolGetAtomicSymbolInfo(
handle, name, desc, numArgs, argNames, argTypes, argDescs, keyVarNumArgs))
val arguments = (argTypes zip argNames).filter { case (dtype, _) =>
!(dtype.startsWith("NDArray") || dtype.startsWith("Symbol")
|| dtype.startsWith("NDArray-or-Symbol"))
}.map { case (_, argName) =>
argName
}
(aliasName, new NDArrayFunction(handle, arguments.toList))
}
/**
* One hot encoding indices into matrix out.
* @param indices An NDArray containing indices of the categorical features.
* @param out The result holder of the encoding.
* @return Same as out.
*/
def onehotEncode(indices: NDArray, out: NDArray): NDArray = {
NDArray.genericNDArrayFunctionInvoke(
"_onehot_encode", Seq(indices, out), Map("out" -> out))(0)
}
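  /**
   * Hedged usage sketch for onehotEncode (not public API). The indices values
   * and the (3, 3) output shape are illustrative, and the `array` factory
   * defined elsewhere in this object is assumed.
   */
  private def onehotEncodeExample(): NDArray = {
    val indices = array(Array(0f, 2f, 1f), Shape(3))
    val out = zeros(3, 3)
    onehotEncode(indices, out) // row i gets a 1 at column indices(i)
  }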
/**
* Create an empty uninitialized new NDArray, with specified shape.
*
* @param shape shape of the NDArray.
* @param ctx The context of the NDArray, default to current default context.
*
* @return The created NDArray.
*/
def empty(shape: Shape, ctx: Context = null, dtype: DType = Base.MX_REAL_TYPE): NDArray = {
val context = if (ctx == null) Context.defaultCtx else ctx
new NDArray(handle = NDArray.newAllocHandle(shape, context, delayAlloc = false, dtype))
}
def empty(shape: Int *): NDArray = empty(Shape(shape: _*))
def empty(ctx: Context, shape: Int *): NDArray = empty(Shape(shape: _*), ctx)
/**
* Create a new NDArray filled with 0, with specified shape.
*
* @param shape shape of the NDArray.
* @param ctx The context of the NDArray, default to current default context.
*
* @return The created NDArray.
*/
def zeros(shape: Shape, ctx: Context = null, dtype: DType = Base.MX_REAL_TYPE): NDArray = {
val arr = empty(shape, ctx, dtype)
arr.set(0f)
arr
}
def zeros(shape: Int *): NDArray = zeros(Shape(shape: _*))
def zeros(ctx: Context, shape: Int *): NDArray = zeros(Shape(shape: _*), ctx)
/**
* Create a new NDArray filled with 1, with specified shape.
* @param shape shape of the NDArray.
* @param ctx The context of the NDArray, default to current default context.
* @return The created NDArray.
*/
def ones(shape: Shape, ctx: Context = null, dtype: DType = Base.MX_REAL_TYPE): NDArray = {
val arr = empty(shape, ctx, dtype)
arr.set(1f)
arr
}
def ones(shape: Int *): NDArray = ones(Shape(shape: _*))
def ones(ctx: Context, shape: Int *): NDArray = ones(Shape(shape: _*), ctx)
/**
* Create a new NDArray filled with given value, with specified shape.
* @param shape shape of the NDArray.
* @param value value to be filled with
* @param ctx The context of the NDArray, default to current default context
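   *
   * Illustrative example (values are hypothetical):
   * {{{
   * val arr = NDArray.full(Shape(2, 2), 3.14f) // every element is 3.14f
   * }}}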
*/
def full(shape: Shape, value: Float, ctx: Context = null): NDArray = {
val arr = empty(shape, ctx)
arr.set(value)
arr
}
def full(shape: Shape, value: Double, ctx: Context): NDArray = {
val arr = empty(shape, ctx, DType.Float64)
arr.set(value)
arr
}
/**
* Create a new NDArray filled with given value, with specified shape.
* @param shape shape of the NDArray.
* @param value value to be filled with
*/
def full(shape: Shape, value: Double): NDArray = {
full(shape, value, null)
}
/**
   * Perform element-wise power operation on two NDArrays. Returns the result as an NDArray.
   * @param lhs The base NDArray.
   * @param rhs The exponent NDArray.
*/
def power(lhs: NDArray, rhs: NDArray): NDArray = {
NDArray.genericNDArrayFunctionInvoke("_power", Seq(lhs, rhs))
}
/**
* Perform scalar power operation on NDArray. Returns result as NDArray
   * @param lhs NDArray on which to perform the operation.
* @param rhs The scalar input. Can be of type Float/Double
*/
def power(lhs: NDArray, rhs: MX_PRIMITIVE_TYPE): NDArray = {
NDArray.genericNDArrayFunctionInvoke("_power_scalar", Seq(lhs, rhs))
}
/**
* Perform scalar power operation on NDArray. Returns result as NDArray
* @param lhs The scalar input. Can be of type Float/Double
   * @param rhs NDArray on which to perform the operation.
*/
def power(lhs: MX_PRIMITIVE_TYPE, rhs: NDArray): NDArray = {
NDArray.genericNDArrayFunctionInvoke("_rpower_scalar", Seq(lhs, rhs))
}
// Perform maximum operator
def maximum(lhs: NDArray, rhs: NDArray): NDArray = {
NDArray.genericNDArrayFunctionInvoke("_maximum", Seq(lhs, rhs))
}
/**
* Perform the max operation on NDArray. Returns the result as NDArray.
   * @param lhs NDArray on which to perform the operation.
* @param rhs The scalar input. Can be of type Float/Double
*/
def maximum(lhs: NDArray, rhs: MX_PRIMITIVE_TYPE): NDArray = {
NDArray.genericNDArrayFunctionInvoke("_maximum_scalar", Seq(lhs, rhs))
}
/**
* Perform the max operation on NDArray. Returns the result as NDArray.
* @param lhs The scalar input. Can be of type Float/Double
   * @param rhs NDArray on which to perform the operation.
*/
def maximum(lhs: MX_PRIMITIVE_TYPE, rhs: NDArray): NDArray = {
NDArray.genericNDArrayFunctionInvoke("_maximum_scalar", Seq(lhs, rhs))
}
// Perform minimum operator
def minimum(lhs: NDArray, rhs: NDArray): NDArray = {
NDArray.genericNDArrayFunctionInvoke("_minimum", Seq(lhs, rhs))
}
/**
* Perform the min operation on NDArray. Returns the result as NDArray.
   * @param lhs NDArray on which to perform the operation.
* @param rhs The scalar input. Can be of type Float/Double
*/
def minimum(lhs: NDArray, rhs: MX_PRIMITIVE_TYPE): NDArray = {
NDArray.genericNDArrayFunctionInvoke("_minimum_scalar", Seq(lhs, rhs))
}
/**
* Perform the min operation on NDArray. Returns the result as NDArray.
* @param lhs The scalar input. Can be of type Float/Double
   * @param rhs NDArray on which to perform the operation.
*/
def minimum(lhs: MX_PRIMITIVE_TYPE, rhs: NDArray): NDArray = {
NDArray.genericNDArrayFunctionInvoke("_minimum_scalar", Seq(lhs, rhs))
}
/**
* Returns the result of element-wise **equal to** (==) comparison operation with broadcasting.
   * For each element in input arrays, return 1(true) if corresponding elements are the same,
* otherwise return 0(false).
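   *
   * Illustrative example (values are assumptions):
   * {{{
   * val a = NDArray.array(Array(1f, 2f), Shape(2))
   * val b = NDArray.array(Array(1f, 3f), Shape(2))
   * NDArray.equal(a, b).toArray // Array(1.0f, 0.0f)
   * }}}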
*/
def equal(lhs: NDArray, rhs: NDArray): NDArray = {
NDArray.genericNDArrayFunctionInvoke("broadcast_equal", Seq(lhs, rhs))
}
/**
* Returns the result of element-wise **equal to** (==) comparison operation with broadcasting.
   * For each element in input arrays, return 1(true) if corresponding elements are the same,
* otherwise return 0(false).
*
* @param lhs NDArray
* @param rhs The scalar input. Can be of type Float/Double
*/
def equal(lhs: NDArray, rhs: MX_PRIMITIVE_TYPE): NDArray = {
NDArray.genericNDArrayFunctionInvoke("_equal_scalar", Seq(lhs, rhs))
}
/**
* Returns the result of element-wise **not equal to** (!=) comparison operation
* with broadcasting.
* For each element in input arrays, return 1(true) if corresponding elements are different,
* otherwise return 0(false).
*/
def notEqual(lhs: NDArray, rhs: NDArray): NDArray = {
NDArray.genericNDArrayFunctionInvoke("broadcast_not_equal", Seq(lhs, rhs))
}
/**
* Returns the result of element-wise **not equal to** (!=) comparison operation
* with broadcasting.
* For each element in input arrays, return 1(true) if corresponding elements are different,
* otherwise return 0(false).
* @param lhs NDArray
* @param rhs The scalar input. Can be of type Float/Double
*/
def notEqual(lhs: NDArray, rhs: MX_PRIMITIVE_TYPE): NDArray = {
NDArray.genericNDArrayFunctionInvoke("_not_equal_scalar", Seq(lhs, rhs))
}
/**
* Returns the result of element-wise **greater than** (>) comparison operation
* with broadcasting.
* For each element in input arrays, return 1(true) if lhs elements are greater than rhs,
* otherwise return 0(false).
*/
def greater(lhs: NDArray, rhs: NDArray): NDArray = {
NDArray.genericNDArrayFunctionInvoke("broadcast_greater", Seq(lhs, rhs))
}
/**
* Returns the result of element-wise **greater than** (>) comparison operation
* with broadcasting.
* For each element in input arrays, return 1(true) if lhs elements are greater than rhs,
* otherwise return 0(false).
*
* @param lhs NDArray
* @param rhs The scalar input. Can be of type Float/Double
*/
def greater(lhs: NDArray, rhs: MX_PRIMITIVE_TYPE): NDArray = {
NDArray.genericNDArrayFunctionInvoke("_greater_scalar", Seq(lhs, rhs))
}
/**
* Returns the result of element-wise **greater than or equal to** (>=) comparison
* operation with broadcasting.
   * For each element in input arrays, return 1(true) if lhs elements are greater than or equal to rhs,
* otherwise return 0(false).
*/
def greaterEqual(lhs: NDArray, rhs: NDArray): NDArray = {
NDArray.genericNDArrayFunctionInvoke("broadcast_greater_equal", Seq(lhs, rhs))
}
/**
* Returns the result of element-wise **greater than or equal to** (>=) comparison
* operation with broadcasting.
   * For each element in input arrays, return 1(true) if lhs elements are greater than or equal to
* rhs, otherwise return 0(false).
*
* @param lhs NDArray
* @param rhs The scalar input. Can be of type Float/Double
*/
def greaterEqual(lhs: NDArray, rhs: MX_PRIMITIVE_TYPE): NDArray = {
NDArray.genericNDArrayFunctionInvoke("_greater_equal_scalar", Seq(lhs, rhs))
}
/**
* Returns the result of element-wise **lesser than** (<) comparison operation
* with broadcasting.
* For each element in input arrays, return 1(true) if lhs elements are less than rhs,
* otherwise return 0(false).
*/
def lesser(lhs: NDArray, rhs: NDArray): NDArray = {
NDArray.genericNDArrayFunctionInvoke("broadcast_lesser", Seq(lhs, rhs))
}
/**
* Returns the result of element-wise **lesser than** (<) comparison operation
* with broadcasting.
* For each element in input arrays, return 1(true) if lhs elements are less than rhs,
* otherwise return 0(false).
* @param lhs NDArray
* @param rhs The scalar input. Can be of type Float/Double
*/
def lesser(lhs: NDArray, rhs: MX_PRIMITIVE_TYPE): NDArray = {
NDArray.genericNDArrayFunctionInvoke("_lesser_scalar", Seq(lhs, rhs))
}
/**
* Returns the result of element-wise **lesser than or equal to** (<=) comparison
* operation with broadcasting.
* For each element in input arrays, return 1(true) if lhs elements are
   * less than or equal to rhs, otherwise return 0(false).
*/
def lesserEqual(lhs: NDArray, rhs: NDArray): NDArray = {
NDArray.genericNDArrayFunctionInvoke("broadcast_lesser_equal", Seq(lhs, rhs))
}
/**
* Returns the result of element-wise **lesser than or equal to** (<=) comparison
* operation with broadcasting.
* For each element in input arrays, return 1(true) if lhs elements are
   * less than or equal to rhs, otherwise return 0(false).
*
* @param lhs NDArray
* @param rhs The scalar input. Can be of type Float/Double
*/
def lesserEqual(lhs: NDArray, rhs: MX_PRIMITIVE_TYPE): NDArray = {
NDArray.genericNDArrayFunctionInvoke("_lesser_equal_scalar", Seq(lhs, rhs))
}
/**
   * Create a new NDArray that copies content from `sourceArr`.
* @param sourceArr Source data to create NDArray from.
* @param shape shape of the NDArray
* @param ctx The context of the NDArray, default to current default context.
* @return The created NDArray.
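   *
   * Illustrative example (row-major layout):
   * {{{
   * val nd = NDArray.array(Array(1f, 2f, 3f, 4f), Shape(2, 2)) // [[1, 2], [3, 4]]
   * }}}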
*/
def array(sourceArr: Array[Float], shape: Shape, ctx: Context = null): NDArray = {
val arr = empty(shape, ctx)
arr.set(sourceArr)
arr
}
def array(sourceArr: Array[Double], shape: Shape, ctx: Context): NDArray = {
val arr = empty(shape, ctx, dtype = DType.Float64)
arr.set(sourceArr)
arr
}
def array(sourceArr: Array[Double], shape: Shape): NDArray = {
array(sourceArr, shape, null)
}
/**
* Create a new NDArray based on the structure of source Array
* @param sourceArr Array[Array...Array[MX_PRIMITIVE_TYPE]...]
   * @param ctx the context to pass in (defaults to the current default context)
* @return an NDArray with the same shape of the input
* @throws IllegalArgumentException if the data type is not valid
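   *
   * Illustrative example (a nested 2x2 Float array):
   * {{{
   * val nd = NDArray.toNDArray(Array(Array(1f, 2f), Array(3f, 4f))) // shape (2, 2)
   * }}}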
*/
def toNDArray(sourceArr: Array[_], ctx : Context = null) : NDArray = {
val shape = shapeGetter(sourceArr)
val container = new Array[Any](shape.product)
flattenArray(sourceArr, container, 0, container.length - 1)
val finalArr = container(0) match {
      case _: Float => array(container.map(_.asInstanceOf[Float]), Shape(shape), ctx)
      case _: Double => array(container.map(_.asInstanceOf[Double]), Shape(shape), ctx)
case _ => throw new IllegalArgumentException(
s"Unsupported type ${container(0).getClass}, please check MX_PRIMITIVES for valid types")
}
finalArr
}
private def shapeGetter(sourceArr : Any) : ArrayBuffer[Int] = {
sourceArr match {
// e.g : Array[Double] the inner layer
case arr: Array[_] if MX_PRIMITIVES.isValidMxPrimitiveType(arr(0)) => {
ArrayBuffer[Int](arr.length)
}
// e.g : Array[Array...[]]
case arr: Array[_] => {
var arrBuffer = new ArrayBuffer[Int]()
if (!arr.isEmpty) arrBuffer = shapeGetter(arr(0))
for (idx <- arr.indices) {
        require(arrBuffer == shapeGetter(arr(idx)), "All sub-arrays must have the same shape")
}
arrBuffer.insert(0, arr.length)
arrBuffer
}
case _ => throw new IllegalArgumentException(s"Wrong type passed: ${sourceArr.getClass}")
}
}
private def flattenArray(sourceArr : Any, arr : Array[Any],
start : Int, end : Int) : Unit = {
sourceArr match {
case arrValid: Array[_] if MX_PRIMITIVES.isValidMxPrimitiveType(arrValid(0)) => {
for (i <- arrValid.indices) arr(start + i) = arrValid(i)
}
case arrAny: Array[_] => {
val fragment = (end - start + 1) / arrAny.length
for (i <- arrAny.indices)
flattenArray(arrAny(i), arr, start + i * fragment, start + (i + 1) * fragment)
}
case _ => throw new IllegalArgumentException(s"Wrong type passed: ${sourceArr.getClass}")
}
}
/**
* Returns evenly spaced values within a given interval.
* Values are generated within the half-open interval [`start`, `stop`). In other
* words, the interval includes `start` but excludes `stop`.
* @param start Start of interval. The default start value is 0.
* @param stop End of interval.
* @param step Spacing between values. The default step size is 1.
* @param repeat Number of times to repeat each element. The default repeat count is 1.
   * @note The underlying `infer_range` option is always passed as `false` by this method.
* @param ctx Device context. Default context is the current default context.
* @param dType The data type of the `NDArray`. The default datatype is `DType.Float32`.
* @return NDArray of evenly spaced values in the specified range.
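   *
   * Illustrative example (explicit arguments; this overload has no defaults):
   * {{{
   * // 0, 1, 2, 3, 4 as Float32 values
   * val r = NDArray.arange(0f, Some(5f), 1f, 1, Context.defaultCtx, DType.Float32)
   * }}}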
*/
def arange(start: Float, stop: Option[Float], step: Float,
repeat: Int, ctx: Context, dType: DType): NDArray = {
val params = Map("start" -> start, "step" -> step, "repeat" -> repeat,
"infer_range" -> false, "ctx" -> ctx.toString, "dtype" -> dType.toString())
    val fParams = if (stop.isEmpty) params else params ++ Map("stop" -> stop.get)
NDArray.genericNDArrayFunctionInvoke("_arange", Seq(), fParams)(0)
}
/**
* Concatenate a list of NDArrays along the specified dimension.
   * @param arrays Arrays to be concatenated.
* They must have identical shape except the first dimension.
* They also must have the same data type.
* @param axis The axis along which to concatenate.
* @param alwaysCopy Default `True`. When not `True`,
   * if the arrays contain only one `NDArray`,
   * that element will be returned directly, avoiding a copy.
* @return An `NDArray` that lives on the same context as `arrays[0].context`.
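   *
   * Illustrative example (shapes must match on all non-concatenated axes):
   * {{{
   * val a = NDArray.ones(2, 3)
   * val b = NDArray.zeros(2, 3)
   * val c = NDArray.concatenate(Seq(a, b)) // shape (4, 3), along axis 0
   * }}}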
*/
def concatenate(arrays: Seq[NDArray], axis: Int = 0, alwaysCopy: Boolean = true): NDArray = {
require(arrays.size > 0, "Provide at least one array")
val array0 = arrays(0)
if (!alwaysCopy && arrays.size == 1) {
array0
} else {
val shapeRest1 = array0.shape.slice(0, axis)
val shapeRest2 = array0.shape.slice(axis + 1, array0.shape.length)
val dtype = array0.dtype
val shapeAxis =
arrays.map(arr => {
require(shapeRest1 == arr.shape.slice(0, axis),
s"Mismatch between shape $shapeRest1 and ${arr.shape}")
require(shapeRest2 == arr.shape.slice(axis + 1, arr.shape.length),
s"Mismatch between shape $shapeRest2 and ${arr.shape}")
require(dtype == arr.dtype,
s"All arrays must have the same type (got ${dtype} and ${arr.dtype})")
arr.shape(axis)
}).sum
val retShape = shapeRest1 ++ Shape(shapeAxis) ++ shapeRest2
val ret = NDArray.empty(retShape, ctx = array0.context, dtype = dtype)
var idx = 0
val begin = Array.fill(retShape.length)(0)
val end = retShape.toArray
for (arr <- arrays) {
if (axis == 0) {
ret.slice(idx, idx + arr.shape(0)).set(arr).dispose()
} else {
begin(axis) = idx
end(axis) = idx + arr.shape(axis)
NDArray._crop_assign(Map("out" -> ret,
"begin" -> Shape(begin),
"end" -> Shape(end)))(ret, arr)
}
idx += arr.shape(axis)
}
ret
}
}
def concatenate(arrays: NDArray *): NDArray = {
concatenate(arrays.toSeq)
}
/**
* Load ndarray from binary file.
*
   * You can also use pickle to do the job if you only work in Python.
   * The advantage of load/save is that the file is language agnostic.
   * This means a file saved with save can be loaded by the other language bindings of mxnet.
   * You also get the benefit of being able to directly load/save from cloud storage (S3, HDFS).
   *
   * @param fname
   *        The name of the file. Can be an S3 or HDFS address (remember to build with S3 support).
* Example of fname:
* - `s3://my-bucket/path/my-s3-ndarray`
* - `hdfs://my-bucket/path/my-hdfs-ndarray`
* - `/path-to/my-local-ndarray`
* @return dict of str->NDArray
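   *
   * Illustrative round trip with save (the local path is a placeholder):
   * {{{
   * NDArray.save("/tmp/nd.bin", Map("weights" -> NDArray.ones(2, 2)))
   * val (names, arrays) = NDArray.load("/tmp/nd.bin")
   * }}}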
*/
def load(fname: String): (Array[String], Array[NDArray]) = {
val outSize = new MXUintRef
val outNameSize = new MXUintRef
val handles = ArrayBuffer.empty[NDArrayHandle]
val names = ArrayBuffer.empty[String]
checkCall(_LIB.mxNDArrayLoad(fname, outSize, handles, outNameSize, names))
require(outNameSize.value == 0 || outNameSize.value == outSize.value,
s"Mismatch between names and arrays in file $fname")
(names.toArray, handles.map(new NDArray(_)).toArray)
}
def load2Map(fname: String): Map[String, NDArray] = {
val (keys, vals) = load(fname)
require(keys.length == vals.length, "Loaded NDArrays have no name")
(keys zip vals).toMap
}
def load2Array(fname: String): Array[NDArray] = {
load(fname)._2
}
/**
* Save list of NDArray or dict of str->NDArray to binary file.
*
   * You can also use pickle to do the job if you only work in Python.
   * The advantage of load/save is that the file is language agnostic.
   * This means a file saved with save can be loaded by the other language bindings of mxnet.
   * You also get the benefit of being able to directly load/save from cloud storage (S3, HDFS).
   *
   * @param fname
   *        The name of the file. Can be an S3 or HDFS address (remember to build with S3 support).
* Example of fname:
* - `s3://my-bucket/path/my-s3-ndarray`
* - `hdfs://my-bucket/path/my-hdfs-ndarray`
* - `/path-to/my-local-ndarray`
* @param data dict of str->NDArray
*/
def save(fname: String, data: Map[String, NDArray]): Unit = {
val keys = data.keys.toArray
val handles = data.values.map(_.handle).toArray
save(fname, keys, handles)
}
def save(fname: String, data: Traversable[NDArray]): Unit = {
save(fname, null, data.map(_.handle).toArray)
}
private def save(fname: String, keys: Array[String], handles: Array[NDArrayHandle]): Unit = {
checkCall(_LIB.mxNDArraySave(fname, handles, keys))
}
def deserialize(bytes: Array[Byte]): NDArray = {
val handleRef = new NDArrayHandleRef
checkCall(_LIB.mxNDArrayLoadFromRawBytes(bytes, handleRef))
new NDArray(handleRef.value)
}
private def _crop_assign(kwargs: Map[String, Any] = null)(args: Any*) : NDArrayFuncReturn = {
genericNDArrayFunctionInvoke("_crop_assign", args, kwargs)
}
}
/**
* NDArray object in mxnet.
 * NDArray is the basic ndarray/Tensor-like data structure in mxnet. <br />
 * <b>
 * WARNING: it is your responsibility to release this object by calling dispose().
* </b>
*/
class NDArray private[mxnet](private[mxnet] val handle: NDArrayHandle,
val writable: Boolean = true,
addToCollector: Boolean = true) extends NativeResource {
if (addToCollector) {
NDArrayCollector.collect(this)
}
override def nativeAddress: CPtrAddress = handle
override def nativeDeAllocator: (CPtrAddress => Int) = _LIB.mxNDArrayFree
override val bytesAllocated: Long = DType.numOfBytes(this.dtype) * this.shape.product
override val ref: NativeResourceRef = super.register()
  // Record the arrays used to construct this array instance.
  // We use weak references to avoid blocking GC.
private[mxnet] val dependencies = mutable.HashMap.empty[Long, WeakReference[NDArray]]
private val lengthProperty = "mxnet.setNDArrayPrintLength"
private val layerProperty = "mxnet.setNDArrayPrintLayerLength"
private lazy val printLength = Try(System.getProperty(lengthProperty).toInt).getOrElse(1000)
private lazy val layerLength = Try(System.getProperty(layerProperty).toInt).getOrElse(10)
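  // Illustrative round trip with NDArray.deserialize (a sketch, not from the
  // original docs):
  //   val bytes = nd.serialize()
  //   val nd2 = NDArray.deserialize(bytes) // a copy with the same contents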
def serialize(): Array[Byte] = {
val buf = ArrayBuffer.empty[Byte]
checkCall(_LIB.mxNDArraySaveRawBytes(handle, buf))
buf.toArray
}
/**
* Release the native memory. <br />
* The NDArrays it depends on will NOT be disposed. <br />
* The object shall never be used after it is disposed.
*/
override def dispose(): Unit = {
if (!super.isDisposed) {
super.dispose()
dependencies.clear()
}
}
/**
   * Dispose all NDArrays that helped to construct this array. <br />
* e.g. (a * b + c).disposeDeps() will dispose a, b, c (including their deps) and a * b
* @return this array
*/
def disposeDeps(): NDArray = {
disposeDepsExcept()
}
/**
   * Dispose all NDArrays that helped to construct this array, except those given as arguments. <br />
* e.g. (a * b + c).disposeDepsExcept(a, b)
* will dispose c and a * b.
* Note that a, b's dependencies will not be disposed either.
* @return this array
*/
def disposeDepsExcept(arrs: NDArray*): NDArray = {
if (dependencies != null) {
val excepts = mutable.HashSet.empty[Long]
arrs.foreach { arr =>
excepts += arr.handle
excepts ++= arr.dependencies.keys
}
dependencies.retain { case (addr, weak) =>
if (excepts.contains(addr)) {
true
} else {
weak.get.foreach(_.dispose())
false
}
}
}
this
}
/**
   * Perform a synchronous copy from the array.
   * @param source The data source we would like to copy from.
*/
private def syncCopyfrom(source: Array[Float]): Unit = {
require(source.length == size,
s"array size (${source.length}) do not match the size of NDArray ($size)")
checkCall(_LIB.mxNDArraySyncCopyFromCPU(handle, source, source.length))
}
private def syncCopyfrom(source: Array[Double]): Unit = {
require(source.length == size,
s"array size (${source.length}) do not match the size of NDArray ($size)")
checkCall(_LIB.mxFloat64NDArraySyncCopyFromCPU(handle, source, source.length))
}
/**
* Visualize the internal structure of NDArray
   * @return String that shows the structure
*/
override def toString: String = {
val abstractND = buildStringHelper(this, this.shape.length)
val otherInfo = s"<NDArray ${this.shape} ${this.context} ${this.dtype}>"
s"$abstractND\\n$otherInfo"
}
/**
* Helper function to create formatted NDArray output
* The NDArray will be represented in a reduced version if too large
* @param nd NDArray as the input
   * @param totalSpace number of dimensions of the root array (used to compute indentation)
* @return String format of NDArray
*/
private def buildStringHelper(nd : NDArray, totalSpace : Int) : String = {
var result = ""
val THRESHOLD = layerLength // longest NDArray[NDArray[...]] to show in full
val ARRAYTHRESHOLD = printLength // longest array to show in full
val shape = nd.shape
val space = totalSpace - shape.length
if (shape.length != 1) {
val (length, postfix) =
if (shape(0) > THRESHOLD) {
// reduced NDArray
(10, s"\\n${" " * (space + 1)}... with length ${shape(0)}\\n")
} else {
(shape(0), "")
}
for (num <- 0 until length) {
val output = buildStringHelper(nd.at(num), totalSpace)
result += s"$output\\n"
}
result = s"${" " * space}[\\n$result${" " * space}$postfix${" " * space}]"
} else {
if (shape(0) > ARRAYTHRESHOLD) {
// reduced Array
val front = nd.slice(0, 10)
        val back = nd.slice(shape(0) - 10, shape(0)) // stop is exclusive, so include the last element
result = s"""${" " * space}[${front.toArray.mkString(",")}
| ... ${back.toArray.mkString(",")}]""".stripMargin
} else {
result = s"${" " * space}[${nd.toArray.mkString(",")}]"
}
}
result
}
/**
* Return a sliced NDArray that shares memory with current one.
   * NDArray only supports contiguous slicing on axis 0
*
* @param start Starting index of slice.
* @param stop Finishing index of slice.
*
* @return a sliced NDArray that shares memory with current one.
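   *
   * Illustrative example:
   * {{{
   * val nd = NDArray.zeros(4, 3)
   * val s = nd.slice(1, 3) // rows 1 and 2: shape (2, 3), sharing memory with nd
   * }}}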
*/
def slice(start: Int, stop: Int): NDArray = {
val sliceHandle = new NDArrayHandleRef
checkCall(_LIB.mxNDArraySlice(handle, start, stop, sliceHandle))
new NDArray(handle = sliceHandle.value, writable = this.writable)
}
def slice(range: (Int, Int)): NDArray = {
slice(range._1, range._2)
}
/**
   * Return a sliced NDArray at the i-th position of axis 0
   * @param i index on axis 0
* @return a sliced NDArray that shares memory with current one.
*/
def slice(i: Int): NDArray = {
slice(i, i + 1)
}
/**
* Return a sub NDArray that shares memory with current one.
   * the first axis will be rolled up, which makes its shape differ from slice(i, i+1)
* @param idx index of sub array.
*/
def at(idx: Int): NDArray = {
val handleRef = new NDArrayHandleRef()
checkCall(_LIB.mxNDArrayAt(this.handle, idx, handleRef))
new NDArray(handle = handleRef.value, writable = this.writable)
}
// Get transpose of current NDArray
def T: NDArray = {
require(this.shape.size == 2, "Only 2D matrix is allowed to be transposed")
NDArray.genericNDArrayFunctionInvoke("transpose", Seq(this))
}
/**
* Get data type of current NDArray.
* @return class representing type of current ndarray
*/
def dtype: DType = {
val mxDtype = new RefInt
checkCall(_LIB.mxNDArrayGetDType(handle, mxDtype))
DType(mxDtype.value)
}
/**
   * Return a copy of the current array converted to the specified type.
* @param dtype Desired type of result array.
* @return A copy of array content.
*/
def asType(dtype: DType): NDArray = {
val res = NDArray.empty(this.shape, ctx = this.context, dtype = dtype)
this.copyTo(res)
res
}
/**
* Return a reshaped NDArray that shares memory with current one.
* @param dims New shape.
*
* @return a reshaped NDArray that shares memory with current one.
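   *
   * Illustrative example (the element count must stay the same):
   * {{{
   * val nd = NDArray.zeros(2, 3)
   * val r = nd.reshape(Array(3, 2)) // shares memory with nd
   * }}}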
*/
def reshape(dims: Array[Int]): NDArray = {
val reshapeHandle = new NDArrayHandleRef
checkCall(_LIB.mxNDArrayReshape(handle, dims.length, dims, reshapeHandle))
new NDArray(handle = reshapeHandle.value, writable = this.writable)
}
/**
* Return a reshaped NDArray that shares memory with current one.
* @param dims New shape.
*
* @return a reshaped NDArray that shares memory with current one.
*/
def reshape(dims: Shape): NDArray = {
reshape(dims.toArray)
}
/**
   * Block until all pending write operations on the current NDArray are finished.
   * This function will return when all the pending writes to the current
   * NDArray finish. There can still be pending reads going on when the
   * function returns.
*/
def waitToRead(): Unit = {
checkCall(_LIB.mxNDArrayWaitToRead(handle))
}
/**
* Get context of current NDArray.
* @return The context of current NDArray.
*/
def context: Context = {
val devTypeId = new RefInt
val devId = new RefInt
checkCall(_LIB.mxNDArrayGetContext(handle, devTypeId, devId))
new Context(Context.devtype2str(devTypeId.value), devId.value)
}
/**
* Set the values of the NDArray
* @param value Value to set
* @return Current NDArray
*/
def set(value: MX_PRIMITIVE_TYPE): NDArray = {
require(writable, "trying to assign to a readonly NDArray")
NDArray.genericNDArrayFunctionInvoke("_set_value", Seq(value), Map("out" -> this))
this
}
def set(other: NDArray): NDArray = {
require(writable, "trying to assign to a readonly NDArray")
other.copyTo(this)
}
def set(other: Array[Float]): NDArray = {
require(writable, "trying to assign to a readonly NDArray")
syncCopyfrom(other)
this
}
def set(other: Array[Double]): NDArray = {
require(writable, "trying to assign to a readonly NDArray")
syncCopyfrom(other)
this
}
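  // Element-wise arithmetic. Operators without '=' return a new NDArray; the
  // '=' variants mutate this (writable) array in place. Illustrative sketch:
  //   val c = a + b // new array
  //   a += 1f       // in-place update of a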
def +(other: NDArray): NDArray = {
NDArray.genericNDArrayFunctionInvoke("_plus", Seq(this, other))
}
def +(other: MX_PRIMITIVE_TYPE): NDArray = {
NDArray.genericNDArrayFunctionInvoke("_plus_scalar", Seq(this, other))
}
def +=(other: NDArray): NDArray = {
if (!writable) {
throw new IllegalArgumentException("trying to add to a readonly NDArray")
}
NDArray.genericNDArrayFunctionInvoke("_plus", Seq(this, other), Map("out" -> this))
this
}
def +=(other: MX_PRIMITIVE_TYPE): NDArray = {
if (!writable) {
throw new IllegalArgumentException("trying to add to a readonly NDArray")
}
NDArray.genericNDArrayFunctionInvoke("_plus_scalar", Seq(this, other), Map("out" -> this))
this
}
def -(other: NDArray): NDArray = {
NDArray.genericNDArrayFunctionInvoke("_minus", Seq(this, other))
}
def -(other: MX_PRIMITIVE_TYPE): NDArray = {
NDArray.genericNDArrayFunctionInvoke("_minus_scalar", Seq(this, other))
}
def -=(other: NDArray): NDArray = {
if (!writable) {
throw new IllegalArgumentException("trying to subtract from a readonly NDArray")
}
NDArray.genericNDArrayFunctionInvoke("_minus", Seq(this, other), Map("out" -> this))
this
}
def -=(other: MX_PRIMITIVE_TYPE): NDArray = {
if (!writable) {
throw new IllegalArgumentException("trying to subtract from a readonly NDArray")
}
NDArray.genericNDArrayFunctionInvoke("_minus_scalar", Seq(this, other), Map("out" -> this))
this
}
def *(other: NDArray): NDArray = {
NDArray.genericNDArrayFunctionInvoke("_mul", Seq(this, other))
}
def *(other: MX_PRIMITIVE_TYPE): NDArray = {
NDArray.genericNDArrayFunctionInvoke("_mul_scalar", Seq(this, other))
}
def unary_-(): NDArray = {
NDArray.genericNDArrayFunctionInvoke("_mul_scalar", Seq(this, -1f))
}
def *=(other: NDArray): NDArray = {
if (!writable) {
throw new IllegalArgumentException("trying to multiply to a readonly NDArray")
}
NDArray.genericNDArrayFunctionInvoke("_mul", Seq(this, other), Map("out" -> this))
this
}
def *=(other: MX_PRIMITIVE_TYPE): NDArray = {
if (!writable) {
throw new IllegalArgumentException("trying to multiply to a readonly NDArray")
}
NDArray.genericNDArrayFunctionInvoke("_mul_scalar", Seq(this, other), Map("out" -> this))
this
}
def /(other: NDArray): NDArray = {
NDArray.genericNDArrayFunctionInvoke("_div", Seq(this, other))
}
def /(other: MX_PRIMITIVE_TYPE): NDArray = {
NDArray.genericNDArrayFunctionInvoke("_div_scalar", Seq(this, other))
}
def /=(other: NDArray): NDArray = {
if (!writable) {
throw new IllegalArgumentException("trying to divide from a readonly NDArray")
}
NDArray.genericNDArrayFunctionInvoke("_div", Seq(this, other), Map("out" -> this))
this
}
def /=(other: MX_PRIMITIVE_TYPE): NDArray = {
if (!writable) {
throw new IllegalArgumentException("trying to divide from a readonly NDArray")
}
NDArray.genericNDArrayFunctionInvoke("_div_scalar", Seq(this, other), Map("out" -> this))
this
}
def **(other: NDArray): NDArray = {
NDArray.power(this, other)
}
def **(other: MX_PRIMITIVE_TYPE): NDArray = {
NDArray.power(this, other)
}
def **=(other: NDArray): NDArray = {
NDArray.genericNDArrayFunctionInvoke("_power", Seq(this, other), Map("out" -> this))
}
def **=(other: MX_PRIMITIVE_TYPE): NDArray = {
NDArray.genericNDArrayFunctionInvoke("_power_scalar", Seq(this, other), Map("out" -> this))
}
def >(other: NDArray): NDArray = {
NDArray.greater(this, other)
}
def >(other: MX_PRIMITIVE_TYPE): NDArray = {
NDArray.greater(this, other)
}
def >=(other: NDArray): NDArray = {
NDArray.greaterEqual(this, other)
}
def >=(other: MX_PRIMITIVE_TYPE): NDArray = {
NDArray.greaterEqual(this, other)
}
def <(other: NDArray): NDArray = {
NDArray.lesser(this, other)
}
def <(other: MX_PRIMITIVE_TYPE): NDArray = {
NDArray.lesser(this, other)
}
def <=(other: NDArray): NDArray = {
NDArray.lesserEqual(this, other)
}
def <=(other: MX_PRIMITIVE_TYPE): NDArray = {
NDArray.lesserEqual(this, other)
}
def %(other: NDArray): NDArray = {
NDArray.genericNDArrayFunctionInvoke("_mod", Seq(this, other))
}
def %(other: MX_PRIMITIVE_TYPE): NDArray = {
NDArray.genericNDArrayFunctionInvoke("_mod_scalar", Seq(this, other))
}
def %=(other: NDArray): NDArray = {
if (!writable) {
throw new IllegalArgumentException("trying to take modulo from a readonly NDArray")
}
NDArray.genericNDArrayFunctionInvoke("_mod", Seq(this, other), Map("out" -> this))
this
}
def %=(other: MX_PRIMITIVE_TYPE): NDArray = {
if (!writable) {
throw new IllegalArgumentException("trying to take modulo from a readonly NDArray")
}
NDArray.genericNDArrayFunctionInvoke("_mod_scalar", Seq(this, other), Map("out" -> this))
this
}
/**
* Return a copied flat java array of current array (row-major).
* @return A copy of array content.
*/
def toArray: Array[Float] = {
internal.toFloatArray
}
/**
* Return a copied flat java array of current array (row-major) with datatype as Float64/Double.
* @return A copy of array content.
*/
def toFloat64Array: Array[Double] = {
internal.toDoubleArray
}
def internal: NDArrayInternal = {
val myType = dtype
val arrLength = DType.numOfBytes(myType) * size
val arr = Array.ofDim[Byte](arrLength)
checkCall(_LIB.mxNDArraySyncCopyToCPU(handle, arr, size))
new NDArrayInternal(arr, myType)
}
/**
   * Return a CPU scalar (float) of the current ndarray.
* This ndarray must have shape (1,)
*
* @return The scalar representation of the ndarray.
*/
def toScalar: Float = {
require(shape == Shape(1), "The current array is not a scalar")
this.toArray(0)
}
def toFloat64Scalar: Double = {
require(shape == Shape(1), "The current array is not a scalar")
this.toFloat64Array(0)
}
/**
* Copy the content of current array to other.
*
* @param other Target NDArray or context we want to copy data to.
* @return The copy target NDArray
*/
def copyTo(other: NDArray): NDArray = {
if (other.handle == this.handle) {
      NDArray.logger.warn("copying an array to itself, is this intended?")
} else {
NDArray.genericNDArrayFunctionInvoke("_copyto", Seq(this), Map("out" -> other))
}
other
}
/**
* Copy the content of current array to a new NDArray in the context.
*
* @param ctx Target context we want to copy data to.
* @return The copy target NDArray
*/
def copyTo(ctx: Context): NDArray = {
val ret = new NDArray(NDArray.newAllocHandle(shape, ctx, delayAlloc = true, dtype = dtype))
copyTo(ret)
}
/**
* Clone the current array
* @return the copied NDArray in the same context
*/
def copy(): NDArray = copyTo(this.context)
/**
* Get shape of current NDArray.
* @return an array representing shape of current ndarray
*/
def shape: Shape = {
val ndim = new MXUintRef
val data = ArrayBuffer[Int]()
checkCall(_LIB.mxNDArrayGetShape(handle, ndim, data))
require(ndim.value == data.length, s"ndim=$ndim, while len(data)=${data.length}")
Shape(data)
}
// Get size of current NDArray.
def size: Int = shape.product
/**
   * Return an `NDArray` that lives in the target context. If the array
   * is already in that context, `this` is returned. Otherwise, a copy is made.
   * @param context The target context we want the return value to live in.
   * @return A copy, or `this` if the array already lives in the target context.
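   *
   * Illustrative example (assumes a GPU build with device 0 available):
   * {{{
   * val onGpu = nd.asInContext(Context.gpu(0)) // no copy if nd already lives there
   * }}}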
*/
def asInContext(context: Context): NDArray = {
if (this.context == context) this else this.copyTo(context)
}
override def equals(o: Any): Boolean = o match {
case that: NDArray =>
that != null && that.shape == this.shape && that.toArray.sameElements(this.toArray)
case _ => false
}
override def hashCode: Int = {
    // Content-based hash so arrays that are equal per equals() hash alike;
    // Array.hashCode is identity-based and would break the hashCode/equals contract.
    shape.hashCode * 31 + toArray.toSeq.hashCode
}
}
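// Enables scalar-on-the-left arithmetic within the package (the conversions are
// private[mxnet]). Illustrative sketch:
//   val r = 2f / nd // desugars to new NDArrayConversions(2f)./(nd)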
private[mxnet] object NDArrayConversions {
implicit def int2Scalar(x: Int): NDArrayConversions = new NDArrayConversions(x.toFloat)
implicit def double2Scalar(x: Double): NDArrayConversions = new NDArrayConversions(x)
implicit def float2Scalar(x: Float): NDArrayConversions = new NDArrayConversions(x)
}
private[mxnet] class NDArrayConversions(val value: MX_PRIMITIVE_TYPE) {
def +(other: NDArray): NDArray = {
other + value
}
def +(other: NDArrayFuncReturn): NDArray = {
other.head + value
}
def -(other: NDArray): NDArray = {
NDArray.genericNDArrayFunctionInvoke("_rminus_scalar", Seq(other, value))
}
def -(other: NDArrayFuncReturn): NDArray = {
NDArray.genericNDArrayFunctionInvoke("_rminus_scalar", Seq(other.head, value))
}
def *(other: NDArray): NDArray = {
other * value
}
def *(other: NDArrayFuncReturn): NDArray = {
other.head * value
}
def /(other: NDArray): NDArray = {
NDArray.genericNDArrayFunctionInvoke("_rdiv_scalar", Seq(other, value))
}
def /(other: NDArrayFuncReturn): NDArray = {
NDArray.genericNDArrayFunctionInvoke("_rdiv_scalar", Seq(other.head, value))
}
def **(other: NDArray): NDArray = {
NDArray.power(value, other)
}
def **(other: NDArrayFuncReturn): NDArray = {
NDArray.power(value, other.head)
}
def >(other: NDArray): NDArray = {
NDArray.lesser(other, value)
}
def >(other: NDArrayFuncReturn): NDArray = {
NDArray.lesser(other.head, value)
}
def >=(other: NDArray): NDArray = {
NDArray.lesserEqual(other, value)
}
def >=(other: NDArrayFuncReturn): NDArray = {
NDArray.lesserEqual(other.head, value)
}
def <(other: NDArray): NDArray = {
NDArray.greater(other, value)
}
def <(other: NDArrayFuncReturn): NDArray = {
NDArray.greater(other.head, value)
}
def <=(other: NDArray): NDArray = {
NDArray.greaterEqual(other, value)
}
def <=(other: NDArrayFuncReturn): NDArray = {
NDArray.greaterEqual(other.head, value)
}
}
private case class NDArrayFunction(handle: NDArrayHandle, arguments: List[String])
private[mxnet] class NDArrayFuncReturn(private[mxnet] val arr: Array[NDArray]) {
def head: NDArray = apply(0)
def get: NDArray = {
require(arr.length == 1, s"return array length = ${arr.length}")
head
}
def apply(i: Int): NDArray = {
if (arr == null || arr.length <= i) {
null
} else {
arr(i)
}
}
// copy methods from NDArray
def isDisposed: Boolean = head.isDisposed
def serialize(): Array[Byte] = head.serialize()
def dispose(): Unit = head.dispose()
def disposeDeps(): NDArray = head.disposeDeps()
def disposeDepsExcept(arrs: NDArray*): NDArray = head.disposeDepsExcept(arrs: _*)
def slice(start: Int, stop: Int): NDArray = head.slice(start, stop)
def slice(range: (Int, Int)): NDArray = head.slice(range)
def slice(i: Int): NDArray = head.slice(i)
def reshape(dims: Array[Int]): NDArray = head.reshape(dims)
def waitToRead(): Unit = head.waitToRead()
def context: Context = head.context
def set(value: Float): NDArray = head.set(value)
def set(value: Double): NDArray = head.set(value)
def set(other: NDArray): NDArray = head.set(other)
def set(other: Array[Float]): NDArray = head.set(other)
def set(other: Array[Double]): NDArray = head.set(other)
def +(other: NDArray): NDArray = head + other
def +(other: MX_PRIMITIVE_TYPE): NDArray = head + other
def +=(other: NDArray): NDArray = head += other
def +=(other: MX_PRIMITIVE_TYPE): NDArray = head += other
def -(other: NDArray): NDArray = head - other
def -(other: MX_PRIMITIVE_TYPE): NDArray = head - other
def -=(other: NDArray): NDArray = head -= other
def -=(other: MX_PRIMITIVE_TYPE): NDArray = head -= other
def *(other: NDArray): NDArray = head * other
def *(other: MX_PRIMITIVE_TYPE): NDArray = head * other
def unary_-(): NDArray = -head
def *=(other: NDArray): NDArray = head *= other
def *=(other: MX_PRIMITIVE_TYPE): NDArray = head *= other
def /(other: NDArray): NDArray = head / other
def /(other: MX_PRIMITIVE_TYPE): NDArray = head / other
def **(other: NDArray): NDArray = head ** other
def **(other: MX_PRIMITIVE_TYPE): NDArray = head ** other
def >(other: NDArray): NDArray = head > other
def >(other: MX_PRIMITIVE_TYPE): NDArray = head > other
def >=(other: NDArray): NDArray = head >= other
def >=(other: MX_PRIMITIVE_TYPE): NDArray = head >= other
def <(other: NDArray): NDArray = head < other
def <(other: MX_PRIMITIVE_TYPE): NDArray = head < other
def <=(other: NDArray): NDArray = head <= other
def <=(other: MX_PRIMITIVE_TYPE): NDArray = head <= other
def toArray: Array[Float] = head.toArray
def toFloat64Array: Array[Double] = head.toFloat64Array
def toScalar: Float = head.toScalar
def toFloat64Scalar: Double = head.toFloat64Scalar
def copyTo(other: NDArray): NDArray = head.copyTo(other)
def copyTo(ctx: Context): NDArray = head.copyTo(ctx)
def copy(): NDArray = head.copy()
def shape: Shape = head.shape
def size: Int = head.size
def asInContext(context: Context): NDArray = head.asInContext(context)
}
private[mxnet] class NDArrayInternal (private val internal: Array[Byte], private val dtype: DType) {
private val unitSize = DType.numOfBytes(dtype)
require(internal.length > 0 && internal.length % unitSize == 0,
s"$dtype size $unitSize cannot divide byte array size ${internal.length}")
private val units: Array[Array[Byte]] = (
for (i <- 0 until internal.length / unitSize)
yield internal.slice(i * unitSize, (i + 1) * unitSize)
).toArray
def getRaw: Array[Byte] = internal
def toDoubleArray: Array[Double] = {
require(dtype != DType.Float16, "Currently cannot convert float16 to native numerical types")
dtype match {
case DType.Float32 => units.map(wrapBytes(_).getFloat.toDouble)
case DType.Float64 => units.map(wrapBytes(_).getDouble)
case DType.Int32 => units.map(wrapBytes(_).getInt.toDouble)
case DType.UInt8 => internal.map(_.toDouble)
}
}
def toFloatArray: Array[Float] = {
require(dtype != DType.Float16, "Currently cannot convert float16 to native numerical types")
dtype match {
case DType.Float32 => units.map(wrapBytes(_).getFloat)
case DType.Float64 => units.map(wrapBytes(_).getDouble.toFloat)
case DType.Int32 => units.map(wrapBytes(_).getInt.toFloat)
case DType.UInt8 => internal.map(_.toFloat)
}
}
def toIntArray: Array[Int] = {
require(dtype != DType.Float16, "Currently cannot convert float16 to native numerical types")
dtype match {
case DType.Float32 => units.map(wrapBytes(_).getFloat.toInt)
case DType.Float64 => units.map(wrapBytes(_).getDouble.toInt)
case DType.Int32 => units.map(wrapBytes(_).getInt)
case DType.UInt8 => internal.map(_.toInt)
}
}
def toByteArray: Array[Byte] = {
require(dtype != DType.Float16, "Currently cannot convert float16 to native numerical types")
dtype match {
case DType.Float16 | DType.Float32 => units.map(wrapBytes(_).getFloat.toByte)
case DType.Float64 => units.map(wrapBytes(_).getDouble.toByte)
case DType.Int32 => units.map(wrapBytes(_).getInt.toByte)
case DType.UInt8 => internal.clone()
}
}
private def wrapBytes(bytes: Array[Byte]): ByteBuffer = {
val bb = ByteBuffer.wrap(bytes)
bb.order(ByteOrder.LITTLE_ENDIAN)
bb
}
}
| ptrendx/mxnet | scala-package/core/src/main/scala/org/apache/mxnet/NDArray.scala | Scala | apache-2.0 | 52,258 |
package eu.inn.metrics.loaders
import java.util.concurrent.TimeUnit
import com.codahale.metrics.{MetricRegistry, Slf4jReporter}
import scaldi.{Injectable, Injector}
import scala.concurrent.duration.Duration
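// Illustrative wiring (a sketch; assumes a scaldi module that binds a MetricRegistry):
//   import scala.concurrent.duration._
//   implicit val injector = new scaldi.Module {
//     bind [MetricRegistry] to new MetricRegistry
//   }
//   new Slf4jReporterLoader(10.seconds).run()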
class Slf4jReporterLoader(period: Duration)(implicit injector: Injector) extends MetricsReporterLoader with Injectable {
lazy val slf4jReporter = {
Slf4jReporter.forRegistry(inject[MetricRegistry]).build()
}
override def run(): Unit = {
if (period.isFinite) {
slf4jReporter.start(period.toMillis, TimeUnit.MILLISECONDS)
} else {
slf4jReporter.hashCode() //instantiate lazy val
}
}
}
| InnovaCo/service-metrics | src/main/scala/eu/inn/metrics/loaders/Slf4jReporterLoader.scala | Scala | bsd-3-clause | 631 |
package com.pwootage.metroidprime.formats
import java.io.{ByteArrayOutputStream, DataOutputStream}
import java.nio.ByteBuffer
import com.pwootage.metroidprime.formats.io.PrimeDataFile
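// Mix-in for types that can round-trip through a PrimeDataFile. Illustrative
// sketch (Foo and its serialization logic are hypothetical):
//   class Foo extends BinarySerializable {
//     def write(f: PrimeDataFile): Unit = ??? // write fields here
//     def read(f: PrimeDataFile): Unit = ???  // read fields here
//   }
//   val bytes = new Foo().toByteArray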
trait BinarySerializable {
def write(f: PrimeDataFile): Unit
def read(f: PrimeDataFile): Unit
def read(bytes: Array[Byte]): Unit = {
read(new PrimeDataFile(bytes))
}
def toByteArray = {
val byteOut = new ByteArrayOutputStream()
this.write(new PrimeDataFile(None, Some(new DataOutputStream(byteOut))))
byteOut.toByteArray
}
}
| Pwootage/prime-patcher | src/main/scala/com/pwootage/metroidprime/formats/BinarySerializable.scala | Scala | gpl-3.0 | 563 |
package testdata
import com.github.ghik.silencer.silent
object methodSuppression {
@silent
def method(): Unit = {
123
}
def other(): Unit = {
123
}
}
| ghik/silencer | silencer-plugin/testdata/methodSuppression.scala | Scala | apache-2.0 | 170 |
/*
* Copyright 2015 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.computations
import org.scalatest.{Matchers, WordSpec}
import play.api.libs.json.Json
import uk.gov.hmrc.ct.computations.formats._
class CP14Spec extends WordSpec with Matchers {
implicit val format = Json.format[CP14Holder]
"CP14 to json" should {
"create valid json for int value" in {
val json = Json.toJson(CP14Holder(CP14(1234)))
json.toString shouldBe """{"cp14":1234}"""
}
"create valid json for -ve int" in {
val json = Json.toJson(CP14Holder(CP14(-1234)))
json.toString shouldBe """{"cp14":-1234}"""
}
}
"CP14 from json" should {
"create +ve int from valid json" in {
val json = Json.parse("""{"cp14":1234}""")
Json.fromJson[CP14Holder](json).get shouldBe CP14Holder(cp14 = new CP14(1234))
}
"create -ve int from valid json" in {
val json = Json.parse("""{"cp14":-1234}""")
Json.fromJson[CP14Holder](json).get shouldBe CP14Holder(cp14 = new CP14(-1234))
}
}
}
case class CP14Holder(cp14: CP14)
| scottcutts/ct-calculations | src/test/scala/uk/gov/hmrc/ct/computations/CP14Spec.scala | Scala | apache-2.0 | 1,627 |
object Args {
def foo(x: => Int) = x
foo(1)
def bar(xs: Int*) = xs
bar()
bar(1)
bar(1, 2)
bar(List(1, 2): _*)
} | mdemarne/scalahost | tests/src/test/resources/ScalaToMeta/Args/Original.scala | Scala | bsd-3-clause | 125 |
package com.clarifi.reporting.util
import java.io.{File, OutputStream, InputStream}
object IOUtils {
/**
* Read all the data from the given InputStream
* and copy it to the given OutputStream.
* @return the number of bytes read and sent
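   *
   * Illustrative example (paths are placeholders; the output stream is left open):
   * {{{
   * val n = copy(new java.io.FileInputStream("in.bin"),
   *              new java.io.FileOutputStream("out.bin"))
   * }}}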
*/
  def copy(input: InputStream, output: OutputStream, defaultBufferSize: Int = 256, closeInputStream: Boolean = true): Long = try {
val buffer = new Array[Byte](defaultBufferSize)
var count = 0L
var n = input.read(buffer)
while (n != -1) {
output.write(buffer, 0, n)
count += n
n = input.read(buffer)
}
count
} finally if (closeInputStream) input.close()
/**
* Tries to find a file. If it exists, returns Some(file). If not, None.
*/
def findFile(name:String): Option[File] = {
val f = new File(name)
if(f.exists) Some(f) else None
}
/**
* Reads the contents of the given file.
* @throws FileNotFoundException if the file doesn't exist.
*/
def slurp(fileName:String): String = {
val source = scala.io.Source.fromFile(fileName, "UTF-8")
val str = source.mkString
source.close
str
}
}
| ermine-language/ermine-legacy | src/main/scala/com/clarifi/reporting/util/IOUtils.scala | Scala | bsd-2-clause | 1,130 |
package org.bitcoins.testkit.chain.fixture
import org.scalatest.Tag
/**
* If a test file uses ChainFixture as its FixtureParam, then
* using these tags will determine which fixture the test will get.
*
 * Simply add `taggedAs ChainFixtureTag._` to your test before calling inFixtured.
*/
sealed abstract class ChainFixtureTag(name: String) extends Tag(name)
object ChainFixtureTag {
case object Empty extends ChainFixtureTag("Empty")
case object GenisisBlockHeaderDAO
extends ChainFixtureTag("GenisisBlockHeaderDAO")
case object PopulatedBlockHeaderDAO
extends ChainFixtureTag("PopulatedBlockHeaderDAO")
case object GenisisChainHandler extends ChainFixtureTag("GenisisChainHandler")
case object PopulatedChainHandler
extends ChainFixtureTag("PopulatedChainHandler")
case object BitcoindZmqChainHandlerWithBlock
extends ChainFixtureTag("BitcoindZmqChainHandlerWithBlock")
val defaultTag: ChainFixtureTag = ChainFixtureTag.Empty
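  // Illustrative: from("Empty") yields ChainFixtureTag.Empty, while an unknown
  // name throws an IllegalArgumentException.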
def from(tag: String): ChainFixtureTag = {
tag match {
case Empty.name => Empty
case GenisisBlockHeaderDAO.name => GenisisBlockHeaderDAO
case PopulatedBlockHeaderDAO.name => PopulatedBlockHeaderDAO
case GenisisChainHandler.name => GenisisChainHandler
case PopulatedChainHandler.name => PopulatedChainHandler
case BitcoindZmqChainHandlerWithBlock.name =>
BitcoindZmqChainHandlerWithBlock
case _: String =>
throw new IllegalArgumentException(s"$tag is not a valid tag")
}
}
}
| bitcoin-s/bitcoin-s-core | testkit/src/main/scala/org/bitcoins/testkit/chain/fixture/ChainFixtureTag.scala | Scala | mit | 1,547 |
object Test {
val List(x, y, z: _ *,
) = 42 :: 17 :: Nil
def main(args: Array[String]): Unit = {
Console.println(x)
Console.println(y)
}
}
| som-snytt/dotty | tests/run/trailingCommas/trailingCommas.scala | Scala | apache-2.0 | 155 |
package hello
/***********************************************************************/
import org.springframework.boot._
import org.springframework.boot.autoconfigure._
import org.springframework.web.bind.annotation._
import org.springframework.context.annotation.Configuration._
import org.springframework.boot.autoconfigure.EnableAutoConfiguration._
import org.springframework.context.annotation.ComponentScan._
import org.springframework.stereotype._
/***********************************************************************/
object HelloWebApplication {
  def main(args: Array[String]): Unit = {
SpringApplication.run(classOf[HelloConfig])
}
}
/***********************************************************************/
| vireshjivane/hello-world | src/main/scala/hello/HelloWebApplication.scala | Scala | mit | 916 |
package KeYmaeraD
import scala.actors.Actor
import scala.actors.Actor._
import java.io.InputStream
import java.io.FileOutputStream
import java.io.File
import Nodes._
object TreeActions {
import RulesUtil._
import Procedures._
val myAddr = java.net.InetAddress.getLocalHost().getHostAddress()
var myPort = 0
try {
val testSocket = new java.net.ServerSocket(0)
myPort = testSocket.getLocalPort()
testSocket.close()
} catch {
case ioe: java.io.IOException =>
println("using a random port")
myPort = 50000 + scala.util.Random.nextInt(10000)
}
println("job master will listen on port " + myPort)
val jobs = new scala.collection.mutable.HashMap[NodeID, Long]()
val jobmaster = new Jobs.JobMaster(myPort)
jobmaster.start
var hereNode: ProofNode = nullNode
var treemodel: Option[KeYmaeraD.GUI.TreeModel] = None
var expandNewNodes = true
def getOpenLeaves(nd: ProofNode) : List[ProofNode] = {
val kds = nd.getChildren.map(getnode)
(kds, nd.getStatus, nd) match {
case (Nil, Open, nd1) =>
List(nd1)
case _ =>
val lvs = kds.map(getOpenLeaves).flatten
lvs
}
}
def getnodethen(ndID: NodeID, f: (ProofNode) => Unit ): Unit =
nodeTable.get(ndID) match {
case Some(nd) => f(nd)
case None =>
println ("node " + ndID + " does not exist.")
}
def gotonode(nd: ProofNode) : Unit = {
hereNode = nd
// println("now at node " + nd.nodeID )
}
def shownode(nd: ProofNode) : Unit =
println(nd.toPrettyString)
/*
def showhints(nd: ProofNode, pos: Position): Unit = {
val fm = lookup(pos, nd.goal)
fm match {
case ...
}
}
*/
def attachnodes(pt: ProofNode, newnds: List[ProofNode]): Unit = {
for (newnd <- newnds){
newnd.setParent(Some(pt.nodeID))
register(newnd)
pt.addchild(newnd.nodeID)
}
// println("treemodel attaching nodes: " + newnds)
if (expandNewNodes) {
treemodel.map(_.fireNodesInserted(pt, newnds)) // GUI
}
//treemodel.map(_.fireChanged(pt)) // GUI
}
def attachnode(pt: ProofNode, newnd: ProofNode): Unit = {
attachnodes(pt,List(newnd))
}
def applyrule(hn: OrNode,
p: Position,
rl: ProofRule): Option[List[NodeID]] = try {
val res = try { rl(p)(hn.goal) } catch {case _ : Throwable => None}
res match {
case Some((Nil, _)) | Some((List(Sequent(_,Nil,List(True))),_)) => //proved
val pnd = new DoneNode(rl.toString, hn.goal)
attachnode(hn,pnd)
propagateProvedUp(hn.nodeID, pnd.nodeID)
Some(Nil)
case Some((List(sq), _)) =>
val ornd = new OrNode(rl.toString, sq)
attachnode(hn,ornd)
Some(List(ornd.nodeID))
case Some((sqs, fvs)) =>
val andnd = new AndNode(rl.toString, hn.goal, Nil)
attachnode(hn,andnd)
val subname = rl.toString + " subgoal"
val ornds = sqs.map(s => new OrNode(subname, s))
attachnodes(andnd, ornds)
Some(ornds.map(_.nodeID))
case None =>
None
}
} catch {
case (e:RulesUtil.LookupError) =>
println("index out of range : " + p)
None
}
def applyrulegen(nd: ProofNode,
p: Position,
rl: ProofRule): Option[List[NodeID]] = {
nd match {
case ornd@OrNode(_,_) => applyrule(ornd, p, rl)
case _ => None
}
}
// Returns true if successfully submitted.
def submitproc(ornd: OrNode, proc: String): Boolean
= procs.get(proc) match {
case None => false
case Some(pr) =>
if(pr.applies(ornd.goal)) {
val wknd = new WorkingNode(proc,ornd.goal)
attachnode(ornd, wknd)
jobmaster ! (('job, proc, ornd.goal, wknd.nodeID))
val t = System.currentTimeMillis
jobs.put(wknd.nodeID, t)
true
} else {
false
}
}
/* crawl the tree to update statuses.
* propagateProvedUp is called on nd when a child of nd is proved.
*/
def propagateProvedUp(ndID: NodeID, from: NodeID): Unit = {
val nd = getnode(ndID)
nd match {
case AndNode(r,g,svs) =>
val others = nd.getChildren.filterNot( _ == from)
val os = others.map(getnode).map(_.getStatus)
os.find( _ != Proved) match {
case None =>
nd.setStatus(Proved)
treemodel.map(_.fireChanged(nd)) // GUI
nd.getParent match {
case Some(p) =>
propagateProvedUp(p, ndID)
case None =>
}
case Some(_) =>
// Collapse the newly proved child.
if (expandNewNodes) {
treemodel.map(_.fireProved(getnode(from))) // GUI
}
}
case OrNode(r,g) =>
nd.setStatus(Proved)
treemodel.map(_.fireChanged(nd)) // GUI
val others = nd.getChildren.filterNot( _ == from)
others.map(x => propagateIrrelevantDown(x))
nd.getParent match {
case Some(p) =>
propagateProvedUp(p, ndID)
case None =>
}
case DoneNode(r,g) =>
// shouldn't ever happen
throw new Error("bad call of propagateProvedUp")
}
}
// called when ndID becomes irrelevant.
def propagateIrrelevantDown(ndID: NodeID) : Unit = {
val nd = getnode(ndID)
nd.setStatus(Irrelevant(nd.getStatus))
treemodel.map(_.fireChanged(nd)) //GUI
jobs.get(ndID) match {
case Some(t) =>
jobs.remove(ndID)
jobmaster ! ('abort, ndID)
case None =>
}
nd.getChildren.map(propagateIrrelevantDown)
}
}
object BlockingActorScheduler {
lazy val scheduler = {
val s = new scala.actors.scheduler.ResizableThreadPoolScheduler(false)
s.start()
s
}
}
trait BlockingActor extends Actor {
override def scheduler = BlockingActorScheduler.scheduler
}
// WorkerTracers have two purposes. The first is to record the ouput
// of worker subprocesses, which do not have their own terminal. The
// second is to ensure that these subprocesses get scheduled by
// demanding input from them. TODO: understand why, without these
// tracers, workers can sometimes make no progress.
class WorkerTracer(id: Int, ins: InputStream) extends BlockingActor {
def act(): Unit = {
try
{
val f = new File("worker" + id + ".out");
val out = new FileOutputStream(f);
val buf =new Array[Byte](1024);
var len = ins.read(buf)
while (len > 0) {
out.write(buf,0,len)
len = ins.read(buf)
}
out.close();
ins.close();
println("created trace in " + f)
}
catch { case (e : Throwable) => println("caught while tracing: " + e) }
}
}
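// Illustrative interaction (message shapes taken from the receive cases below;
// the file name is a placeholder):
//   frontactor ! ('load, "examples/simple.dl")
//   frontactor !? 'gui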
class FrontActor(mberepl: Option[scala.tools.nsc.interpreter.ILoop])
extends Actor {
import TreeActions._
import RulesUtil._
import Tactics.Tactic
// val scriptRunner = scala.tools.nsc.ScriptRunner
var scripttactic = Tactics.nilT
def act(): Unit = try {
println("acting")
link(jobmaster)
while(true) {
receive {
case 'quit =>
println("frontactor quitting")
for((jid,t) <- jobs){
jobmaster ! ('abort, jid)
}
jobmaster !? 'quit
sender ! ()
// TODO: It would be better to just |exit|,
// but how then to kill the REPL frontend?
// HACK:
// If we're going to System.exit here,
// we don't want the worker 'quit messages to
// get cut off. So sleep to allow them to
// work through the pipes.
Thread.sleep(500)
System.exit(0)
exit
case 'gui =>
val fe = KeYmaeraD.GUI.FE.start(self)
sender ! ()
case ('registergui, tm: KeYmaeraD.GUI.TreeModel) =>
treemodel = Some(tm)
case ('findworkers, number:Int) =>
var i = 1
while (i <= number) {
println("starting worker " + i)
val pb = new ProcessBuilder("./runworker",
"-cp",
myPort.toString)
pb.redirectErrorStream(true)
val p = pb.start()
val wt = new WorkerTracer(i, p.getInputStream())
link(wt)
wt.start()
i += 1
}
println("started workers")
sender ! ()
case 'here =>
displayThisNode
sender ! ()
case 'reload =>
reloadfile
sender ! ()
case ('load, filename:String) =>
loadfile(filename)
sender ! ()
case ('loadex, filename:String) =>
loadfile("examples/" + filename)
sender ! ()
case ('show, nd: NodeID) =>
getnodethen(nd, shownode _)
sender ! ()
case ('goto, nd: NodeID) =>
getnodethen(nd, gotonode _)
getnodethen(nd, shownode _)
sender ! ()
case 'gotoroot =>
gotonode(rootNode)
shownode(rootNode)
sender ! ()
case 'rootproved =>
rootNode.getStatus match {
case Proved => sender ! true
case _ => sender ! false
}
case ('setexpandnewnodes, b : Boolean) =>
expandNewNodes = b
sender ! ()
case 'expandopenbranches =>
// Only expand the first ten.
          val openLeaves = getOpenLeaves(hereNode).take(10)
openLeaves.map(lv => treemodel.map(_.makeNodeVisible(lv)))
sender ! ()
case ('rule, rl: ProofRule, pos: Position) =>
val r = applyrulegen(hereNode,pos,rl)
r match {
case Some(_) =>
println("success")
case None =>
println("rule cannot be applied there")
}
sender ! ()
case ('rule, nd: NodeID, pos: Position, rl: ProofRule) =>
sender ! applyrulegen(getnode(nd), pos, rl)
case ('tactic, tct: Tactic) =>
hereNode match {
case ornd@OrNode(_,_) =>
tct(ornd)
case _ =>
println("cannot apply tactic here")
}
sender ! ()
case ('ASYNCsetscripttactic, tct: Tactic) =>
scripttactic = tct
case 'runscripttactic =>
gotonode(rootNode)
hereNode match {
case ornd@OrNode(_,_) =>
scripttactic(ornd)
case _ =>
println("cannot apply tactic here")
}
sender ! ()
case ('job, proc: String) =>
hereNode match {
case ornd@OrNode(r,sq) =>
val res = submitproc(ornd, proc)
if(res) ()
else println("procedure " + proc + " does not apply here.")
case _ =>
println("can only do a procedure on a ornode")
}
sender ! ()
case ('jobdone, ndID: NodeID, sq : Sequent) =>
(jobs.get(ndID), nodeTable.get(ndID), sq) match {
case (None, _, _ ) =>
()
// Proved.
case (Some(t), Some(nd), Sequent(_, Nil, List(True))) =>
jobs.remove(ndID)
nd.getParent match {
case Some(ptid) =>
val pt = getnode(ptid)
nd.setStatus(Proved)
propagateProvedUp(pt.nodeID, nd.nodeID)
case None =>
throw new Error("no parent")
}
// Disproved.
case (Some(t), Some(nd), Sequent(_, Nil, Nil)) =>
jobs.remove(ndID)
nd.setStatus(Disproved)
treemodel.map(_.fireChanged(nd)) //GUI
// Nothing to report
case (Some(t), Some(nd), _) =>
throw new Error("proc should return True or False")
case (Some(t), None, _) =>
throw new Error("node not in nodeTable")
}
// Aborted job.
case ('jobdone, ndID : NodeID) =>
nodeTable.get(ndID) match {
case None =>
()
case Some(nd) =>
jobs.remove(ndID)
nd.setStatus(Aborted)
treemodel.map(_.fireChanged(nd)) //GUI
}
case 'jobs =>
println(jobs.toList)
sender ! ()
case ('abort, jb: NodeID) =>
println("aborting job")
jobmaster ! ('abort, jb)
sender ! ()
case ('abortall) =>
for((jid,t) <- jobs){
jobmaster ! ('abort, jid)
}
sender ! ()
case msg =>
println("got message: " + msg)
sender ! ()
}
}
} catch {
case (e : Throwable) =>
println( "can't do that due to " + e)
}
def displayThisNode : Unit = {
shownode(hereNode)
}
// def parseformula(fms : String) : Formula
def reloadfile : Unit = {
sourceFileName match {
case None => ()
case Some(filename) => loadfile(filename)
}
}
def loadfile(filename: String) : Unit = {
// switch on the suffix of the filename.
if (filename.endsWith(".scala")||
filename.endsWith(".proof")) {
// Strip away the suffix.
val problemfilename = filename.substring(0, filename.length - 6)
println("loading file " + problemfilename)
loadfile(problemfilename)
mberepl match {
case Some(repl) =>
val res1 = repl.command(":load " + filename)
val res2 =
repl.command("frontactor ! (('ASYNCsetscripttactic, Script.main))")
println("Press cmd-u to use the loaded script.")
case None => ()
}
} else {
var fi: java.io.FileInputStream = null
try {
// kill pending jobs.
for( (ndID, t) <- jobs) {
jobmaster ! ('abort, ndID)
}
jobs.clear
fi =
new java.io.FileInputStream(filename)
if (filename.endsWith(".dl")) {
val dlp = new DLParser(fi)
dlp.result match {
case Some(g) =>
val nd = new OrNode("loaded from " + filename, g)
register(nd)
hereNode = nd
rootNode = nd
treemodel.map(_.fireNewRoot(nd))// GUI
case None =>
val nd = new OrNode("failed to parse file " + filename, Sequent(scala.collection.immutable.HashMap.empty, Nil, Nil))
register(nd)
hereNode = nd
rootNode = nd
treemodel.map(_.fireNewRoot(nd))// GUI
println("failed to parse file " + filename)
//@TODO Display an error. Notify the GUI of the error, which should display the message
}
}
else if (filename.endsWith(".key")){
val keyp = new KEYParser(fi)
keyp.result match {
case Some(g) =>
val nd = new OrNode("loaded from " + filename, g)
register(nd)
hereNode = nd
rootNode = nd
treemodel.map(_.fireNewRoot(nd))// GUI
case None =>
val nd = new OrNode("failed to parse file " + filename, Sequent(scala.collection.immutable.HashMap.empty, Nil, Nil))
register(nd)
hereNode = nd
rootNode = nd
treemodel.map(_.fireNewRoot(nd))// GUI
println("failed to parse file " + filename)
}
}
()
} catch {
case (e : Throwable) =>
println("failed to load file " + filename)
println("due to " + e)
} finally { if (fi != null) fi.close() }
}
// Do this last, so that if we are loading a script,
// that's what we remember.
sourceFileName = Some(filename)
}
}
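
// A minimal interaction sketch, assuming an ActorRef `frontactor` bound to this
// actor and the message protocol handled above; the file name is illustrative:
//
//   frontactor ! ('load, "examples/simple.dl")   // parse and load a problem file
//   frontactor ! 'gotoroot                       // move focus back to the root node
//   frontactor ! 'jobs                           // print the currently running jobs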
| keymaerad/KeYmaeraD | frontactor.scala | Scala | bsd-3-clause | 15,856 |
/*
* Copyright 2013 Julian Peeters
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package artisanal.pickle.maker
class Position {
var current = 0
}
| julianpeeters/artisanal-pickle-maker | src/main/scala/resources/Position.scala | Scala | apache-2.0 | 673 |
package org.scalacheck.time
/** Stub trait since Scala Native does not have native support for java.time types. */
private[scalacheck] trait JavaTimeShrink
| rickynils/scalacheck | native/src/main/scala/org/scalacheck/time/JavaTimeShrink.scala | Scala | bsd-3-clause | 157 |
package fr.univnantes.vroom.core
import fr.univnantes.vroom.core.dto._
import fr.univnantes.vroom.core.dto.materiel.{MaterielDTO, MaterielFixeDTO, MaterielMobileDTO}
import fr.univnantes.vroom.core.dto.tarifs._
import fr.univnantes.vroom.core.persistable._
import fr.univnantes.vroom.core.persistable.materiel.MaterielMobile
import fr.univnantes.vroom.core.persistable.tarifs._
/**
 * Class representing the software's execution environment
*/
class Systeme() {
/**
*
*/
private var _salles: Set[Persistable] = Set()
/**
*
*/
private var _reservations: Set[Persistable] = Set()
/**
*
*/
private var _batiments: Set[Persistable] = Set()
/**
*
*/
private var _demandeurs: Set[Persistable] = Set()
/**
*
*/
private var _typesDeTarif: Set[Persistable] = Set()
/**
*
*/
private var _materiel_disponibles_fixe: Set[Persistable] = Set()
/**
*
*/
private var _materiel_disponibles_mobile: Set[Persistable] = Set()
/**
   * Adds a piece of available mobile equipment
   * @param materiel the equipment to add to the list
* @return
*/
def addMaterielMobileDisponible(materiel: MaterielMobileDTO) = _materiel_disponibles_mobile += DTOManager.dtoToObject(materiel.code, materiel)
/**
   * Removes a piece of available mobile equipment
   * @param materiel the equipment to remove from the list
* @return
*/
def popMaterielMobileDisponible(materiel: MaterielMobileDTO) = {
_materiel_disponibles_mobile -= DTOManager.dtoToObject(materiel.code, materiel)
DTOManager.deleteDto(materiel.code)
}
/**
   * Returns the set of DTOs for the available mobile equipment, for data consultation
   * @return the set of DTOs for the available mobile equipment
*/
def viewMaterielMobileDisponible(): Set[DataTransfertObject] = {
_materiel_disponibles_mobile.collect { case x: Persistable => x.toDTO }
}
/**
   * Adds a piece of available fixed equipment
   * @param materiel the equipment to add to the list
* @return
*/
def addMaterieFixeDisponible(materiel: MaterielFixeDTO) = _materiel_disponibles_fixe += DTOManager.dtoToObject(materiel.code, materiel)
/**
   * Removes a piece of available fixed equipment
   * @param materiel the equipment to remove from the list
* @return
*/
def popMaterielFixeDisponible(materiel: MaterielFixeDTO) = {
_materiel_disponibles_fixe -= DTOManager.dtoToObject(materiel.code, materiel)
DTOManager.deleteDto(materiel.code)
}
/**
   * Returns the set of DTOs for the available fixed equipment, for data consultation
   * @return the set of DTOs for the available fixed equipment
*/
def viewMaterielFixeDisponible(): Set[DataTransfertObject] = {
_materiel_disponibles_fixe.collect { case x: Persistable => x.toDTO }
}
/**
   * Adds a new room
   * @param salle the room to add
*/
def addSalle(salle: SalleDTO): Unit = _salles += DTOManager.dtoToObject(salle.no_salle, salle)
/**
   * Removes a room
   * @param salle the room to remove
*/
def popSalle(salle: SalleDTO): Unit = {
_salles -= DTOManager.dtoToObject(salle.no_salle, salle)
DTOManager.deleteDto(salle.no_salle)
}
/**
   * Returns the set of room DTOs for data consultation
   * @return the set of room DTOs
*/
def viewSalle(): Set[DataTransfertObject] = {
_salles.collect { case x: Persistable => x.toDTO }
}
/**
   * Adds fixed equipment to a room
   * @param salle the room to which the fixed equipment is added
   * @param materiel the fixed equipment to add
*/
def addMaterielFixe(salle: SalleDTO, materiel: MaterielFixeDTO): DataTransfertObject = {
DTOManager.dtoToObject(salle.no_salle, salle).asInstanceOf[Salle].addMateriel(materiel.toObject)
DTOManager.dtoToObject(salle.no_salle, salle).toDTO
}
/**
   * Removes fixed equipment from a room
   * @param salle the room from which the fixed equipment is removed
   * @param materiel the fixed equipment to remove
*
*/
def popMaterielFixe(salle: SalleDTO, materiel: MaterielFixeDTO): DataTransfertObject = {
DTOManager.dtoToObject(salle.no_salle, salle).asInstanceOf[Salle].popMateriel(materiel.toObject)
DTOManager.dtoToObject(salle.no_salle, salle).toDTO
}
/**
   * Adds a new reservation
   * @param reservation the reservation to add
*/
def addReservation(reservation: ReservationDTO): Unit = _reservations += DTOManager.dtoToObject(reservation.ref_resa, reservation)
/**
   * Removes a reservation
   * @param reservation the reservation to remove
*/
def popReservation(reservation: ReservationDTO): Unit = {
_reservations -= DTOManager.dtoToObject(reservation.ref_resa, reservation)
DTOManager.deleteDto(reservation.ref_resa)
}
/**
   * Returns the set of reservation DTOs for data consultation
   * @return the set of reservation DTOs
*/
def viewReservation(): Set[DataTransfertObject] = {
_reservations.collect { case x: Persistable => x.toDTO }
}
/**
   * Adds a piece of mobile equipment to a reservation
   * @param reservation DTO representing a reservation
   * @param materiel DTO representing a piece of equipment
   * @return the updated reservation DTO
*/
def addMaterielMobile(reservation: ReservationDTO, materiel: MaterielDTO): DataTransfertObject = {
DTOManager.dtoToObject(reservation.ref_resa, reservation).asInstanceOf[Reservation].addMateriel(materiel.toObject.asInstanceOf[MaterielMobile])
DTOManager.dtoToObject(reservation.ref_resa, reservation).toDTO
}
/**
   * Removes a piece of mobile equipment from a reservation
   * @param reservation DTO representing a reservation
   * @param materiel DTO representing a piece of equipment
   * @return the updated reservation DTO
*/
def popMaterielMobile(reservation: ReservationDTO, materiel: MaterielMobileDTO): DataTransfertObject = {
DTOManager.dtoToObject(reservation.ref_resa, reservation).asInstanceOf[Reservation].popMateriel(materiel.toObject)
DTOManager.dtoToObject(reservation.ref_resa, reservation).toDTO
}
/**
   * Adds a new building
   * @param batiment the building to add
*/
def addBatiment(batiment: BatimentDTO): Unit = _batiments += DTOManager.dtoToObject(batiment.no_bat, batiment)
/**
   * Removes a building
   * @param batiment the building to remove
*/
def popBatiment(batiment: BatimentDTO): Unit = {
_batiments -= DTOManager.dtoToObject(batiment.no_bat, batiment)
DTOManager.deleteDto(batiment.no_bat)
}
/**
   * Returns the set of building DTOs for data consultation
   * @return the set of building DTOs
*/
def viewBatiment(): Set[DataTransfertObject] = {
_batiments.collect { case x: Persistable => x.toDTO }
}
/**
   * Adds a new requester
   * @param demandeur the requester to add
*/
def addDemandeur(demandeur: DemandeurDTO): Unit = _demandeurs += DTOManager.dtoToObject(demandeur.no_dem, demandeur)
/**
   * Removes a requester
   * @param demandeur the requester to remove
*/
def popDemandeur(demandeur: DemandeurDTO): Unit = {
_demandeurs -= DTOManager.dtoToObject(demandeur.no_dem, demandeur)
DTOManager.deleteDto(demandeur.no_dem)
}
/**
   * Returns the set of requester DTOs for data consultation
   * @return the set of requester DTOs
*/
def viewDemandeur(): Set[DataTransfertObject] = {
_demandeurs.collect { case x: Persistable => x.toDTO }
}
/**
   * Adds a new tariff
   * @param tarif the tariff to add
*/
def addTarif(tarif: TarifDTO): Unit = _typesDeTarif += DTOManager.dtoToObject(tarif.code, tarif)
/**
   * Removes a tariff
   * @param tarif the tariff to remove
*/
def popTarif(tarif: TarifDTO): Unit = {
_typesDeTarif -= DTOManager.dtoToObject(tarif.code, tarif)
DTOManager.deleteDto(tarif.code)
}
/**
   * Returns the set of tariff-type DTOs for data consultation
   * @return the set of tariff-type DTOs
*/
def viewTarifs(): Set[DataTransfertObject] = {
_typesDeTarif.collect { case x: Persistable => x.toDTO }
}
/**
   * Searches for the reservations satisfying a predicate
   * @param predicat search predicate over reservations
   * @return the set of reservations, as DTOs, satisfying the predicate
*/
def searchReservation(predicat: (ReservationDTO) => Boolean): Set[ReservationDTO] = {
val muted_set: Set[ReservationDTO] = _reservations collect {
case reserv: Reservation => reserv.toDTO
}
muted_set.filter(predicat)
}
/**
   * Searches for the rooms satisfying a predicate
   * @param predicat search predicate over rooms
   * @return the set of rooms, as DTOs, satisfying the predicate
*/
def searchSalle(predicat: (SalleDTO) => Boolean): Set[SalleDTO] = {
val muted_set: Set[SalleDTO] = _salles collect {
case salle: Salle => salle.toDTO
}
muted_set.filter(predicat)
}
/**
   * Searches for the buildings satisfying a predicate
   * @param predicat search predicate over buildings
   * @return the set of buildings, as DTOs, satisfying the predicate
*/
def searchBatiment(predicat: (BatimentDTO) => Boolean): Set[BatimentDTO] = {
val muted_set: Set[BatimentDTO] = _batiments collect {
case batiment: Batiment => batiment.toDTO
}
muted_set.filter(predicat)
}
/**
   * Searches for the requesters satisfying a predicate
   * @param predicat search predicate over requesters
   * @return the set of requesters, as DTOs, satisfying the predicate
*/
def searchDemandeur(predicat: (DemandeurDTO) => Boolean): Set[DemandeurDTO] = {
val muted_set: Set[DemandeurDTO] = _demandeurs collect {
case demandeur: Demandeur => demandeur.toDTO
}
muted_set.filter(predicat)
}
/**
   * Searches the available mobile equipment
   * @param search_pred search predicate
   * @return a set of matching DTOs
*/
def searchMaterielMobileDisponible(search_pred: (MaterielMobileDTO) => Boolean): Set[MaterielMobileDTO] = {
    val muted_set = _materiel_disponibles_mobile.collect {
      case mat: MaterielMobile => mat.toDTO.asInstanceOf[MaterielMobileDTO]
}
muted_set.filter(search_pred)
}
/**
   * Searches the available fixed equipment
   * @param search_pred search predicate
   * @return a set of matching DTOs
*/
def searchMaterielFixeDisponible(search_pred: (MaterielFixeDTO) => Boolean): Set[MaterielFixeDTO] = {
val muted_set = _materiel_disponibles_fixe.collect {
case mat: MaterielFixeDTO => mat.toDTO.asInstanceOf[MaterielFixeDTO]
}
muted_set.filter(search_pred)
}
/**
   * Searches for the tariffs satisfying a predicate
   * @param predicat search predicate over tariffs
   * @return the set of tariffs, as DTOs, satisfying the predicate
*/
def searchTarif(predicat: (TarifDTO) => Boolean): Set[TarifDTO] = {
val muted_set: Set[TarifDTO] = _typesDeTarif collect {
case tarif: TarifDuree => tarif.toDTO
case tarif: TarifManifestation => tarif.toDTO
case tarif: TarifMateriel => tarif.toDTO
case tarif: TarifOrigine => tarif.toDTO
case tarif: TarifSalle => tarif.toDTO
case tarif: TarifTitre => tarif.toDTO
}
muted_set.filter(predicat)
}
}
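
// A minimal usage sketch (the DTO instance is hypothetical; see the dto package
// for the actual constructors):
//
//   val system = new Systeme()
//   system.addSalle(salleDto)                  // salleDto: SalleDTO
//   val rooms = system.searchSalle(_ => true)  // every room, as SalleDTO
//   system.popSalle(salleDto)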
| Callidon/v-room | src/main/scala/fr/univnantes/vroom/core/Systeme.scala | Scala | mit | 11,806 |
package io.github.hamsters
import scala.collection.immutable.Seq
import scala.meta._
class ShowMacro extends scala.annotation.StaticAnnotation {
inline def apply(defn: Any): Any = meta {
defn match {
//TODO handle object with existing companion
//case Seq(cls @ Defn.Class(_, name, _, ctor, template), companion: Defn.Object)=> q""""""
case cls @ Defn.Class(_, _, _, ctor, _) =>
val show =
q"""
implicit def showable = new io.github.hamsters.Showable[${cls.name}] { override def format(a: ${cls.name}) ={
import io.github.hamsters.ShowableSyntax._
${Lit.String(cls.name.value)}+ "(" + List(..${ctor.paramss.flatMap(_.map(pp => q"""${pp.name.syntax} + "=" + Show.show(a.${Term.Name(pp.name.value)})""" ))}).reduce(_ + "," + _) + ")" } }"""
val companion = q"object ${Term.Name(cls.name.value)} { $show }"
val res = Term.Block(Seq(cls, companion))
//abort(res.syntax)
res
case _ => abort(defn.pos, "Invalid annottee - you can only use @ShowMacro on case classes")
}
}
}
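
// A usage sketch, assuming the scalameta paradise compiler plugin is enabled;
// the case class is hypothetical:
//
//   @ShowMacro
//   case class Person(name: String, age: Int)
//
//   // The generated companion provides a Showable[Person], so
//   // Show.show(Person("Ada", 36)) yields "Person(name=Ada,age=36)".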
| dgouyette/hamsters | metas/shared/src/main/scala/io/github/hamsters/ShowMacro.scala | Scala | apache-2.0 | 1,098 |
package com.seanshubin.builder.domain
import akka.typed.ActorRef
import com.seanshubin.builder.domain.Event._
import com.seanshubin.uptodate.logic.SummaryReport
import scala.util.{Failure, Success, Try}
class DispatchResultHandler(actorRef: ActorRef[Event]) {
def foundLocalProjects(result: Try[Seq[String]]): Unit = {
result match {
case Success(names) => actorRef ! ProjectsFoundLocally(names)
case Failure(exception) => actorRef ! ErrorFindingProjectsLocally(exception)
}
}
def foundRemoteProjects(result: Try[Seq[String]]): Unit = {
result match {
case Success(names) => actorRef ! ProjectsFoundInGithub(names)
case Failure(exception) => actorRef ! ErrorFindingProjectsLocally(exception)
}
}
def finishedClone(projectName: String): Try[ProcessOutput] => Unit = {
case Success(processOutput) =>
if (processOutput.exitCode == 0) {
actorRef ! ProjectCloned(projectName)
} else {
actorRef ! FailedToClone(projectName, FailReason.ExitCode(processOutput.exitCode))
}
case Failure(exception) =>
actorRef ! FailedToClone(projectName, FailReason.ExceptionThrown(exception))
}
def finishedBuild(projectName: String): Try[ProcessOutput] => Unit = {
case Success(processOutput) =>
if (processOutput.exitCode == 0) {
actorRef ! ProjectBuilt(projectName)
} else {
actorRef ! FailedToBuild(projectName, FailReason.ExitCode(processOutput.exitCode))
}
case Failure(exception) =>
actorRef ! FailedToBuild(projectName, FailReason.ExceptionThrown(exception))
}
def finishedFetch(projectName: String): Try[ProcessOutput] => Unit = {
case Success(processOutput) =>
if (processOutput.exitCode == 0) {
actorRef ! ProjectFetched(projectName)
} else {
actorRef ! FailedToFetch(projectName, FailReason.ExitCode(processOutput.exitCode))
}
case Failure(exception) =>
actorRef ! FailedToFetch(projectName, FailReason.ExceptionThrown(exception))
}
def finishedMerge(projectName: String): Try[ProcessOutput] => Unit = {
case Success(processOutput) =>
if (processOutput.exitCode == 0) {
actorRef ! ProjectMerged(projectName)
} else {
actorRef ! FailedToMerge(projectName, FailReason.ExitCode(processOutput.exitCode))
}
case Failure(exception) =>
actorRef ! FailedToMerge(projectName, FailReason.ExceptionThrown(exception))
}
def finishedPush(projectName: String): Try[ProcessOutput] => Unit = {
case Success(processOutput) =>
if (processOutput.exitCode == 0) {
actorRef ! ProjectPushed(projectName)
} else {
actorRef ! FailedToPush(projectName, FailReason.ExitCode(processOutput.exitCode))
}
case Failure(exception) =>
actorRef ! FailedToPush(projectName, FailReason.ExceptionThrown(exception))
}
def finishedCheckingForPendingEdits(projectName: String): Try[ProcessOutput] => Unit = {
case Success(processOutput) =>
if (processOutput.exitCode == 0) {
if (processOutput.outputLines.isEmpty) {
if(ProjectOverrides.shouldBuild(projectName)){
actorRef ! Event.NoPendingEdits(projectName)
} else {
actorRef ! Event.Ignored(projectName)
}
} else {
actorRef ! Event.HasPendingEdits(projectName)
}
} else {
actorRef ! Event.FailedToGetPendingEdits(projectName, FailReason.ExitCode(processOutput.exitCode))
}
case Failure(exception) =>
actorRef ! Event.FailedToGetPendingEdits(projectName, FailReason.ExceptionThrown(exception))
}
def missingFromGithub(name: String): Unit = {
actorRef ! MissingFromGithub(name)
}
def unableToProcessProjectInThisState(project: String, state: ProjectState): Unit = {
actorRef ! UnableToProcessProjectInThisState(project, ClassUtil.getSimpleClassName(state))
}
def finishedUpgradingDependencies(projectName: String): Try[SummaryReport] => Unit = {
case Success(summaryReport) =>
if (summaryReport.updatesWereApplied) {
actorRef ! Event.UpdatesWereApplied(projectName)
} else {
actorRef ! Event.NoUpdatesWereNeeded(projectName)
}
case Failure(exception) =>
actorRef ! Event.FailedToUpgradeDependencies(projectName, FailReason.ExceptionThrown(exception))
}
def skipUpgradingDependencies(projectName: String): Unit = {
actorRef ! Event.NoUpdatesWereNeeded(projectName)
}
def finishedAdd(projectName: String): Try[ProcessOutput] => Unit = {
case Success(processOutput) =>
if (processOutput.exitCode == 0) {
actorRef ! Event.UpdatesAdded(projectName)
} else {
actorRef ! Event.FailedToAddUpdates(projectName, FailReason.ExitCode(processOutput.exitCode))
}
case Failure(exception) =>
actorRef ! Event.FailedToAddUpdates(projectName, FailReason.ExceptionThrown(exception))
}
def finishedCommit(projectName: String): Try[ProcessOutput] => Unit = {
case Success(processOutput) =>
if (processOutput.exitCode == 0) {
actorRef ! Event.UpdatesCommitted(projectName)
} else {
actorRef ! Event.FailedToCommitUpdates(projectName, FailReason.ExitCode(processOutput.exitCode))
}
case Failure(exception) =>
actorRef ! Event.FailedToCommitUpdates(projectName, FailReason.ExceptionThrown(exception))
}
def finishedPushAfterUpdate(projectName: String): Try[ProcessOutput] => Unit = {
case Success(processOutput) =>
if (processOutput.exitCode == 0) {
actorRef ! Event.UpdatesPushed(projectName)
} else {
actorRef ! Event.FailedToPushUpdates(projectName, FailReason.ExitCode(processOutput.exitCode))
}
case Failure(exception) =>
actorRef ! Event.FailedToPushUpdates(projectName, FailReason.ExceptionThrown(exception))
}
}
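
// A usage sketch: each handler returns a Try callback, so it can be passed
// straight to Future.onComplete (both values below are hypothetical):
//
//   val handler = new DispatchResultHandler(eventActor)          // eventActor: ActorRef[Event]
//   cloneFuture.onComplete(handler.finishedClone("my-project"))  // cloneFuture: Future[ProcessOutput]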
| SeanShubin/builder | domain/src/main/scala/com/seanshubin/builder/domain/DispatchResultHandler.scala | Scala | unlicense | 5,882 |
/*
* Copyright 2019 Spotify AB.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.spotify.scio.bigquery.types
import com.google.api.services.bigquery.model.TableReference
import com.spotify.scio.bigquery.client.BigQuery
import com.spotify.scio.bigquery.{Query, Table}
import org.scalatest.Assertion
import org.scalatest.flatspec.AnyFlatSpec
import org.scalatest.matchers.should.Matchers
import scala.annotation.StaticAnnotation
import scala.jdk.CollectionConverters._
import scala.reflect.runtime.universe._
object BigQueryTypeIT {
@BigQueryType.fromQuery(
"SELECT word, word_count FROM [bigquery-public-data:samples.shakespeare] WHERE word = 'Romeo'"
)
class LegacyT
@BigQueryType.fromQuery(
"SELECT word, word_count FROM `bigquery-public-data.samples.shakespeare` WHERE word = 'Romeo'"
)
class SqlT
@BigQueryType.fromTable("bigquery-public-data:samples.shakespeare")
class FromTableT
@BigQueryType.toTable
case class ToTableT(word: String, word_count: Int)
@BigQueryType.fromQuery(
"SELECT word, word_count FROM [data-integration-test:partition_a.table_%s]",
"$LATEST"
)
class LegacyLatestT
@BigQueryType.fromQuery(
"SELECT word, word_count FROM `data-integration-test.partition_a.table_%s`",
"$LATEST"
)
class SqlLatestT
@BigQueryType.fromQuery(
"""
|SELECT word, word_count
|FROM `data-integration-test.partition_a.table_%s`
|WHERE word_count > %3$d and word != '%%'
|LIMIT %d
""".stripMargin,
"$LATEST",
1,
1
)
class SqlLatestTWithMultiArgs
@BigQueryType.fromTable("data-integration-test:partition_a.table_%s", "$LATEST")
class FromTableLatestT
class Annotation1 extends StaticAnnotation
class Annotation2 extends StaticAnnotation
@Annotation1
@BigQueryType.fromTable("bigquery-public-data:samples.shakespeare")
@Annotation2
class ShakespeareWithSurroundingAnnotations
@BigQueryType.fromTable("bigquery-public-data:samples.shakespeare")
@Annotation1
@Annotation2
class ShakespeareWithSequentialAnnotations
}
// scio-test/it:runMain com.spotify.scio.PopulateTestData to re-populate data for integration tests
class BigQueryTypeIT extends AnyFlatSpec with Matchers {
import BigQueryTypeIT._
val bq: BigQuery = BigQuery.defaultInstance()
val legacyQuery =
"SELECT word, word_count FROM [bigquery-public-data:samples.shakespeare] WHERE word = 'Romeo'"
val sqlQuery =
"SELECT word, word_count FROM `bigquery-public-data.samples.shakespeare` WHERE word = 'Romeo'"
val legacyLatestQuery =
"SELECT word, word_count FROM [data-integration-test:partition_a.table_%s]"
val sqlLatestQuery =
"SELECT word, word_count FROM `data-integration-test.partition_a.table_%s`"
"fromQuery" should "work with legacy syntax" in {
val bqt = BigQueryType[LegacyT]
bqt.isQuery shouldBe true
bqt.isTable shouldBe false
bqt.query shouldBe Some(legacyQuery)
bqt.table shouldBe None
val fields = bqt.schema.getFields.asScala
fields.size shouldBe 2
fields.map(_.getName) shouldBe Seq("word", "word_count")
fields.map(_.getType) shouldBe Seq("STRING", "INTEGER")
fields.map(_.getMode) shouldBe Seq("REQUIRED", "REQUIRED")
}
it should "work with SQL syntax" in {
val bqt = BigQueryType[SqlT]
bqt.isQuery shouldBe true
bqt.isTable shouldBe false
bqt.query shouldBe Some(sqlQuery)
bqt.table shouldBe None
val fields = bqt.schema.getFields.asScala
fields.size shouldBe 2
fields.map(_.getName) shouldBe Seq("word", "word_count")
fields.map(_.getType) shouldBe Seq("STRING", "INTEGER")
fields.map(_.getMode) shouldBe Seq("NULLABLE", "NULLABLE")
}
it should "round trip rows with legacy syntax" in {
val bqt = BigQueryType[LegacyT]
val rows = bq.query.rows(legacyQuery).toList
val typed = Seq(LegacyT("Romeo", 117L))
rows.map(bqt.fromTableRow) shouldBe typed
typed.map(bqt.toTableRow).map(bqt.fromTableRow) shouldBe typed
}
it should "round trip rows with SQL syntax" in {
val bqt = BigQueryType[SqlT]
val rows = bq.query.rows(sqlQuery).toList
val typed = Seq(SqlT(Some("Romeo"), Some(117L)))
rows.map(bqt.fromTableRow) shouldBe typed
typed.map(bqt.toTableRow).map(bqt.fromTableRow) shouldBe typed
}
it should "work with legacy syntax with $LATEST" in {
BigQueryType[LegacyLatestT].query shouldBe Some(legacyLatestQuery)
}
it should "work with SQL syntax with $LATEST" in {
BigQueryType[SqlLatestT].query shouldBe Some(sqlLatestQuery)
}
it should "have query fn" in {
"""LegacyLatestT.query("TABLE")""" should compile
"""SqlLatestT.query("TABLE")""" should compile
}
it should "have query fn with only 1 argument" in {
"""LegacyLatestT.query("TABLE", 1)""" shouldNot typeCheck
"""SqlLatestT.query("TABLE", 1)""" shouldNot typeCheck
}
it should "have query fn with multiple arguments" in {
"""SqlLatestTWithMultiArgs.query("TABLE", 1, 1)""" should compile
"""SqlLatestTWithMultiArgs.query(1, "TABLE", 1)""" shouldNot typeCheck
}
it should "format query" in {
LegacyLatestT.query("TABLE") shouldBe legacyLatestQuery.format("TABLE")
SqlLatestT.query("TABLE") shouldBe sqlLatestQuery.format("TABLE")
}
it should "format and return query as source" in {
LegacyLatestT.queryAsSource("TABLE") shouldBe Query(legacyLatestQuery.format("TABLE"))
LegacyLatestT.queryAsSource("$LATEST").latest(bq) shouldBe Query(
legacyLatestQuery.format("$LATEST")
).latest(bq)
SqlLatestT.queryAsSource("TABLE") shouldBe Query(sqlLatestQuery.format("TABLE"))
SqlLatestT.queryAsSource("$LATEST").latest(bq) shouldBe Query(sqlLatestQuery.format("$LATEST"))
.latest(bq)
}
it should "resolve latest Table" in {
val tableReference = new TableReference
tableReference.setProjectId("data-integration-test")
tableReference.setDatasetId("partition_a")
tableReference.setTableId("table_$LATEST")
Table.Ref(tableReference).latest().ref.getTableId shouldBe "table_20170103"
Table
.Spec("data-integration-test:partition_a.table_$LATEST")
.latest()
.ref
.getTableId shouldBe "table_20170103"
}
it should "type check annotation arguments" in {
"""
| @BigQueryType.fromQuery(
| "SELECT word, word_count FROM `data-integration-test.partition_a.table_%s` LIMIT %d",
| "$LATEST",
| "1")
| class WrongFormatSupplied
""".stripMargin shouldNot compile
}
"fromTable" should "work" in {
val bqt = BigQueryType[FromTableT]
bqt.isQuery shouldBe false
bqt.isTable shouldBe true
bqt.query shouldBe None
bqt.table shouldBe Some("bigquery-public-data:samples.shakespeare")
val fields = bqt.schema.getFields.asScala
fields.size shouldBe 4
fields.map(_.getName) shouldBe Seq("word", "word_count", "corpus", "corpus_date")
fields.map(_.getType) shouldBe Seq("STRING", "INTEGER", "STRING", "INTEGER")
fields.map(_.getMode) shouldBe Seq("REQUIRED", "REQUIRED", "REQUIRED", "REQUIRED")
}
it should "work with $LATEST" in {
BigQueryType[FromTableLatestT].table shouldBe Some("data-integration-test:partition_a.table_%s")
}
def containsAllAnnotTypes[T: TypeTag]: Assertion = {
val types = typeOf[T].typeSymbol.annotations
.map(_.tree.tpe)
Seq(typeOf[Annotation1], typeOf[Annotation2])
.forall(lt => types.exists(rt => lt =:= rt)) shouldBe true
}
it should "preserve surrounding user defined annotations" in {
containsAllAnnotTypes[ShakespeareWithSurroundingAnnotations]
}
it should "preserve sequential user defined annotations" in {
containsAllAnnotTypes[ShakespeareWithSequentialAnnotations]
}
"toTable" should "work" in {
val bqt = BigQueryType[ToTableT]
bqt.isQuery shouldBe false
bqt.isTable shouldBe false
bqt.query shouldBe None
bqt.table shouldBe None
val fields = bqt.schema.getFields.asScala
fields.size shouldBe 2
fields.map(_.getName) shouldBe Seq("word", "word_count")
fields.map(_.getType) shouldBe Seq("STRING", "INTEGER")
fields.map(_.getMode) shouldBe Seq("REQUIRED", "REQUIRED")
}
}
| spotify/scio | scio-google-cloud-platform/src/it/scala/com/spotify/scio/bigquery/types/BigQueryTypeIT.scala | Scala | apache-2.0 | 8,701 |
/*
* Copyright 2009-2011 WorldWide Conferencing, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.liftweb
package json
import java.util.Date
import org.specs2.mutable.Specification
object SerializationExamples extends Specification {
import Serialization.{read, write => swrite}
implicit val formats = Serialization.formats(NoTypeHints)
val project = Project("test", new Date, Some(Language("Scala", 2.75)), List(
Team("QA", List(Employee("John Doe", 5), Employee("Mike", 3))),
Team("Impl", List(Employee("Mark", 4), Employee("Mary", 5), Employee("Nick Noob", 1)))))
"Project serialization example" in {
val ser = swrite(project)
read[Project](ser) mustEqual project
}
case class Project(name: String, startDate: Date, lang: Option[Language], teams: List[Team])
case class Language(name: String, version: Double)
case class Team(role: String, members: List[Employee])
case class Employee(name: String, experience: Int)
"Null example" in {
val ser = swrite(Nullable(null))
read[Nullable](ser) mustEqual Nullable(null)
}
case class Nullable(name: String)
"Lotto serialization example" in {
import LottoExample.{Lotto, lotto}
val ser = swrite(lotto)
read[Lotto](ser) mustEqual lotto
}
"Primitive serialization example" in {
val primitives = Primitives(124, 123L, 126.5, 127.5.floatValue, "128", 's, 125, 129.byteValue, true)
val ser = swrite(primitives)
read[Primitives](ser) mustEqual primitives
}
"Multidimensional list example" in {
val ints = Ints(List(List(1, 2), List(3), List(4, 5)))
val ser = swrite(ints)
read[Ints](ser) mustEqual ints
}
"Map serialization example" in {
val p = PersonWithAddresses("joe", Map("address1" -> Address("Bulevard", "Helsinki"),
"address2" -> Address("Soho", "London")))
val ser = swrite(p)
read[PersonWithAddresses](ser) mustEqual p
}
"Recursive type serialization example" in {
val r1 = Rec(1, Nil)
val r2 = Rec(2, Nil)
val r3 = Rec(3, r1 :: r2 :: Nil)
val ser = swrite(r3)
read[Rec](ser) mustEqual r3
}
"Set serialization example" in {
val s = SetContainer(Set("foo", "bar"))
val ser = swrite(s)
read[SetContainer](ser) mustEqual s
}
"Array serialization example" in {
val s = ArrayContainer(Array("foo", "bar"))
val ser = swrite(s);
val unser = read[ArrayContainer](ser)
s.array.toList mustEqual unser.array.toList
}
"Seq serialization example" in {
val s = SeqContainer(List("foo", "bar"))
val ser = swrite(s)
read[SeqContainer](ser) mustEqual s
}
"Option serialization example" in {
val ser = swrite(Some(List(1, 2)))
(read[Option[List[Int]]](ser) mustEqual Some(List(1, 2))) and
(read[Option[List[Int]]]("") mustEqual None)
}
"None Option of tuple serialization example" in {
    // This is a regression test case that failed in lift-json
val s = OptionOfTupleOfDouble(None)
val ser = swrite(s)
read[OptionOfTupleOfDouble](ser) mustEqual s
}
"Case class with internal state example" in {
val m = Members("s", 1)
val ser = swrite(m)
(ser mustEqual """{"x":"s","y":1}""") and
(read[Members](ser) mustEqual m)
}
"Case class from type constructors example" in {
val p = ProperType(TypeConstructor(Chicken(10)), Pair(25, Player("joe")))
val ser = swrite(p)
read[ProperType](ser) mustEqual p
}
case class Ints(x: List[List[Int]])
case class Rec(n: Int, xs: List[Rec])
case class Members(x: String, y: Int) {
val foo1 = "foo"
lazy val foo2 = "foo"
}
}
object ShortTypeHintExamples extends TypeHintExamples {
implicit val formats = Serialization.formats(ShortTypeHints(classOf[Fish] :: classOf[Dog] :: Nil))
"Deserialization succeeds even if jsonClass is not the first field" in {
val ser = """{"animals":[],"pet":{"name":"pluto","jsonClass":"Dog"}}"""
Serialization.read[Animals](ser) mustEqual Animals(Nil, Dog("pluto"))
}
}
object FullTypeHintExamples extends TypeHintExamples {
import Serialization.{read, write => swrite}
implicit val formats = Serialization.formats(FullTypeHints(List[Class[_]](classOf[Animal], classOf[True], classOf[False], classOf[Falcon], classOf[Chicken])))
"Ambiguous field decomposition example" in {
val a = Ambiguous(False())
val ser = swrite(a)
read[Ambiguous](ser) mustEqual a
}
"Ambiguous parameterized field decomposition example" in {
val o = AmbiguousP(Chicken(23))
val ser = swrite(o)
read[AmbiguousP](ser) mustEqual o
}
"Option of ambiguous field decomposition example" in {
val o = OptionOfAmbiguous(Some(True()))
val ser = swrite(o)
read[OptionOfAmbiguous](ser) mustEqual o
}
"Option of ambiguous parameterized field decomposition example" in {
val o = OptionOfAmbiguousP(Some(Falcon(200.0)))
val ser = swrite(o)
read[OptionOfAmbiguousP](ser) mustEqual o
}
}
object CustomTypeHintFieldNameExample extends TypeHintExamples {
import Serialization.{read, write => swrite}
implicit val formats = new Formats {
val dateFormat = DefaultFormats.lossless.dateFormat
override val typeHints = ShortTypeHints(classOf[Fish] :: classOf[Dog] :: Nil)
override val typeHintFieldName = "$type$"
}
"Serialized JSON contains configured field name" in {
val animals = Animals(Dog("pluto") :: Fish(1.2) :: Nil, Dog("pluto"))
val ser = swrite(animals)
ser mustEqual """{"animals":[{"$type$":"Dog","name":"pluto"},{"$type$":"Fish","weight":1.2}],"pet":{"$type$":"Dog","name":"pluto"}}"""
}
}
trait TypeHintExamples extends Specification {
import Serialization.{read, write => swrite}
implicit val formats: Formats
"Polymorphic List serialization example" in {
val animals = Animals(Dog("pluto") :: Fish(1.2) :: Dog("devil") :: Nil, Dog("pluto"))
val ser = swrite(animals)
read[Animals](ser) mustEqual animals
}
"Parameterized type serialization example" in {
val objs = Objs(Obj(Fish(1.2)) :: Obj(Dog("pluto")) :: Nil)
val ser = swrite(objs)
read[Objs](ser) mustEqual objs
}
"Tuple serialization example" in {
val t: (Animal, Animal) = (Fish(1.5), Dog("pluto"))
val ser = swrite(t)
read[(Animal, Animal)](ser) mustEqual t
}
}
case class Animals(animals: List[Animal], pet: Animal)
trait Animal
case class Dog(name: String) extends Animal
case class Fish(weight: Double) extends Animal
case class Objs(objects: List[Obj[_]])
case class Obj[A](a: A)
object CustomSerializerExamples extends Specification {
import Serialization.{read, write => swrite}
import JsonAST._
import java.util.regex.Pattern
class IntervalSerializer extends CustomSerializer[Interval](format => (
{
case JObject(JField("start", JInt(s)) :: JField("end", JInt(e)) :: Nil) =>
new Interval(s.longValue, e.longValue)
},
{
case x: Interval =>
JObject(JField("start", JInt(BigInt(x.startTime))) ::
JField("end", JInt(BigInt(x.endTime))) :: Nil)
}
))
class PatternSerializer extends CustomSerializer[Pattern](format => (
{
case JObject(JField("$pattern", JString(s)) :: Nil) => Pattern.compile(s)
},
{
case x: Pattern => JObject(JField("$pattern", JString(x.pattern)) :: Nil)
}
))
class DateSerializer extends CustomSerializer[Date](format => (
{
case JObject(List(JField("$dt", JString(s)))) =>
format.dateFormat.parse(s).getOrElse(throw new MappingException("Can't parse "+ s + " to Date"))
},
{
case x: Date => JObject(JField("$dt", JString(format.dateFormat.format(x))) :: Nil)
}
))
class IndexedSeqSerializer extends Serializer[IndexedSeq[_]] {
def deserialize(implicit formats: Formats) = {
case (TypeInfo(clazz, ptype), json) if classOf[IndexedSeq[_]].isAssignableFrom(clazz) => json match {
case JArray(xs) =>
val t = ptype.getOrElse(throw new MappingException("parameterized type not known"))
xs.map(x => Extraction.extract(x, TypeInfo(t.getActualTypeArguments()(0).asInstanceOf[Class[_]], None))).toIndexedSeq
case x => throw new MappingException("Can't convert " + x + " to IndexedSeq")
}
}
def serialize(implicit formats: Formats) = {
case i: IndexedSeq[_] => JArray(i.map(Extraction.decompose).toList)
}
}
implicit val formats = Serialization.formats(NoTypeHints) +
new IntervalSerializer + new PatternSerializer + new DateSerializer + new IndexedSeqSerializer
"Interval serialization example" in {
val i = new Interval(1, 4)
val ser = swrite(i)
ser mustEqual """{"start":1,"end":4}"""
val i2 = read[Interval](ser)
i2.startTime mustEqual i.startTime
i2.endTime mustEqual i.endTime
}
"Pattern serialization example" in {
val pat = Pattern.compile("^Curly")
val pser = swrite(pat)
pser mustEqual """{"$pattern":"^Curly"}"""
read[Pattern](pser).pattern mustEqual pat.pattern
}
"Date serialization example" in {
val d = new Date(0)
val dser = swrite(d)
dser mustEqual """{"$dt":"1970-01-01T00:00:00.000Z"}"""
read[Date](dser) mustEqual d
}
"Indexed serialization example" in {
val xs = Indexed(Vector("a", "b", "c"))
val iser = swrite(xs)
iser mustEqual """{"xs":["a","b","c"]}"""
read[Indexed](iser).xs.toList mustEqual List("a","b","c")
}
}
case class Indexed(xs: IndexedSeq[String])
class Interval(start: Long, end: Long) {
val startTime = start
val endTime = end
}
object CustomClassWithTypeHintsExamples extends Specification {
import Serialization.{read, write => swrite}
import JsonAST._
val hints = new ShortTypeHints(classOf[DateTime] :: Nil) {
override def serialize: PartialFunction[Any, JObject] = {
case t: DateTime => JObject(JField("t", JInt(t.time)) :: Nil)
}
override def deserialize: PartialFunction[(String, JObject), Any] = {
case ("DateTime", JObject(JField("t", JInt(t)) :: Nil)) => new DateTime(t.longValue)
}
}
implicit val formats = Serialization.formats(hints)
"Custom class serialization using provided serialization and deserialization functions" in {
val m = Meeting("The place", new DateTime(1256681210802L))
val ser = swrite(m)
val m2 = read[Meeting](ser)
m.place mustEqual m2.place
m.time.time mustEqual m2.time.time
}
"List of custom classes example" in {
val ts = Times(List(new DateTime(123L), new DateTime(234L)))
val ser = swrite(ts)
val ts2 = read[Times](ser)
ts2.times(0).time mustEqual 123L
ts2.times(1).time mustEqual 234L
ts2.times.size mustEqual 2
}
}
case class Meeting(place: String, time: DateTime)
class DateTime(val time: Long)
case class Times(times: List[DateTime])
sealed abstract class Bool
case class True() extends Bool
case class False() extends Bool
case class Ambiguous(child: Bool)
trait Bird
case class Falcon(weight: Double) extends Bird
case class Chicken(eggs: Int) extends Bird
case class AmbiguousP(bird: Bird)
case class OptionOfAmbiguous(opt: Option[Bool])
case class OptionOfAmbiguousP(opt: Option[Bird])
case class SetContainer(set: Set[String])
case class ArrayContainer(array: Array[String])
case class SeqContainer(seq: Seq[String])
case class OptionOfTupleOfDouble(position: Option[Tuple2[Double, Double]])
case class Player(name: String)
case class TypeConstructor[A](x: A)
case class Pair[A, B](fst: A, snd: B)
case class ProperType(x: TypeConstructor[Chicken], t: Pair[Int, Player])
| sortable/framework | core/json/src/test/scala/net/liftweb/json/SerializationExamples.scala | Scala | apache-2.0 | 12,082 |
package physical.habitat
import org.scalatestplus.mockito.MockitoSugar
import org.scalatest.{FlatSpec, PrivateMethodTester}
/**
*
* Created by Steven Hawes on 17/02/2016.
*/
class HabitatManagerTest extends FlatSpec with MockitoSugar with PrivateMethodTester {
/* val list = Array(
new Coordinate(-29.112576,153.629551),
new Coordinate(-29.112201,153.658476),
new Coordinate(-29.128697,153.658905),
new Coordinate(-29.128922,153.63101),
new Coordinate(-29.112576,153.629551))
println("created array.....")
val gf = new GeometryFactory()
val polygon = gf.createPolygon(list)
val point = gf.createPoint(new Coordinate(-29.121274,153.63616))
val point_out = gf.createPoint(new Coordinate(-29.102576,153.63616))
val buffer = polygon.buffer(0.2)
println("The polygon contains the point "+polygon.contains(point))
println("The point is within the polygon "+point.within(polygon))
println("The point intersects the polygon "+polygon.intersects(point))
println("The area of the polygon is "+polygon.getArea)
println("The buffer contains the point "+buffer.contains(point))
println("The point is within the buffer "+point.within(buffer))
println("The point intersects the buffer "+buffer.intersects(point))
println("The area of the buffer is "+buffer.getArea)
println("The polygon contains the point_out "+polygon.contains(point_out))
println("The point_out is within the polygon "+point_out.within(polygon))
println("The point_out intersects the polygon "+polygon.intersects(point_out))
println("The buffer contains the point_out "+buffer.contains(point_out))
println("The point is within the buffer "+point_out.within(buffer))
println("The point_out intersects the buffer "+buffer.intersects(point_out))
println("Distance between points "+point_out.distance(point))*/
}
| shawes/zissou | src/test/scala/physical/habitat/HabitatManagerTest.scala | Scala | mit | 1,868 |
package pt.up.fe.luisfonseca.cp.ui
import android.content.Intent
import android.os.Bundle
object Util {
/**
* Converts an intent into a {@link Bundle} suitable for use as fragment
* arguments.
*
* @param intent
     * @return the bundle with the arguments
*/
def intentToFragmentArguments(intent: Intent): Bundle = {
val arguments = new Bundle();
if (intent == null) {
return arguments;
}
val data = intent.getData();
if (data != null) {
arguments.putParcelable("URL_INTENT", data);
}
val extras = intent.getExtras();
if (extras != null) {
arguments.putAll(intent.getExtras());
}
return arguments;
}
} | luismfonseca/cp-in-scala | src/pt/up/fe/luisfonseca/cp/ui/Util.scala | Scala | bsd-3-clause | 770 |
import scala.reflect.ClassTag
object Test extends App{
BooleanArrayClone;
ByteArrayClone;
ShortArrayClone;
CharArrayClone;
IntArrayClone;
LongArrayClone;
FloatArrayClone;
DoubleArrayClone;
ObjectArrayClone;
PolymorphicArrayClone;
}
object BooleanArrayClone{
val it : Array[Boolean] = Array(true, false);
val cloned = it.clone();
assert(cloned.sameElements(it));
cloned(0) = false;
assert(it(0) == true)
}
object ByteArrayClone{
val it : Array[Byte] = Array(1, 0);
val cloned = it.clone();
assert(cloned.sameElements(it));
cloned(0) = 0;
assert(it(0) == 1)
}
object ShortArrayClone{
val it : Array[Short] = Array(1, 0);
val cloned = it.clone();
assert(cloned.sameElements(it));
cloned(0) = 0;
assert(it(0) == 1)
}
object CharArrayClone{
val it : Array[Char] = Array(1, 0);
val cloned = it.clone();
assert(cloned.sameElements(it));
cloned(0) = 0;
assert(it(0) == 1)
}
object IntArrayClone{
val it : Array[Int] = Array(1, 0);
val cloned = it.clone();
assert(cloned.sameElements(it));
cloned(0) = 0;
assert(it(0) == 1)
}
object LongArrayClone{
val it : Array[Long] = Array(1, 0);
val cloned = it.clone();
assert(cloned.sameElements(it));
cloned(0) = 0;
assert(it(0) == 1)
}
object FloatArrayClone{
val it : Array[Float] = Array(1, 0);
val cloned = it.clone();
assert(cloned.sameElements(it));
cloned(0) = 0;
assert(it(0) == 1)
}
object DoubleArrayClone{
val it : Array[Double] = Array(1, 0);
val cloned = it.clone();
assert(cloned.sameElements(it));
cloned(0) = 0;
assert(it(0) == 1)
}
object ObjectArrayClone{
val it : Array[String] = Array("1", "0");
val cloned = it.clone();
assert(cloned.sameElements(it));
cloned(0) = "0";
assert(it(0) == "1")
}
object PolymorphicArrayClone{
def testIt[T](it : Array[T], one : T, zero : T) = {
val cloned = it.clone();
assert(cloned.sameElements(it));
cloned(0) = zero;
assert(it(0) == one)
}
testIt(Array("one", "two"), "one", "two");
class Mangler[T: ClassTag](ts : T*){
// this will always be a BoxedAnyArray even after we've unboxed its contents.
val it = ts.toArray[T];
}
val mangled = new Mangler[Int](0, 1);
val y : Array[Int] = mangled.it; // make sure it's unboxed
testIt(mangled.it, 0, 1);
}
| scala/scala | test/files/run/arrayclone-new.scala | Scala | apache-2.0 | 2,303 |
object worksheet {import scala.runtime.WorksheetSupport._; def main(args: Array[String])=$execute{;$skip(62);
println("Welcome to the Scala worksheet")}
} | simula67/scala-coursera-martin-odersky | patmat/.worksheet/src/worksheet.scala | Scala | mpl-2.0 | 157 |
package pt.testing.whaa.ui.author
import android.app.ListFragment
import android.os.Bundle
import android.view.View
import android.widget.ListView
import org.scaloid.common._
import com.google.gson.Gson
import pt.testing.whaa.ui.ChangeToFragmentHandler
import pt.testing.whaa.models.Author
object AuthorListFragment {
val BUNDLE_MODEL_JSON = "model_json"
val MENU_ITEM_EDIT = 1
val MENU_ITEM_DELETE = 2
val REQUEST_EDIT = 1
def newInstance(model: Author): AuthorListFragment = {
val arguments = new Bundle()
arguments.putString(BUNDLE_MODEL_JSON, new Gson().toJson(model))
val fragment = new AuthorListFragment()
fragment.setArguments(arguments)
fragment
}
}
class AuthorListFragment extends ListFragment {
var mListAdapter: AuthorListAdapter = _
lazy val mItems: Array[Author] = {
// TODO: Load real object from database
(1 to 4).foldLeft(Array[Author]()) {
(acc, index) => {
acc :+ Author(
"Lorem ipsum dolor sit amet.",
5
)
}
}
}
override def onActivityCreated(bundle: Bundle): Unit = {
super.onActivityCreated(bundle)
getListView().setDividerHeight(0)
mListAdapter = new AuthorListAdapter(getActivity(), mItems)
setListAdapter(mListAdapter)
}
override def onListItemClick(listView: ListView, view: View, position: Int, id: Long) {
val authorFragment = AuthorFragment.newInstance(mItems(position))
(getActivity().asInstanceOf[ChangeToFragmentHandler]).onChangeToFragment(authorFragment)
}
}
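
// A usage sketch from a hosting activity; the container id and model values are
// illustrative:
//
//   val fragment = AuthorListFragment.newInstance(Author("Lorem ipsum", 5))
//   getFragmentManager.beginTransaction()
//     .replace(R.id.container, fragment)
//     .commit()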
| luismfonseca/agile-scala-android | src/sbt-test/agile-scala-android/scaffold-recursive/src/main/scala/pt/testing/whaa/ui/author/AuthorListFragment.scala | Scala | mit | 1,613 |
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** @author John Miller, Hao Peng, Zhe Jin
* @version 1.3
* @date Mon Jul 27 01:27:00 EDT 2015
* @see LICENSE (MIT style license file).
*/
package scalation.analytics.classifier
import java.util.Random
import scala.collection.mutable.{Set => SET, Map}
import scalation.graphalytics.Pair
import scalation.graphalytics.mutable.{MinSpanningTree, MGraph}
import scalation.linalgebra.{MatrixD, MatriI, MatrixI, VectorD, VectoI, VectorI}
import scalation.linalgebra.gen.{HMatrix2, HMatrix3, HMatrix4, HMatrix5}
import scalation.random.PermutedVecI
import scalation.random.RNGStream._
import scalation.relalgebra.Relation
import BayesClassifier.me_default
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `TANBayes` class implements an Integer-Based Tree Augmented Naive Bayes
* Classifier, which is a commonly used such classifier for discrete input data.
* The classifier is trained using a data matrix 'x' and a classification vector 'y'.
* Each data vector in the matrix is classified into one of 'k' classes numbered
* 0, ..., k-1. Prior probabilities are calculated based on the population of
* each class in the training-set. Relative posterior probabilities are computed
* by multiplying these by values computed using conditional probabilities. The
* classifier supports limited dependency between features/variables.
* -----------------------------------------------------------------------------
* @param x the integer-valued data vectors stored as rows of a matrix
* @param y the class vector, where y(l) = class for row l of the matrix, x(l)
* @param fn the names for all features/variables
* @param k the number of classes
* @param cn the names for all classes
* @param me use m-estimates (me == 0 => regular MLE estimates)
* @param vc the value count (number of distinct values) for each feature
*/
class TANBayes (x: MatriI, y: VectoI, fn: Array [String], k: Int, cn: Array [String],
me: Double = me_default, private var vc: VectoI = null)
extends BayesClassifier (x, y, fn, k, cn)
{
private val DEBUG = false // debug flag
private var parent = new VectorI (n) // vector holding the parent for each feature/variable
private val vcp = new VectorI (n) // value count for the parent
private val f_C = new VectorI (k) // frequency counts for classes 0, ..., k-1
private var p_C = new VectorD (k) // probabilities for classes 0, ..., k-1
private val f_CXP = new HMatrix4 [Int] (k, n) // conditional frequency counts for variable/feature j: xj
private val p_X_CP = new HMatrix4 [Double] (k, n) // conditional probabilities for variable/feature j: xj
private val N0 = 5.0 // parameter needed for smoothing
val tiny = 1E-9
if (vc == null) {
shiftToZero; vc = vc_fromData // set to default for binary data (2)
} // if
private val vca = vc.toArray
private val g_f_CXZ = new HMatrix5 [Int] (k, n, n, vca, vca) // joint frequency of C, X, and Z, where X, Z are features/columns
private val f_CXZ = new HMatrix5 [Int] (k, n, n, vca, vca)
private val g_f_CX = new HMatrix3 [Int] (k, n, vca) // joint frequency of C and X
private val f_CX = new HMatrix3 [Int] (k, n, vca)
private val g_f_C = new VectorI (k)
private val g_f_X = new HMatrix2[Int] (n, vca)
private val f_X = new HMatrix2[Int] (n, vca)
if (DEBUG) {
println ("value count vc = " + vc)
println ("value count vcp = " + vcp)
println ("parent features par = " + parent)
} // if
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Build the model by computing global frequencies.
* @param testStart beginning of test region (inclusive)
* @param testEnd end of test region (exclusive)
*/
def buildModel (testStart: Int = 0, testEnd: Int = 0): (Array [Boolean], DAG) =
{
// compute frequency values based on the entire dataset
frequenciesAll ()
val pp: Traversable [Array [Int]] = for (p <- parent) yield Array (p)
(Array.fill(n)(true), new DAG (pp.toArray))
} // buildModel
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Train the classifier by computing the probabilities for C, and the
* conditional probabilities for X_j.
* @param testStart starting index of test region (inclusive) used in cross-validation.
* @param testEnd ending index of test region. (exclusive) used in cross-validation.
*/
def train (testStart: Int = 0, testEnd: Int = 0)
{
computeParentQ ((testStart until testEnd).toArray)
computeVcp ()
f_CXP.alloc (vc, vcp)
p_X_CP.alloc (vc, vcp)
copyFreqCXP()
train2()
if (smooth) smoothParam(testEnd - testStart)
} // train
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Train the classifier by computing the probabilities for C, and the
* conditional probabilities for X_j. This is the quick version that uses
* the "subtraction" method to achieve efficiency.
* @param itest indices of the instances considered testing data
*/
def trainQ (itest: Array [Int])
{
computeParentQ (itest) // frequency computations are also done here
computeVcp()
f_CXP.alloc (vc, vcp)
p_X_CP.alloc (vc, vcp)
        // only the joint frequencies of the class, X-feature, and its parent need to be copied; the other frequencies were already computed in computeParentQ
copyFreqCXP ()
train2 ()
if (smooth) smoothParam (itest.size)
} // trainQ
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Train the classifier by computing the probabilities for C, and the
* conditional probabilities for X_j.
*/
private def train2 ()
{
p_C = f_C.toDouble / md // prior probability for class yi
for (i <- 0 until k; j <- 0 until n) { // for each class yi & feature xj
val me_vc = me / vc(j).toDouble
for (xj <- 0 until vc(j); xp <- 0 until vcp(j)) {
val d = if (parent(j) > -1) f_CX(i, parent(j), xp)
else f_C(i)
// for each value for feature j: xj, par(j): xp
p_X_CP(i, j, xj, xp) = (f_CXP(i, j, xj, xp) + me_vc) / (d + me)
} // for
} // for
if (DEBUG) {
println ("p_C = " + p_C) // P(C = i)
println ("p_X_CP = " + p_X_CP) // P(X_j = x | C = i)
} // if
} // train2
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Compute frequency counts using the entire data matrix
*/
def frequenciesAll ()
{
for (i <- 0 until m) {
val yi = y(i)
g_f_C(yi) += 1
for (j <- 0 until n) {
g_f_X(j, x(i, j)) += 1
g_f_CX(yi, j, x(i, j)) += 1
for (j2 <- j+1 until n) g_f_CXZ(yi, j, j2, x(i, j), x(i, j2)) += 1
} // for
} // for
for (c <- 0 until k; j <- 0 until n; j2 <- j+1 until n; xj <- 0 until vc(j); xj2 <- 0 until vc(j2)) {
g_f_CXZ(c, j2, j, xj2, xj) = g_f_CXZ(c, j, j2, xj, xj2)
} // for
} // frequenciesAll
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Perform smoothing operations on the learned parameters by using Dirichlet priors
* to compute the posterior probabilities of the parameters given the training dataset.
* @see citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.178.8884&rep=rep1&type=pdf
     *  @param testSize  size of the test set
*/
private def smoothParam (testSize: Int = 0)
{
for (i <- 0 until k) {
p_C(i) *= m / (m + N0)
p_C(i) += N0 * k / (m + N0)
for (j <- 0 until n) {
val pj = parent(j)
for (xj <- 0 until vc(j); xp <- 0 until vcp(j)) {
val f_px = if (pj > -1) f_CX(i, pj, xp) else f_C(i)
// NOTE: two alternative priors, may work better for some datasets
// val theta0 = f_CXP(i, j, xj, xp) / (md - testSize)
// val theta0 = f_CX(i, j, xj) / (md - testSize)
val theta0 = f_X(j, xj) / (md - testSize)
p_X_CP(i, j, xj, xp) *= (f_px / (f_px + N0))
p_X_CP(i, j, xj, xp) += (N0 / (f_px + N0) * theta0)
} // for
} // for
} // for
} // smoothParam
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Clone/copy the values used in CMI calculations (most of them can also be
* used in the training process) from global freq variables
* (based on the entire dataset) into local ones (based on portions of the
* dataset).
*/
private def copyFreqCMI ()
{
for (i <- 0 until k) {
f_C(i) = g_f_C(i)
for (j <- x.range2; xj <- 0 until vc(j)) {
if (i == 0) f_X(j, xj) = g_f_X(j, xj)
f_CX(i, j, xj) = g_f_CX(i, j, xj)
for (j2 <- j+1 until n; xj2 <- 0 until vc(j2)) {
f_CXZ(i, j, j2, xj, xj2) = g_f_CXZ(i, j, j2, xj, xj2)
f_CXZ(i, j2, j, xj2, xj) = f_CXZ(i, j, j2, xj, xj2)
} // for
} // for
} // for
    } // copyFreqCMI
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Clone/copy the values from global freq variables into local ones.
*/
private def copyFreqCXP ()
{
for (i <- 0 until k; j <- x.range2; xj <- 0 until vc(j); xp <- 0 until vcp(j)) {
f_CXP(i, j, xj, xp) = if (parent(j) > -1) f_CXZ(i, j, parent(j), xj, xp)
else f_CX(i, j, xj)
} // for
    } // copyFreqCXP
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Decrement frequency counters used in CMI calculations based on the 'i'th
* row of the data matrix.
* @param i the index for current data row
*/
private def decrementCMI (i: Int)
{
val yi = y(i) // get the class for ith row
f_C(yi) -= 1 // decrement frequency for class yi
for (j <- x.range2) {
f_X(j, x(i, j)) -= 1
f_CX (yi, j, x(i, j)) -= 1
for (j2 <- j+1 until n) {
f_CXZ (yi, j, j2, x(i, j), x(i, j2)) -= 1
f_CXZ (yi, j2, j, x(i, j2), x(i, j)) -= 1
} // for
} // for
} // decrementCMI
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Compute the parent of each feature based on the correlation matrix.
* Feature x_i is only a possible candidate for parent of feature x_j if i < j.
*/
def computeParentQ (itest: Array [Int])
{
val cmiMx = calcCMIQ (itest)
for (j1 <- 0 until n; j2 <- 0 until j1) cmiMx(j1, j2) = cmiMx(j2, j1)
val ch = Array.ofDim[SET[Int]] (n)
val elabel = Map [Pair, Double] ()
for (i <- 0 until n) ch(i) = SET((i + 1 until n): _*)
for (i <- 0 until n; j <- i + 1 until n) elabel += new Pair(i, j) -> cmiMx(i, j)
parent = VectorI (maxSpanningTree (ch, elabel).makeITree ())
    } // computeParentQ
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Create MaxSpanningTree from conditional mutual information
*/
def maxSpanningTree (ch: Array[SET[Int]], elabel: Map[(Int, Int), Double]): MinSpanningTree =
{
val g = new MGraph (ch, Array.ofDim(n), elabel)
new MinSpanningTree (g, false, false) // param 2 = false means max spanning tree
} // maxSpanningTree
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Compute the value counts of each parent feature based on the parent vector.
*/
def computeVcp ()
{
vcp.set(1) // set default value count to 1
for (j <- 0 until n if (parent(j) > -1)) vcp(j) = vc(parent(j))
} // computeVcp
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Compute the conditional mutual information matrix
*/
def calcCMIQ (itest: Array [Int]): MatrixD =
{
val p_CXZ = new HMatrix5 [Double] (k, n, n, vca, vca) // joint probability of C, X, and Z, where X, Z are features/columns
val p_CX = new HMatrix3 [Double] (k, n, vca) // joint probability of C and X
var p_C: VectorD = null
copyFreqCMI ()
for (i <- itest) decrementCMI(i)
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Compute marginal and joint probabilities
*/
def probabilities ()
{
for (j <- 0 until n) {
for (xj <- 0 until vc(j)) {
//p_X(j, xj) = (f_X(j, xj)) / md
for (c <- 0 until k) {
p_CX(c, j, xj) = (f_CX(c, j, xj) + tiny) / md
for (j2 <- j + 1 until n; xj2 <- 0 until vc(j2)) {
p_CXZ(c, j, j2, xj, xj2) = (f_CXZ(c, j, j2, xj, xj2) + tiny) / md
} // for
} // for
} // for
} // for
} // probabilities
p_C = f_C.toDouble / m
probabilities ()
cmiJoint (p_C, p_CX, p_CXZ)
} // calcCMIQ
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Given a discrete data vector 'z', classify it returning the class number
* (0, ..., k-1) with the highest relative posterior probability.
* Return the best class, its name and its relative probability.
* @param z the data vector to classify
*/
def classify (z: VectoI): (Int, String, Double) =
{
val prob = new VectorD (p_C)
for (i <- 0 until k; j <- 0 until n) {
prob(i) *= (if (parent(j) > -1) p_X_CP(i, j, z(j), z(parent(j))) // P(X_j = z_j | C = i), parent
else p_X_CP(i, j, z(j), 0)) // P(X_j = z_j | C = i), no parent (other than the class)
} // for
if (DEBUG) println ("prob = " + prob)
val best = prob.argmax () // class with the highest relative posterior probability
(best, cn(best), prob(best)) // return the best class, its name and its probability
} // classify
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Reset or re-initialize the frequency tables and the probability tables.
*/
def reset ()
{
f_C.set (0)
f_CX.set (0)
f_X.set (0)
f_CXZ.set (0)
} // reset
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Return the parent.
*/
override def getParent: VectorI = parent
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Test the accuracy of the classified results by cross-validation, returning
* the accuracy. This version of cross-validation relies on "subtracting"
* frequencies from the previously stored global data to achieve efficiency.
* @param nx number of crosses and cross-validations (defaults to 10x).
*/
override def crossValidateRand (nx: Int = 10): Double =
{
//println("Calling efficient CV")
val testSize = size / nx
var sum = 0.0
val rng = new Random ()
val permutedVec = PermutedVecI (VectorI.range(0, size), ranStream)
val randOrder = permutedVec.igen
val itestA = randOrder.split(nx)
for (itest <- itestA) {
reset ()
trainQ (itest.asInstanceOf [Array [Int]])
sum += test (itest)
} // for
sum / nx.toDouble
} // crossValidateRand
} // TANBayes class
//::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `TANBayes` object is the companion object for the `TANBayes` class.
*/
object TANBayes
{
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** Create a `TANBayes` object, passing 'x' and 'y' together in one matrix.
* @param xy the data vectors along with their classifications stored as rows of a matrix
* @param fn the names of the features
     *  @param k    the number of classes
     *  @param cn   the names of the classes
* @param me use m-estimates (me == 0 => regular MLE estimates)
* @param vc the value count (number of distinct values) for each feature
*/
def apply (xy: MatriI, fn: Array [String], k: Int, cn: Array [String],
me: Double = me_default, vc: VectoI = null) =
{
new TANBayes (xy(0 until xy.dim1, 0 until xy.dim2 - 1), xy.col(xy.dim2 - 1), fn, k, cn, me, vc)
} // apply
} // TANBayes object
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `TANBayesTest` object is used to test the `TANBayes` class.
 *  Classify whether a car is more likely to be stolen (1) or not (0).
* @see www.inf.u-szeged.hu/~ormandi/ai2/06-AugNaiveBayes-example.pdf
* > run-main scalation.analytics.classifier.TANBayesTest
*/
object TANBayesTest extends App
{
// x0: Color: Red (1), Yellow (0)
// x1: Type: SUV (1), Sports (0)
// x2: Origin: Domestic (1), Imported (0)
// features: x0 x1 x2
val x = new MatrixI((10, 3), 1, 0, 1, // data matrix
1, 0, 1,
1, 0, 1,
0, 0, 1,
0, 0, 0,
0, 1, 0,
0, 1, 0,
0, 1, 1,
1, 1, 0,
1, 0, 0)
    val y = VectorI (1, 0, 1, 0, 1, 0, 1, 0, 0, 1)                      // classification vector: 0 (No), 1 (Yes)
val fn = Array("Color", "Type", "Origin") // feature/variable names
val cn = Array("No", "Yes") // class names
println("xy = " + (x :^+ y))
println("---------------------------------------------------------------")
val tan = new TANBayes(x, y, fn, 2, cn) // create the classifier
// train the classifier ---------------------------------------------------
tan.train()
// test sample ------------------------------------------------------------
val z1 = VectorI (1, 0, 1) // existing data vector to classify
val z2 = VectorI (1, 1, 1) // new data vector to classify
println ("classify (" + z1 + ") = " + tan.classify (z1) + "\\n")
println ("classify (" + z2 + ") = " + tan.classify (z2) + "\\n")
tan.crossValidate () // cross validate the classifier
} // TANBayesTest object
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `TANBayesTest2` object is used to test the `TANBayes` class.
 *  Given whether a person is Fast and/or Strong, classify them as making (C = 1)
 *  or not making (C = 0) the football team.
* > run-main scalation.analytics.classifier.TANBayesTest2
*/
object TANBayesTest2 extends App
{
// training-set -----------------------------------------------------------
// x0: Fast
// x1: Strong
// y: Classification (No/0, Yes/1)
// features: x0 x1 y
val xy = new MatrixI((10, 3), 1, 1, 1,
1, 1, 1,
1, 0, 1,
1, 0, 1,
1, 0, 0,
0, 1, 0,
0, 1, 0,
0, 1, 1,
0, 0, 0,
0, 0, 0)
val fn = Array ("Fast", "Strong") // feature names
val cn = Array ("No", "Yes") // class names
println("xy = " + xy)
println("---------------------------------------------------------------")
val tan = TANBayes(xy, fn, 2, cn, 1, null) // create the classifier
//tan.computeParent ()
// train the classifier ---------------------------------------------------
tan.train ()
// test sample ------------------------------------------------------------
val z = VectorI (1, 0) // new data vector to classify
println("classify (" + z + ") = " + tan.classify (z) + "\\n")
tan.crossValidate() // cross validate the classifier
} // TANBayesTest2 object
//:::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::
/** The `TANBayesTest3` object is used to test the `TANBayes` class.
* > run-main scalation.analytics.classifier.TANBayesTest3
*/
object TANBayesTest3 extends App
{
val filename = BASE_DIR + "breast-cancer.arff"
var data = Relation (filename, -1, null)
val xy = data.toMatriI2 (null)
val fn = data.colName.slice(0, xy.dim2 - 1).toArray
val cn = Array ("p", "e") // class names
val k = 2
println("---------------------------------------------------------------")
val tan = TANBayes (xy, fn, k, cn) // create the classifier
tan.buildModel ()
println("cv accu = " + tan.crossValidateRand())
} // TANBayesTest3 object
| NBKlepp/fda | scalation_1.3/scalation_modeling/src/main/scala/scalation/analytics/classifier/TANBayes.scala | Scala | mit | 22,195 |
package spire
package math
import spire.algebra.Order
/**
* Interface for a merging strategy object.
*/
trait Merge extends Any {
def merge[@sp A: Order: ClassTag](a:Array[A], b:Array[A]): Array[A]
}
/**
* Abstract class that can be used to implement custom binary merges with e.g. special collision behavior or an ordering
 * that is not defined via an Order[T] typeclass.
*/
abstract class BinaryMerge {
private[this] final def binarySearchB(ai: Int, b0: Int, b1: Int): Int = {
@tailrec
def binarySearch0(low: Int, high: Int): Int =
if (low <= high) {
val mid = (low + high) >>> 1
val c = compare(ai, mid)
if (c > 0)
binarySearch0(mid + 1, high)
else if (c < 0)
binarySearch0(low, mid - 1)
else
mid
} else -(low + 1)
binarySearch0(b0, b1 - 1)
}
/**
* Compare element ai of the first sequence with element bi of the second sequence
* @param ai an index into the first sequence
* @param bi an index into the second sequence
   * @return a negative value if a(ai) < b(bi), 0 if a(ai) == b(bi), a positive value if a(ai) > b(bi)
*/
def compare(ai: Int, bi: Int): Int
/**
* Called when elements a(ai) and b(bi) are equal according to compare
* @param ai
* @param bi
*/
def collision(ai: Int, bi: Int): Unit
/**
* Called for a subsequence of elements of a that are not overlapping any element of b
*/
def fromA(a0: Int, a1: Int, bi: Int): Unit
/**
* Called for a subsequence of elements of b that are not overlapping any element of a
*/
def fromB(ai: Int, b0: Int, b1: Int): Unit
def merge0(a0: Int, a1: Int, b0: Int, b1: Int): Unit = {
if (a0 == a1) {
if (b0 != b1)
fromB(a0, b0, b1)
} else if (b0 == b1) {
fromA(a0, a1, b0)
} else {
val am = (a0 + a1) / 2
val res = binarySearchB(am, b0, b1)
if (res >= 0) {
// same elements
val bm = res
// merge everything below a(am) with everything below the found element
merge0(a0, am, b0, bm)
// add the elements a(am) and b(bm)
collision(am, bm)
// merge everything above a(am) with everything above the found element
merge0(am + 1, a1, bm + 1, b1)
} else {
val bm = -res - 1
// merge everything below a(am) with everything below the found insertion point
merge0(a0, am, b0, bm)
// add a(am)
fromA(am, am + 1, bm)
        // merge everything above a(am) with everything above the found insertion point
merge0(am + 1, a1, bm, b1)
}
}
}
}
/**
* Merge that uses binary search to reduce the number of comparisons
*
* This can be orders of magnitude quicker than a linear merge for types that have a relatively expensive comparison
* operation (e.g. Rational, BigInt, tuples) and will not be much slower than linear merge even in the worst case for
* types that have a very fast comparison (e.g. Int)
*/
object BinaryMerge extends Merge {
def merge[@sp T: Order: ClassTag](a: Array[T], b: Array[T]): Array[T] = {
new ArrayBinaryMerge(a,b).result
}
/*
private[this] def resize[T:ClassTag](x:Array[T], n: Int): Array[T] = {
if (n == x.length)
x
else {
val t = Array.ofDim[T](n)
System.arraycopy(x, 0, t, 0, n)
t
}
}*/
private class ArrayBinaryMerge[@specialized T](a: Array[T], b: Array[T])(implicit o: Order[T], c: ClassTag[T]) extends BinaryMerge {
def compare(ai: Int, bi: Int): Int = o.compare(a(ai), b(bi))
def fromA(a0: Int, a1: Int, bi: Int): Unit = {
System.arraycopy(a, a0, r, ri, a1 - a0)
ri += a1 - a0
}
def fromB(ai: Int, b0: Int, b1: Int): Unit = {
System.arraycopy(b, b0, r, ri, b1 - b0)
ri += b1 - b0
}
def collision(ai: Int, bi: Int): Unit = {
r(ri) = a(ai)
ri += 1
r(ri) = b(bi)
ri += 1
}
val r = Array.ofDim[T](a.length + b.length)
var ri = 0
merge0(0, a.length, 0, b.length)
def result: Array[T] = r
}
}
/**
* Simple linear merge
*/
object LinearMerge extends Merge {
def merge[@sp T: Order : ClassTag](a: Array[T], b: Array[T]): Array[T] = {
val o = implicitly[Order[T]]
val r = Array.ofDim[T](a.length + b.length)
var ri = 0
var ai = 0
var bi = 0
while (ai < a.length && bi < b.length) {
val c = o.compare(a(ai), b(bi))
if (c < 0) {
r(ri) = a(ai)
ri += 1
ai += 1
} else if (c > 0) {
r(ri) = b(bi)
ri += 1
bi += 1
} else {
r(ri) = a(ai)
ri += 1
r(ri) = b(bi)
ri += 1
ai += 1
bi += 1
}
}
while (ai < a.length) {
r(ri) = a(ai)
ri += 1
ai += 1
}
while (bi < b.length) {
r(ri) = b(bi)
ri += 1
bi += 1
}
r
}
}
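// Hedged usage sketch (added for illustration, not part of the original file):
// both strategies return the same result, and equal elements collide, so both
// copies are kept. Assumes `spire.implicits._` provides Order[Int].
object MergeExample {
  import spire.implicits._
  def demo(): Unit = {
    val a = Array(1, 3, 5)
    val b = Array(2, 3, 4)
    println(BinaryMerge.merge(a, b).toList) // List(1, 2, 3, 3, 4, 5)
    println(LinearMerge.merge(a, b).toList) // List(1, 2, 3, 3, 4, 5)
  }
}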
| non/spire | core/src/main/scala/spire/math/Merging.scala | Scala | mit | 4,861 |
package kmeans
import java.util.concurrent._
import scala.collection._
import org.scalatest.FunSuite
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import common._
import scala.math._
object KM extends KMeans
import KM._
@RunWith(classOf[JUnitRunner])
class KMeansSuite extends FunSuite {
def checkClassify(points: GenSeq[Point], means: GenSeq[Point], expected: GenMap[Point, GenSeq[Point]]) {
assert(classify(points, means) == expected,
s"classify($points, $means) should equal to $expected")
}
test("'classify should work for empty 'points' and empty 'means'") {
val points: GenSeq[Point] = IndexedSeq()
val means: GenSeq[Point] = IndexedSeq()
val expected = GenMap[Point, GenSeq[Point]]()
checkClassify(points, means, expected)
}
test("'classify' should work for empty 'points' and 'means' == GenSeq(Point(1,1,1))") {
val points: GenSeq[Point] = IndexedSeq()
val mean = new Point(1, 1, 1)
val means: GenSeq[Point] = IndexedSeq(mean)
val expected = GenMap[Point, GenSeq[Point]]((mean, GenSeq()))
checkClassify(points, means, expected)
}
test("'classify' should work for 'points' == GenSeq((1, 1, 0), (1, -1, 0), (-1, 1, 0), (-1, -1, 0)) and 'means' == GenSeq((0, 0, 0))") {
val p1 = new Point(1, 1, 0)
val p2 = new Point(1, -1, 0)
val p3 = new Point(-1, 1, 0)
val p4 = new Point(-1, -1, 0)
val points: GenSeq[Point] = IndexedSeq(p1, p2, p3, p4)
val mean = new Point(0, 0, 0)
val means: GenSeq[Point] = IndexedSeq(mean)
val expected = GenMap((mean, GenSeq(p1, p2, p3, p4)))
checkClassify(points, means, expected)
}
test("'classify' should work for 'points' == GenSeq((1, 1, 0), (1, -1, 0), (-1, 1, 0), (-1, -1, 0)) and 'means' == GenSeq((1, 0, 0), (-1, 0, 0))") {
val p1 = new Point(1, 1, 0)
val p2 = new Point(1, -1, 0)
val p3 = new Point(-1, 1, 0)
val p4 = new Point(-1, -1, 0)
val points: GenSeq[Point] = IndexedSeq(p1, p2, p3, p4)
val mean1 = new Point(1, 0, 0)
val mean2 = new Point(-1, 0, 0)
val means: GenSeq[Point] = IndexedSeq(mean1, mean2)
val expected = GenMap((mean1, GenSeq(p1, p2)), (mean2, GenSeq(p3, p4)))
checkClassify(points, means, expected)
}
def checkParClassify(points: GenSeq[Point], means: GenSeq[Point], expected: GenMap[Point, GenSeq[Point]]) {
assert(classify(points.par, means.par) == expected,
s"classify($points par, $means par) should equal to $expected")
}
test("'classify with data parallelism should work for empty 'points' and empty 'means'") {
val points: GenSeq[Point] = IndexedSeq()
val means: GenSeq[Point] = IndexedSeq()
val expected = GenMap[Point,GenSeq[Point]]()
checkParClassify(points, means, expected)
}
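  // Hedged extra check (added for illustration): a lone point is assigned to
  // the nearer of two means, and a mean without points maps to an empty
  // sequence, as the tests above encode.
  test("'classify' should assign a single point to the nearest mean") {
    val p = new Point(0.9, 0, 0)
    val near = new Point(1, 0, 0)
    val far = new Point(-1, 0, 0)
    val expected = GenMap[Point, GenSeq[Point]]((near, GenSeq(p)), (far, GenSeq()))
    checkClassify(IndexedSeq(p), IndexedSeq(near, far), expected)
  }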
}
| mitochon/hexercise | src/mooc/parprog/week3kmeans/src/test/scala/kmeans/KMeansSuite.scala | Scala | mit | 2,769 |
package mesosphere.marathon
package core.matcher.base
import mesosphere.marathon.core.instance.Instance
import mesosphere.marathon.core.launcher.InstanceOp
import mesosphere.marathon.state.PathId
import org.apache.mesos.{Protos => Mesos}
import scala.concurrent.Future
object OfferMatcher {
/**
   * An InstanceOp with an [[InstanceOpSource]].
   *
   * The [[InstanceOpSource]] is informed whether the op is ultimately sent to Mesos or if it is rejected
* (e.g. by throttling logic).
*/
case class InstanceOpWithSource(source: InstanceOpSource, op: InstanceOp) {
def instanceId: Instance.Id = op.instanceId
def accept(): Unit = source.instanceOpAccepted(op)
def reject(reason: String): Unit = source.instanceOpRejected(op, reason)
}
/**
   * Reply from an offer matcher to a MatchOffer. If the offer matcher could not match the offer in any way, it
   * should simply leave the opsWithSource collection empty.
   *
   * To increase fairness between matchers, each normal matcher should schedule as few operations as possible per
   * offer per match, e.g. a single op for instance launches without reservations. Multiple launches could be used
   * if the instances need to be co-located or if the operations are intrinsically dependent on each other.
   * The OfferMultiplexer tries to summarize suitable matches from multiple offer matchers into one response.
   *
   * A MatchedInstanceOps reply does not guarantee that these operations can actually be executed.
   * The sender of the message should set up some kind of timeout mechanism and handle
   * instanceOpAccepted/instanceOpRejected calls appropriately.
*
* @param offerId the identifier of the offer
* @param opsWithSource the ops that should be executed on that offer including the source of each op
* @param resendThisOffer true, if this offer could not be processed completely (e.g. timeout)
* and should be resend and processed again
*/
case class MatchedInstanceOps(offerId: Mesos.OfferID, opsWithSource: Seq[InstanceOpWithSource], resendThisOffer: Boolean = false) {
/** all included [InstanceOp] without the source information. */
val ops: Seq[InstanceOp] = opsWithSource.iterator.map(_.op).toSeq
}
object MatchedInstanceOps {
def noMatch(offerId: Mesos.OfferID, resendThisOffer: Boolean = false): MatchedInstanceOps =
new MatchedInstanceOps(offerId, Seq.empty[InstanceOpWithSource], resendThisOffer = resendThisOffer)
}
trait InstanceOpSource {
def instanceOpAccepted(instanceOp: InstanceOp): Unit
def instanceOpRejected(instanceOp: InstanceOp, reason: String): Unit
}
}
/**
* Tries to match offers with some instances.
*/
trait OfferMatcher {
/**
* Process offer and return the ops that this matcher wants to execute on this offer.
*
* The offer matcher can expect either a instanceOpAccepted or a instanceOpRejected call
* for every returned `org.apache.mesos.Protos.TaskInfo`.
*/
def matchOffer(offer: Mesos.Offer): Future[OfferMatcher.MatchedInstanceOps]
/**
* We can optimize the offer routing for different offer matcher in case there are reserved resources.
* A defined precedence is used to filter incoming offers with reservations that apply to this filter.
* If the filter matches, the offer matcher manager has higher priority than other matchers.
*/
def precedenceFor: Option[PathId] = None
}
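/**
 * Hedged sketch (added for illustration, not part of Marathon): a trivial
 * matcher that declines every offer. Real matchers inspect the offer's
 * resources and answer with InstanceOps.
 */
class NoMatchOfferMatcher extends OfferMatcher {
  override def matchOffer(offer: Mesos.Offer): Future[OfferMatcher.MatchedInstanceOps] =
    Future.successful(OfferMatcher.MatchedInstanceOps.noMatch(offer.getId))
}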
| mesosphere/marathon | src/main/scala/mesosphere/marathon/core/matcher/base/OfferMatcher.scala | Scala | apache-2.0 | 3,447 |
package eu.execom.FabutPresentation.rest
import java.util._
import eu.execom.FabutPresentation.api._
import eu.execom.FabutPresentation.persistence._
import eu.execom.FabutPresentation.util._
import org.joda.time.DateTime
import org.json4s._
import org.scalatra._
import sun.misc.BASE64Encoder
abstract class AbstractSecuredServlet extends ScalatraServlet with Logging {
val SECURITY_TOKEN = "X-FABUTPRESENTATION-AUTH"
protected implicit val jsonFormats: Formats = DefaultFormats + new ArrayByteSerializer + DateTimeSerializer + new UUIDSerializer + new SortOrderSerializer + new InvitationStatusSerializer + new FriendRequestStatusSerializer
error {
case BadRequestException(code) =>
logger.trace("BadRequest error occurred with code: " + code)
halt(BadRequest(body = errorJson(code, "BadRequest error occurred with code")))
case DataConstraintException(code) =>
logger.trace("Data constraint error occurred with code: " + code)
halt(BadRequest(body = errorJson(code, "Data constraint error occurred with code")))
case UnauthorizedException(code) =>
logger.trace("Unauthorized error occurred with code: " + code)
halt(Unauthorized(body = errorJson(code, "Unauthorized error occurred with code")))
case e:MappingException =>
logger.warn(s"Bad data request, json object invalid format, ${e.msg}")
halt(BadRequest(body = errorJson("JSON_MAPPING_EXCEPTION", s"Bad data request, json object invalid format, ${e.msg}")))
case e =>
logger.error("Unknown error occurred", e)
halt(InternalServerError(reason = e.toString))
}
def errorJson(code:String, message: String) = {
s"""{ "code": "$code", "message": "$message" }""".stripMargin
}
def securityToken: String = (cookies.get(SECURITY_TOKEN), request.header(SECURITY_TOKEN)) match {
case (Some(authCode), _) =>
logger.trace("Requested security token is found in cookie")
authCode
case (_, Some(authCode)) =>
logger.trace("Requested security token is found in header cookie")
authCode
case _ =>
logger.error("Security token doesn't exist")
halt(Unauthorized(reason = "USER_CREDENTIALS_ARE_INVALID"))
}
def securityToken_=(token: String): Unit = cookies.set(SECURITY_TOKEN, token)
}
class ArrayByteSerializer extends Serializer[Array[Byte]] {
private val MyClassClass = classOf[Array[Byte]]
def deserialize(implicit format: Formats): PartialFunction[(TypeInfo, JValue), Array[Byte]] = {
case (TypeInfo(MyClassClass, _), json) => json match {
case JString(content) =>
new sun.misc.BASE64Decoder().decodeBuffer(content)
case x => throw new MappingException("Can't convert " + x + " to Array[Byte]")
}
}
def serialize(implicit formats: Formats): PartialFunction[Any, JValue] = {
case x: Array[Byte] => JString(new BASE64Encoder().encode(x))
}
}
case object DateTimeSerializer extends CustomSerializer[DateTime](format => ( {
case JString(s) =>
val milliseconds: Long = format.dateFormat.parse(s).map(_.getTime).getOrElse(throw new MappingException("Invalid date format " + s))
new DateTime(milliseconds)
}, {
case d: DateTime => JString(format.dateFormat.format(d.toDate))
}
))
class UUIDSerializer extends Serializer[UUID] {
private val MyClassClass = classOf[UUID]
def deserialize(implicit format: Formats): PartialFunction[(TypeInfo, JValue), UUID] = {
case (TypeInfo(MyClassClass, _), json) => json match {
case JString(content) => UUID.fromString(content)
case x => throw new MappingException("Can't convert " + x + " to UUID")
}
}
def serialize(implicit formats: Formats): PartialFunction[Any, JValue] = {
case x: UUID => JString(x.toString)
}
}
class SortOrderSerializer extends Serializer[SortOrder] {
private val MyClassClass = classOf[SortOrder]
def deserialize(implicit format: Formats): PartialFunction[(TypeInfo, JValue), SortOrder] = {
case (TypeInfo(MyClassClass, _), json) => json match {
case JString(content) => SortOrder.withName(content)
case x => throw new MappingException("Can't deserialize " + x + " to SortOrder")
}
}
def serialize(implicit formats: Formats): PartialFunction[Any, JValue] = {
case x: SortOrder => JString(x.name)
}
}
class InvitationStatusSerializer extends Serializer[InvitationStatus] {
private val MyClassClass = classOf[InvitationStatus]
def deserialize(implicit format: Formats): PartialFunction[(TypeInfo, JValue), InvitationStatus] = {
case (TypeInfo(MyClassClass, _), json) => json match {
case JString(content) => InvitationStatus.withName(content)
case x => throw new MappingException("Can't deserialize " + x + " to InvitationStatus")
}
}
def serialize(implicit formats: Formats): PartialFunction[Any, JValue] = {
case x: InvitationStatus => JString(x.name)
}
}
class FriendRequestStatusSerializer extends Serializer[FriendRequestStatus] {
private val MyClassClass = classOf[FriendRequestStatus]
def deserialize(implicit format: Formats): PartialFunction[(TypeInfo, JValue), FriendRequestStatus] = {
case (TypeInfo(MyClassClass, _), json) => json match {
case JString(content) => FriendRequestStatus.withName(content)
case x => throw new MappingException("Can't deserialize " + x + " to FriendRequestStatus")
}
}
def serialize(implicit formats: Formats): PartialFunction[Any, JValue] = {
case x: FriendRequestStatus => JString(x.name)
}
}
| idostanic/FabutPresentation | src/main/scala/eu/execom/FabutPresentation/rest/AbstractSecuredServlet.scala | Scala | apache-2.0 | 5,499 |
package aerospiker
import aerospiker.listener._
import cats.MonadError
import cats.data.Kleisli
import cats.syntax.either._
import com.aerospike.client.AerospikeException
import io.circe.{ Decoder, Encoder }
import scala.collection.generic.CanBuildFrom
trait Functions {
private type ME[F[_]] = MonadError[F, Throwable]
def pure[F[_]: ME, A](a: A): Action[F, A] = Kleisli.pure(a)
def lift[F[_]: ME, A](fa: F[A]): Action[F, A] = Kleisli.lift(fa)
type Func[A] = Either[Throwable, A] => Unit
protected def getAsync[A: Decoder](
fa: Func[A],
settings: Settings,
binNames: String*
)(implicit c: AerospikeClient): Unit
protected def putAsync[A: Encoder](
fa: Func[Unit],
settings: Settings, bins: A
)(implicit c: AerospikeClient): Unit
protected def deleteAsync(
fa: Func[Boolean],
settings: Settings
)(implicit c: AerospikeClient): Unit
protected def allAsync[C[_], A: Decoder](
fa: Func[C[(Key, Option[Record[A]])]],
settings: Settings,
binNames: String*
)(
implicit
c: AerospikeClient,
cbf: CanBuildFrom[Nothing, (Key, Option[Record[A]]), C[(Key, Option[Record[A]])]]
): Unit
protected def existsAsync(
fa: Func[Boolean],
settings: Settings
)(implicit c: AerospikeClient): Unit
}
private[aerospiker] trait Functions0 extends Functions {
protected def getAsync[A: Decoder](
fa: Func[A],
settings: Settings,
binNames: String*
)(implicit c: AerospikeClient): Unit = try {
Command.get[A](c, settings, binNames,
Some(new RecordListener[A] {
override def onFailure(e: AerospikeException): Unit = fa(GetError(settings.key, e).asLeft)
override def onSuccess(key: Key, record: Option[Record[A]]): Unit = record match {
case None => fa(NoSuchKey(settings.key).asLeft)
case Some(r) => r.bins match {
case None => fa(GetError(settings.key).asLeft)
case Some(bins) => fa(bins.asRight)
}
}
}))
} catch {
case e: Throwable => fa(e.asLeft)
}
protected def putAsync[A: Encoder](
fa: Func[Unit],
settings: Settings, bins: A
)(implicit c: AerospikeClient): Unit = try {
Command.put(c, settings, bins,
Some(new WriteListener {
override def onFailure(e: AerospikeException): Unit = fa(PutError(settings.key, e).asLeft)
override def onSuccess(key: Key): Unit = fa(().asRight)
}))
} catch {
case e: Throwable => fa(e.asLeft)
}
protected def deleteAsync(
fa: Func[Boolean],
settings: Settings
)(implicit c: AerospikeClient): Unit = try {
Command.delete(c, settings,
Some(new DeleteListener {
override def onFailure(e: AerospikeException): Unit = fa(DeleteError(settings.key, e).asLeft)
override def onSuccess(key: Key, exists: Boolean): Unit = fa(exists.asRight)
}))
} catch {
case e: Throwable => fa(e.asLeft)
}
protected def allAsync[C[_], A: Decoder](
fa: Func[C[(Key, Option[Record[A]])]],
settings: Settings,
binNames: String*
)(
implicit
c: AerospikeClient,
cbf: CanBuildFrom[Nothing, (Key, Option[Record[A]]), C[(Key, Option[Record[A]])]]
): Unit = try {
val builder = cbf.apply()
Command.all[A](c, settings, binNames,
Some(new RecordSequenceListener[A] {
def onRecord(key: Key, record: Option[Record[A]]): Unit = builder += key -> record
def onFailure(e: AerospikeException): Unit = fa(e.asLeft)
def onSuccess(): Unit = fa(builder.result().asRight)
}))
} catch {
case e: Throwable => fa(e.asLeft)
}
def existsAsync(
fa: Func[Boolean],
settings: Settings
)(implicit c: AerospikeClient): Unit = try {
Command.exists(c, settings,
Some(new ExistsListener {
def onFailure(e: AerospikeException): Unit = fa(e.asLeft)
def onSuccess(key: Key, exists: Boolean): Unit = fa(exists.asRight)
}))
} catch {
case e: Throwable => fa(e.asLeft)
}
}
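// Hedged usage sketch (added for illustration, not part of the original file):
// bridging the callback style to a Future. Assumes Scala 2.12+ for
// `Either#toTry`, plus an implicit AerospikeClient and a Settings value in scope.
//
//   val done = scala.concurrent.Promise[Boolean]()
//   existsAsync(e => done.complete(e.toTry), settings)
//   // done.future completes with the exists flag or with the failure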
| tkrs/aerospiker | core/src/main/scala/aerospiker/Functions.scala | Scala | mit | 3,974 |
// Copyright (c) 2020 Ben Zimmer. All rights reserved.
// Objects that represent flight paths and the various information
// to be tracked along with them.
package bdzimmer.orbits
import scala.collection.immutable.Seq
// This is sort of a parallel version of how flights
// are represented in Secondary
trait FlightParams {
val ship: Spacecraft
val origName: String // might degeneralize some of this eventually
val destName: String
val orig: Double => OrbitalState
val dest: Double => OrbitalState
val startDate: CalendarDateTime
val endDate: CalendarDateTime
val passengers: List[String]
val faction: String
val description: String
override def toString: String = {
startDate.dateString + " - " +
ship.name.replace("*", "") + " - " +
origName + " -> " + destName
}
}
// Basic FlightParams we've had up until this point
// Either constant acceleration or constant velocity depending on ship type
case class SimpleFlightParams(
ship: Spacecraft,
origName: String,
destName: String,
orig: Double => OrbitalState,
dest: Double => OrbitalState,
startDate: CalendarDateTime,
endDate: CalendarDateTime,
passengers: List[String],
faction: String,
description: String
) extends FlightParams
// FlightParams based on a precalculated path
case class PreCalculatedFlightParams(
ship: Spacecraft,
origName: String,
destName: String,
orig: Double => OrbitalState,
dest: Double => OrbitalState,
startDate: CalendarDateTime,
endDate: CalendarDateTime,
passengers: List[String],
faction: String,
description: String,
path: List[(Double, OrbitalState)] // precalculated path
) extends FlightParams
object FlightParams {
def paramsToFun(fp: FlightParams): (FlightFn, scala.collection.immutable.Seq[Double]) = {
val startDateJulian = fp.startDate.julian
val endDateJulian = fp.endDate.julian
fp match {
case sfp: SimpleFlightParams => {
val res = if ((endDateJulian - startDateJulian) > 1.0) {
// one tick per hour
1.0 / 24.0
} else {
// one tick per minute
1.0 / 24.0 / 60.0
}
// don't ask
val ticks = (startDateJulian until endDateJulian by res).toList.toIndexedSeq
// find positions of origin and destination bodies
val origState = sfp.orig(startDateJulian)
val destState = sfp.dest(endDateJulian)
val flightFn = sfp.ship match {
case _: ConstAccelCraft => ConstAccelFlightFn(
origState.position, destState.position,
startDateJulian, endDateJulian - startDateJulian)
case _: ConstVelCraft => ConstVelFlightFn(
origState.position, destState.position,
startDateJulian, endDateJulian - startDateJulian
)
}
(flightFn, ticks)
}
case pcfp: PreCalculatedFlightParams => {
val ticks = pcfp.path.map(_._1)
val flightFn = new LinearInterpFlightFn(
pcfp.startDate.julian, pcfp.endDate.julian, pcfp.path)
(flightFn, ticks)
}
case _ => (new DummyFlightFn, Seq(0.0d))
}
}
}
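// Hedged usage sketch (added for illustration; assumes a FlightParams value
// `fp` built elsewhere in this project):
//
//   val (flightFn, ticks) = FlightParams.paramsToFun(fp)
//   // one sample per tick; tick spacing is an hour for flights longer than
//   // a day and a minute otherwise (see paramsToFun above)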
| bdzimmer/orbits | src/main/scala/bdzimmer/orbits/FlightParams.scala | Scala | bsd-3-clause | 3,207 |
package com.eevolution.context.dictionary.domain.api.service
import com.eevolution.context.dictionary.api
import com.eevolution.context.dictionary.domain.model.LdapProcessor
/**
* Copyright (C) 2003-2017, e-Evolution Consultants S.A. , http://www.e-evolution.com
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
* Email: [email protected], http://www.e-evolution.com , http://github.com/EmerisScala
* Created by [email protected] , www.e-evolution.com on 10/11/17.
*/
/**
* Ldap Processor Service
*/
trait LdapProcessorService extends api.Service[LdapProcessor, Int] {
//Definition
}
| adempiere/ADReactiveSystem | dictionary-api/src/main/scala/com/eevolution/context/dictionary/domain/api/service/LdapProcessorService.scala | Scala | gpl-3.0 | 1,233 |
package com.github.skozlov.turing
import com.github.skozlov.turing.CellState._
import com.github.skozlov.turing.Direction._
import com.github.skozlov.turing.State.NonTerminal
import scala.collection.mutable.ArrayBuffer
import com.github.skozlov.turing.Tape._
/**
 * A sequence of cells considered a component of a Turing machine.
 * Tapes represented by the instances of this class can be expanded to the right
 * or shortened from the right if necessary, but extended classes are allowed to change this behavior.
 * Actually, an instance does not store the cells that contain `0`
 * and lie to the right of both the caret position and the rightmost cell containing `1`.
 * @param initData a sequence of the initial states of the cells.
 * The cells to the right of the rightmost cell containing `1` are ignored (though the first cell is always kept).
* For example,
* the instance created via `new Tape(Zero, One, One, Zero, Zero)` will contain only the first 3 cells.
*/
class Tape(initData: CellState*) extends Equals{
private val _cells: ArrayBuffer[CellState] = {
val size: Int = Math.max(1, initData.lastIndexOf(One) + 1)
ArrayBuffer((if(initData.isEmpty) Seq(Zero) else initData take size):_*)
}
private var _caretIndex: Int = 0
/**
* @return a state of the cell at the current position of the caret.
*/
def currentCell: CellState = _cells(_caretIndex)
/**
* Executes the program on the current tape until the terminal state is reached.
* @throws Tape.OutOfBoundsException if the program tries to move caret out of the permitted bounds of the tape.
* @throws Tape.OutOfBoundsException.Left if `caretIndex == 0` and the program tries to move the caret left.
*/
def apply(program: Program): Unit = apply(program.initialState)
/**
* Executes a program starting from the provided state until the terminal state is reached.
* @throws Tape.OutOfBoundsException if the program tries to move caret out of the permitted bounds of the tape.
* @throws Tape.OutOfBoundsException.Left if `caretIndex == 0` and the program tries to move the caret left.
*/
protected def apply(state: State): Unit = state match {
case NonTerminal(_, commands) =>
val command = commands(currentCell)
apply(command)
apply(command.nextState)
case _ =>
}
/**
* Performs the command on the current tape.
* @throws Tape.OutOfBoundsException if the command tries to move caret out of the permitted bounds of the tape.
* @throws Tape.OutOfBoundsException.Left if `caretIndex == 0` and the command tries to move the caret left.
*/
protected def apply(command: Command): Unit = {
command.cellNewState foreach {_cells(_caretIndex) = _}
command.movement foreach moveCaret
}
/**
* Moves the caret to the direction.
* @throws Tape.OutOfBoundsException if it is tried to move caret out of the permitted bounds of the tape.
* @throws Tape.OutOfBoundsException.Left if `direction == Direction.Left && caretIndex == 0`.
*/
protected def moveCaret(direction: Direction): Unit = {
if(direction == Direction.Left && _caretIndex == 0){
throw new OutOfBoundsException.Left()
}
if(_caretIndex == _cells.size - 1){
if(direction == Direction.Right){
_cells append Zero
}
else if(currentCell == Zero){
_cells.remove(_caretIndex)
}
}
_caretIndex = if(direction == Direction.Left) _caretIndex - 1 else _caretIndex + 1
}
/**
* @return a 0-based index representing the current position of the caret.
*/
def caretIndex: Int = _caretIndex
/**
* @return currently stored cells.
     * The cells that contain `0` and lie to the right of both the caret position
     * and the rightmost cell containing `1` are not stored.
*/
def cells: Seq[CellState] = _cells
/**
* @return a string representation of the tape.
* @example `"01>1"` is returned for the tape containing (`0`, `1`, `1`) with `caretIndex == 2`
*/
override def toString: String = {
        val prefix: String = _cells.take(_caretIndex).mkString // all cells to the left of the caret
val suffix: String = _cells.takeRight(_cells.size - _caretIndex).mkString
s"$prefix>$suffix"
}
override def hashCode(): Int = (cells, _caretIndex).hashCode()
/**
* @return true if and only if the following conditions are true:
* <br />- `obj` is an instance of [[Tape]];
* <br />- {{{obj.asInstanceOf[Tape] canEqual this}}}
* <br />- {{{(this._cells == that._cells) && (this._caretIndex == that._caretIndex)}}}
*/
override def equals(obj: scala.Any): Boolean = obj match {
case that: Tape =>
(that != null) && (that canEqual this) &&
(this._cells == that._cells) && (this._caretIndex == that._caretIndex)
case _ => false
}
/**
* @return `true` if and only if `that` is an instance of [[Tape]].
*/
override def canEqual(that: Any): Boolean = that != null && that.isInstanceOf[Tape]
}
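/**
 * Hedged usage sketch (added for illustration, not part of the original file):
 * a tape created as `new Tape(Zero, One, One)` starts with the caret on the
 * first cell, so its string form is ">011".
 */
object TapeExample {
    def demo: String = new Tape(Zero, One, One).toString // ">011"
}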
object Tape{
/**
* A right-bounded tape.
* `caretIndex` must always be less than `otherCells.size+1`.
* @param cell1 the first cell.
* @param otherCells the 2nd, 3rd, etc. cells, if present.
*/
class Finite(cell1: CellState, otherCells: CellState*) extends Tape(cell1 +: otherCells:_*) {
/**
* Moves the caret to the direction.
* @throws Tape.OutOfBoundsException if it is tried to move caret out of the permitted bounds of the tape.
* @throws Tape.OutOfBoundsException.Left if {{{direction == Direction.Left && caretIndex == 0}}}
* @throws Tape.OutOfBoundsException.Right if
* {{{direction == Direction.Right && caretIndex == otherCells.size}}}
*/
override protected def moveCaret(direction: Direction): Unit = {
if(direction == Direction.Right && caretIndex == otherCells.size){
throw new OutOfBoundsException.Right
} else super.moveCaret(direction)
}
}
/**
* An exception being thrown when a program tries to move the caret out of the permitted bounds of the tape.
*/
class OutOfBoundsException(message: String = null, cause: Option[Throwable] = None)
extends RuntimeException(message){
cause foreach initCause
}
object OutOfBoundsException{
/**
         * An exception thrown when a program tries to move the caret left from the left bound of the tape.
*/
class Left(message: String = "Cannot move left", cause: Option[Throwable] = None)
extends OutOfBoundsException(message, cause)
/**
         * An exception thrown when a program tries to move the caret right from the right bound of the tape.
*/
class Right(message: String = "Cannot move right", cause: Option[Throwable] = None)
extends OutOfBoundsException(message, cause)
}
}
| skozlov/turing | base/src/main/scala/com/github/skozlov/turing/Tape.scala | Scala | apache-2.0 | 6,623 |
package build
import java.io.{Reader, StringReader}
import models._
import org.apache.lucene.analysis.standard.StandardAnalyzer
import org.apache.lucene.codecs.Codec
import org.apache.lucene.document.Field.Store
import org.apache.lucene.document._
import org.apache.lucene.index.FieldInfo.IndexOptions
import org.apache.lucene.index.IndexWriterConfig.OpenMode
import org.apache.lucene.index.{CheckIndex, IndexWriter, IndexWriterConfig, Term}
import org.apache.lucene.search.{BooleanClause, BooleanQuery, TermQuery}
import org.apache.lucene.store.FSDirectory
import org.apache.lucene.util.{Version => LucVersion}
import org.w3c.dom.{Element, Node, NodeList, Text}
import org.w3c.tidy.Tidy
import play.api.Logger
import settings.Global
import util.ResourceUtil
import scala.util.Try
import scala.collection.JavaConverters._
trait DocsIndexer {
def index(project: Project, version: String): Try[Unit]
def index(project: Project, version: String, file: FileWithContent): Try[Unit]
def index(publication: PublicationWithContent): Try[Unit]
def cleanPublicationIndex(publication: Publication): Try[Unit]
def cleanProjectAndVersionIndex(project: Project, version: String): Try[Unit]
def cleanProjectAndVersionFileIndex(project: Project, version: String, file: File): Try[Unit]
def checkIndex: Try[Unit]
}
object LuceneDocsIndexer {
private val LuceneVersion = LucVersion.LUCENE_43
private def indexWriterConfig: IndexWriterConfig = {
val iwc = new IndexWriterConfig(LuceneDocsIndexer.LuceneVersion, new StandardAnalyzer(LuceneDocsIndexer.LuceneVersion))
iwc.setOpenMode(OpenMode.CREATE_OR_APPEND)
iwc
}
}
trait LuceneDocsIndexer extends DocsIndexer {
self: DirectoryHandler =>
import util.ResourceUtil._
def cleanProjectAndVersionFileIndex(project: Project, version: String, file: File): Try[Unit] = Try {
val indexWriter: IndexWriter = {
val dir = FSDirectory.open(indexDir)
new IndexWriter(dir, LuceneDocsIndexer.indexWriterConfig)
}
val booleanQuery = new BooleanQuery()
booleanQuery.add(new TermQuery(new Term("project", project.url_key)), BooleanClause.Occur.MUST)
booleanQuery.add(new TermQuery(new Term("version", version)), BooleanClause.Occur.MUST)
booleanQuery.add(new TermQuery(new Term("path", file.url_key)), BooleanClause.Occur.MUST)
doWith(indexWriter) { writer =>
writer.deleteDocuments(booleanQuery)
writer.commit()
}
}.recover {
case e: Exception =>
Global.builds.createFailure(project.guid, version, "Cleaning Index failed - "+ e.getMessage)
throw e
}
def cleanProjectAndVersionIndex(project: Project, version: String): Try[Unit] = Try {
val indexWriter: IndexWriter = {
val dir = FSDirectory.open(indexDir)
new IndexWriter(dir, LuceneDocsIndexer.indexWriterConfig)
}
val booleanQuery = new BooleanQuery()
booleanQuery.add(new TermQuery(new Term("project", project.url_key)), BooleanClause.Occur.MUST)
booleanQuery.add(new TermQuery(new Term("version", version)), BooleanClause.Occur.MUST)
doWith(indexWriter) { writer =>
writer.deleteDocuments(booleanQuery)
writer.commit()
}
}.recover {
case e: Exception =>
Global.builds.createFailure(project.guid, version, "Cleaning Index failed - "+ e.getMessage)
throw e
}
def cleanPublicationIndex(publication: Publication): Try[Unit] = Try {
val indexWriter: IndexWriter = {
val dir = FSDirectory.open(indexDir)
new IndexWriter(dir, LuceneDocsIndexer.indexWriterConfig)
}
val booleanQuery = new BooleanQuery()
booleanQuery.add(new TermQuery(new Term("publication", publication.url_key)), BooleanClause.Occur.MUST)
doWith(indexWriter) { writer =>
writer.deleteDocuments(booleanQuery)
writer.commit()
}
}
def index(project: Project, version: String, file: FileWithContent): Try[Unit] = {
cleanProjectAndVersionFileIndex(project, version, file.file).map { _ =>
val index: IndexWriter = {
val dir = FSDirectory.open(indexDir)
new IndexWriter(dir, LuceneDocsIndexer.indexWriterConfig)
}
doWith(index) { indx =>
indexFile(project, version, file, indx)
indx.commit()
}
Global.gilesS3Client.backupIndex(indexDir)
}.recover {
case e: Exception =>
Global.builds.createFailure(project.guid, version, "Index failed - "+ e.getMessage)
throw e
}
}
def index(project: Project, version: String): Try[Unit] = {
cleanProjectAndVersionIndex(project, version).map { _ =>
val index: IndexWriter = {
val dir = FSDirectory.open(indexDir)
new IndexWriter(dir, LuceneDocsIndexer.indexWriterConfig)
}
doWith(index) { indx =>
indexProject(project, version, indx)
indx.commit()
}
Global.gilesS3Client.backupIndex(indexDir)
}.recover {
case e: Exception =>
Global.builds.createFailure(project.guid, version, "Index failed - "+ e.getMessage)
throw e
}
}
def index(publication: PublicationWithContent): Try[Unit] = Try {
val index: IndexWriter = {
val dir = FSDirectory.open(indexDir)
new IndexWriter(dir, LuceneDocsIndexer.indexWriterConfig)
}
doWith(index) { indx =>
val tidy = new Tidy()
tidy.setQuiet(true)
tidy.setShowWarnings(false)
ResourceUtil.doWith(new StringReader(publication.content)) { stream =>
val root = tidy.parseDOM(stream, null)
val rawDoc = Option(root.getDocumentElement)
val doc = new org.apache.lucene.document.Document()
rawDoc.flatMap(getBody).foreach { body =>
val fieldType = new FieldType()
fieldType.setIndexed(true)
fieldType.setStored(true)
fieldType.setStoreTermVectors(true)
fieldType.setIndexOptions(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS)
fieldType.setStoreTermVectorOffsets(true)
fieldType.setStoreTermVectorPayloads(true)
fieldType.setStoreTermVectorPositions(true)
fieldType.setTokenized(true)
doc.add(new Field("pub-body", body, fieldType))
doc.add(new StringField("pub-title",rawDoc.flatMap(getTitle).getOrElse(publication.publication.title), Store.YES))
doc.add(new StringField("publication", publication.publication.url_key, Store.YES))
}
}
indx.commit()
}
Global.gilesS3Client.backupIndex(indexDir)
}
def checkIndex: Try[Unit] = Try {
doWith(FSDirectory.open(indexDir)) { dir =>
val indexCheck = new CheckIndex(dir)
indexCheck.setCrossCheckTermVectors(true)
indexCheck.setInfoStream(System.out, true)
val result = indexCheck.checkIndex()
if(!result.clean) {
Logger.warn(s"${result.totLoseDocCount} documents will be lost")
Logger.warn(s"NOTE: will write new segments file; this will remove ${result.totLoseDocCount} docs from the index.")
indexCheck.fixIndex(result, Codec.getDefault)
Logger.info(s"Wrote new segments file '${result.segmentsFileName}'")
Global.gilesS3Client.backupIndex(indexDir)
}
}
}
private def indexProject(project: Project, version: String, index: IndexWriter): Unit = {
Logger.debug("Indexing Project ["+project.name+"]")
import dao.util.FileConverters._
Global.files.findAllByProjectGuidAndVersion(project.guid, version).
map(_.withContent).foreach { file =>
indexFile(project, version, file, index)
}
}
private def indexFile(project: Project, version: String, file: FileWithContent, index: IndexWriter): Unit = {
ResourceUtil.doWith(new StringReader(file.content)) { stream =>
index.addDocument(getDocument(project, version, file, stream))
}
}
private def getDocument(project: Project, version: String, file: FileWithContent, html: Reader): Document = {
val tidy = new Tidy()
tidy.setQuiet(true)
tidy.setShowWarnings(false)
val root = tidy.parseDOM(html, null)
val rawDoc = Option(root.getDocumentElement)
val doc = new org.apache.lucene.document.Document()
rawDoc.flatMap(getBody).foreach { body =>
val fieldType = new FieldType()
fieldType.setIndexed(true)
fieldType.setStored(true)
fieldType.setStoreTermVectors(true)
fieldType.setIndexOptions(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS)
fieldType.setStoreTermVectorOffsets(true)
fieldType.setStoreTermVectorPayloads(true)
fieldType.setStoreTermVectorPositions(true)
fieldType.setTokenized(true)
doc.add(new Field("body", body, fieldType))
doc.add(new StringField("title",rawDoc.flatMap(getTitle).getOrElse(file.file.title), Store.YES))
doc.add(new StringField("filename", file.file.filename, Store.YES))
doc.add(new StringField("path", file.file.url_key, Store.YES))
doc.add(new StringField("project", project.url_key, Store.YES))
doc.add(new StringField("version", version, Store.YES))
}
doc
}
private def getTitle(rawDoc: Element): Option[String] = {
    rawDoc.getElementsByTagName("title").iter.toSeq.headOption.flatMap { titleElement =>
      Option(titleElement.getFirstChild).map(_.asInstanceOf[Text].getData)
    }
}
private def getBody(rawDoc: Element): Option[String] = {
rawDoc.getElementsByTagName("body").iter.toSeq.headOption.map(getText)
}
private def getText(node: Node): String = {
val children: Iterator[Node] = node.getChildNodes.iter
val sb = new StringBuffer()
for(child <- children) {
child.getNodeType match {
case Node.ELEMENT_NODE =>
sb.append(getText(child))
sb.append(" ")
        case Node.TEXT_NODE =>
          sb.append(child.asInstanceOf[Text].getData)
        case _ => // ignore other node types (comments, processing instructions, etc.)
      }
}
sb.toString
}
private implicit class RichNodeList(nodeList: NodeList) {
def iter: Iterator[Node] = Iterator.tabulate(nodeList.getLength)(nodeList.item)
}
}
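// Hedged usage sketch (added for illustration, not part of the original file):
// a concrete indexer mixes the Lucene implementation with a DirectoryHandler
// that supplies `indexDir` (other members elided):
//
//   val indexer = new LuceneDocsIndexer with DirectoryHandler { ... }
//   indexer.index(project, "1.0.0") // Try[Unit]; failures are also recorded via Global.builds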
| grahamar/Giles | app/build/DocsIndexer.scala | Scala | apache-2.0 | 9,979 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.consumer
import kafka.api.{OffsetRequest, Request, FetchRequestBuilder, FetchResponsePartitionData}
import kafka.cluster.BrokerEndPoint
import kafka.message.ByteBufferMessageSet
import kafka.server.{PartitionFetchState, AbstractFetcherThread}
import kafka.common.{ErrorMapping, TopicAndPartition}
import scala.collection.Map
import ConsumerFetcherThread._
import org.apache.kafka.common.TopicPartition
class ConsumerFetcherThread(name: String,
val config: ConsumerConfig,
sourceBroker: BrokerEndPoint,
partitionMap: Map[TopicPartition, PartitionTopicInfo],
val consumerFetcherManager: ConsumerFetcherManager)
extends AbstractFetcherThread(name = name,
clientId = config.clientId,
sourceBroker = sourceBroker,
fetchBackOffMs = config.refreshLeaderBackoffMs,
isInterruptible = true) {
type REQ = FetchRequest
type PD = PartitionData
private val clientId = config.clientId
private val fetchSize = config.fetchMessageMaxBytes
private val simpleConsumer = new SimpleConsumer(sourceBroker.host, sourceBroker.port, config.socketTimeoutMs,
config.socketReceiveBufferBytes, config.clientId)
private val fetchRequestBuilder = new FetchRequestBuilder().
clientId(clientId).
replicaId(Request.OrdinaryConsumerId).
maxWait(config.fetchWaitMaxMs).
minBytes(config.fetchMinBytes).
requestVersion(kafka.api.FetchRequest.CurrentVersion)
override def initiateShutdown(): Boolean = {
val justShutdown = super.initiateShutdown()
if (justShutdown && isInterruptible)
simpleConsumer.disconnectToHandleJavaIOBug()
justShutdown
}
override def shutdown(): Unit = {
super.shutdown()
simpleConsumer.close()
}
// process fetched data
def processPartitionData(topicPartition: TopicPartition, fetchOffset: Long, partitionData: PartitionData) {
val pti = partitionMap(topicPartition)
if (pti.getFetchOffset != fetchOffset)
throw new RuntimeException("Offset doesn't match for partition [%s,%d] pti offset: %d fetch offset: %d"
.format(topicPartition.topic, topicPartition.partition, pti.getFetchOffset, fetchOffset))
pti.enqueue(partitionData.underlying.messages.asInstanceOf[ByteBufferMessageSet])
}
// handle a partition whose offset is out of range and return a new fetch offset
def handleOffsetOutOfRange(topicPartition: TopicPartition): Long = {
val startTimestamp = config.autoOffsetReset match {
case OffsetRequest.SmallestTimeString => OffsetRequest.EarliestTime
case OffsetRequest.LargestTimeString => OffsetRequest.LatestTime
case _ => OffsetRequest.LatestTime
}
val topicAndPartition = new TopicAndPartition(topicPartition.topic, topicPartition.partition)
val newOffset = simpleConsumer.earliestOrLatestOffset(topicAndPartition, startTimestamp, Request.OrdinaryConsumerId)
val pti = partitionMap(topicPartition)
pti.resetFetchOffset(newOffset)
pti.resetConsumeOffset(newOffset)
newOffset
}
// any logic for partitions whose leader has changed
def handlePartitionsWithErrors(partitions: Iterable[TopicPartition]) {
removePartitions(partitions.toSet)
consumerFetcherManager.addPartitionsWithError(partitions)
}
protected def buildFetchRequest(partitionMap: collection.Seq[(TopicPartition, PartitionFetchState)]): FetchRequest = {
    partitionMap.foreach { case (topicPartition, partitionFetchState) =>
if (partitionFetchState.isActive)
fetchRequestBuilder.addFetch(topicPartition.topic, topicPartition.partition, partitionFetchState.offset,
fetchSize)
}
new FetchRequest(fetchRequestBuilder.build())
}
protected def fetch(fetchRequest: FetchRequest): Seq[(TopicPartition, PartitionData)] =
simpleConsumer.fetch(fetchRequest.underlying).data.map { case (TopicAndPartition(t, p), value) =>
new TopicPartition(t, p) -> new PartitionData(value)
}
}
object ConsumerFetcherThread {
class FetchRequest(val underlying: kafka.api.FetchRequest) extends AbstractFetcherThread.FetchRequest {
private lazy val tpToOffset: Map[TopicPartition, Long] = underlying.requestInfo.map { case (tp, fetchInfo) =>
new TopicPartition(tp.topic, tp.partition) -> fetchInfo.offset
}.toMap
def isEmpty: Boolean = underlying.requestInfo.isEmpty
def offset(topicPartition: TopicPartition): Long = tpToOffset(topicPartition)
}
class PartitionData(val underlying: FetchResponsePartitionData) extends AbstractFetcherThread.PartitionData {
def errorCode: Short = underlying.error
def toByteBufferMessageSet: ByteBufferMessageSet = underlying.messages.asInstanceOf[ByteBufferMessageSet]
def highWatermark: Long = underlying.hw
def exception: Option[Throwable] =
if (errorCode == ErrorMapping.NoError) None else Some(ErrorMapping.exceptionFor(errorCode))
}
}
| flange/drift-dev | kafka/00-kafka_2.11-0.10.1.0/libs/tmp/kafka/consumer/ConsumerFetcherThread.scala | Scala | apache-2.0 | 5,910 |
package jp.t2v.lab.play2.auth
import scala.concurrent.{ExecutionContext, Future}
import play.api.mvc.RequestHeader
trait AsyncIdContainer[Id] {
def startNewSession(userId: Id, timeoutInSeconds: Int)(implicit request: RequestHeader, context: ExecutionContext): Future[AuthenticityToken]
def remove(token: AuthenticityToken)(implicit context: ExecutionContext): Future[Unit]
def get(token: AuthenticityToken)(implicit context: ExecutionContext): Future[Option[Id]]
def prolongTimeout(token: AuthenticityToken, timeoutInSeconds: Int)(implicit request: RequestHeader, context: ExecutionContext): Future[Unit]
}
object AsyncIdContainer {
def apply[A](underlying: IdContainer[A]): AsyncIdContainer[A] = new AsyncIdContainer[A] {
import Future.{successful => future}
def startNewSession(userId: A, timeoutInSeconds: Int)(implicit request: RequestHeader, context: ExecutionContext): Future[AuthenticityToken] =
future(underlying.startNewSession(userId, timeoutInSeconds))
def remove(token: AuthenticityToken)(implicit context: ExecutionContext): Future[Unit] = future(underlying.remove(token))
def get(token: AuthenticityToken)(implicit context: ExecutionContext): Future[Option[A]] = future(underlying.get(token))
def prolongTimeout(token: AuthenticityToken, timeoutInSeconds: Int)(implicit request: RequestHeader, context: ExecutionContext): Future[Unit] =
future(underlying.prolongTimeout(token, timeoutInSeconds))
}
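  // Hedged usage sketch (added for illustration, not part of the original file).
  // Assumes a concrete synchronous IdContainer[Long] named `container` plus an
  // implicit ExecutionContext and RequestHeader in scope:
  //
  //   val asyncContainer: AsyncIdContainer[Long] = AsyncIdContainer(container)
  //   asyncContainer.get(token).map(maybeUserId => ...)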
}
| indykish/play2-auth | module/src/main/scala/jp/t2v/lab/play2/auth/AsyncIdContainer.scala | Scala | apache-2.0 | 1,463 |
/*
* Scala (https://www.scala-lang.org)
*
* Copyright EPFL and Lightbend, Inc.
*
* Licensed under Apache License 2.0
* (http://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package scala
package xml
import Utility.sbToString
import scala.annotation.tailrec
import scala.collection.AbstractIterable
import scala.collection.Seq
object MetaData {
/**
* appends all attributes from new_tail to attribs, without attempting to
* detect or remove duplicates. The method guarantees that all attributes
* from attribs come before the attributes in new_tail, but does not
* guarantee to preserve the relative order of attribs.
*
* Duplicates can be removed with `normalize`.
*/
@tailrec
def concatenate(attribs: MetaData, new_tail: MetaData): MetaData =
if (attribs eq Null) new_tail
else concatenate(attribs.next, attribs copy new_tail)
/**
* returns normalized MetaData, with all duplicates removed and namespace prefixes resolved to
* namespace URIs via the given scope.
*/
def normalize(attribs: MetaData, scope: NamespaceBinding): MetaData = {
def iterate(md: MetaData, normalized_attribs: MetaData, set: Set[String]): MetaData = {
if (md eq Null) {
normalized_attribs
} else if (md.value eq null) {
iterate(md.next, normalized_attribs, set)
} else {
val key = getUniversalKey(md, scope)
if (set(key)) {
iterate(md.next, normalized_attribs, set)
} else {
md copy iterate(md.next, normalized_attribs, set + key)
}
}
}
iterate(attribs, Null, Set())
}
/**
* returns key if md is unprefixed, pre+key is md is prefixed
*/
def getUniversalKey(attrib: MetaData, scope: NamespaceBinding) = attrib match {
case prefixed: PrefixedAttribute => scope.getURI(prefixed.pre) + prefixed.key
case unprefixed: UnprefixedAttribute => unprefixed.key
}
/**
* returns MetaData with attributes updated from given MetaData
*/
def update(attribs: MetaData, scope: NamespaceBinding, updates: MetaData): MetaData =
normalize(concatenate(updates, attribs), scope)
}
/**
* This class represents an attribute and at the same time a linked list of
* attributes. Every instance of this class is either
* - an instance of `UnprefixedAttribute key,value` or
* - an instance of `PrefixedAttribute namespace_prefix,key,value` or
 * - `Null`, the empty attribute list.
*
* Namespace URIs are obtained by using the namespace scope of the element
* owning this attribute (see `getNamespace`).
*/
abstract class MetaData
extends AbstractIterable[MetaData]
with Iterable[MetaData]
with Equality
with Serializable {
/**
* Updates this MetaData with the MetaData given as argument. All attributes that occur in updates
* are part of the resulting MetaData. If an attribute occurs in both this instance and
* updates, only the one in updates is part of the result (avoiding duplicates). For prefixed
* attributes, namespaces are resolved using the given scope, which defaults to TopScope.
*
* @param updates MetaData with new and updated attributes
* @return a new MetaData instance that contains old, new and updated attributes
*/
def append(updates: MetaData, scope: NamespaceBinding = TopScope): MetaData =
MetaData.update(this, scope, updates)
/**
* Gets value of unqualified (unprefixed) attribute with given key, null if not found
*
   * @param key the attribute key
* @return value as Seq[Node] if key is found, null otherwise
*/
def apply(key: String): Seq[Node]
/**
* convenience method, same as `apply(namespace, owner.scope, key)`.
*
* @param namespace_uri namespace uri of key
* @param owner the element owning this attribute list
* @param key the attribute key
*/
final def apply(namespace_uri: String, owner: Node, key: String): Seq[Node] =
apply(namespace_uri, owner.scope, key)
/**
* Gets value of prefixed attribute with given key and namespace, null if not found
*
* @param namespace_uri namespace uri of key
   * @param scp a namespace scope (usually of the element owning this attribute list)
   * @param k the attribute key to be looked for
* @return value as Seq[Node] if key is found, null otherwise
*/
def apply(namespace_uri: String, scp: NamespaceBinding, k: String): Seq[Node]
/**
* returns a copy of this MetaData item with next field set to argument.
*/
def copy(next: MetaData): MetaData
/** if owner is the element of this metadata item, returns namespace */
def getNamespace(owner: Node): String
def hasNext = (Null != next)
def length: Int = length(0)
def length(i: Int): Int = next.length(i + 1)
def isPrefixed: Boolean
override def canEqual(other: Any) = other match {
case _: MetaData => true
case _ => false
}
override def strict_==(other: Equality) = other match {
case m: MetaData => this.asAttrMap == m.asAttrMap
case _ => false
}
protected def basisForHashCode: Seq[Any] = List(this.asAttrMap)
/** filters this sequence of meta data */
override def filter(f: MetaData => Boolean): MetaData =
if (f(this)) copy(next filter f)
else next filter f
def reverse: MetaData =
foldLeft(Null: MetaData) { (x, xs) =>
xs.copy(x)
}
/** returns key of this MetaData item */
def key: String
/** returns value of this MetaData item */
def value: Seq[Node]
/**
   * Returns a String containing "prefix:key" if this attribute's key is
   * prefixed, and "key" otherwise.
*/
def prefixedKey = this match {
case x: Attribute if x.isPrefixed => x.pre + ":" + key
case _ => key
}
/**
* Returns a Map containing the attributes stored as key/value pairs.
*/
def asAttrMap: Map[String, String] =
(iterator map (x => (x.prefixedKey, x.value.text))).toMap
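  // Sketch: <a x="1" y="2"/>.attributes.asAttrMap == Map("x" -> "1", "y" -> "2")
  // (illustrative; the ordering of the resulting Map is unspecified)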
/** returns Null or the next MetaData item */
def next: MetaData
/**
* Gets value of unqualified (unprefixed) attribute with given key, None if not found
*
* @param key
* @return value in Some(Seq[Node]) if key is found, None otherwise
*/
final def get(key: String): Option[Seq[Node]] = Option(apply(key))
/** same as get(uri, owner.scope, key) */
final def get(uri: String, owner: Node, key: String): Option[Seq[Node]] =
get(uri, owner.scope, key)
/**
* gets value of qualified (prefixed) attribute with given key.
*
* @param uri namespace of key
   * @param scope a namespace scope (usually of the element owning this attribute list)
   * @param key the attribute key to be looked for
* @return value as Some[Seq[Node]] if key is found, None otherwise
*/
final def get(uri: String, scope: NamespaceBinding, key: String): Option[Seq[Node]] =
Option(apply(uri, scope, key))
protected def toString1(): String = sbToString(toString1)
  // appends the string representation of a single attribute to the StringBuilder
protected def toString1(sb: StringBuilder): Unit
override def toString(): String = sbToString(buildString)
def buildString(sb: StringBuilder): StringBuilder = {
sb append ' '
toString1(sb)
next buildString sb
}
  /**
   * Returns `true` if this attribute list is well-formed with respect to the given namespace scope.
   */
def wellformed(scope: NamespaceBinding): Boolean
def remove(key: String): MetaData
def remove(namespace: String, scope: NamespaceBinding, key: String): MetaData
final def remove(namespace: String, owner: Node, key: String): MetaData =
remove(namespace, owner.scope, key)
}
| scala/scala-xml | shared/src/main/scala/scala/xml/MetaData.scala | Scala | apache-2.0 | 7,592 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.admin
import java.util
import java.util.Properties
import kafka.admin.ConfigCommand.ConfigCommandOptions
import kafka.api.ApiVersion
import kafka.cluster.{Broker, EndPoint}
import kafka.server.{ConfigEntityName, KafkaConfig}
import kafka.utils.{Exit, Logging}
import kafka.zk.{AdminZkClient, BrokerInfo, KafkaZkClient, ZooKeeperTestHarness}
import org.apache.kafka.clients.admin._
import org.apache.kafka.common.config.{ConfigException, ConfigResource}
import org.apache.kafka.common.internals.KafkaFutureImpl
import org.apache.kafka.common.Node
import org.apache.kafka.common.errors.InvalidConfigurationException
import org.apache.kafka.common.network.ListenerName
import org.apache.kafka.common.security.auth.SecurityProtocol
import org.apache.kafka.common.security.scram.internals.ScramCredentialUtils
import org.apache.kafka.common.utils.Sanitizer
import org.easymock.EasyMock
import org.junit.Assert._
import org.junit.Test
import scala.collection.{Seq, mutable}
import scala.collection.JavaConverters._
class ConfigCommandTest extends ZooKeeperTestHarness with Logging {
@Test
def shouldExitWithNonZeroStatusOnArgError(): Unit = {
assertNonZeroStatusExit(Array("--blah"))
}
@Test
def shouldExitWithNonZeroStatusOnZkCommandError(): Unit = {
assertNonZeroStatusExit(Array(
"--zookeeper", zkConnect,
"--entity-name", "1",
"--entity-type", "brokers",
"--alter",
"--add-config", "security.inter.broker.protocol=PLAINTEXT"))
}
@Test
def shouldExitWithNonZeroStatusOnBrokerCommandError(): Unit = {
assertNonZeroStatusExit(Array(
"--bootstrap-server", "invalid host",
"--entity-type", "brokers",
"--entity-name", "1",
"--describe"))
}
private def assertNonZeroStatusExit(args: Array[String]): Unit = {
var exitStatus: Option[Int] = None
Exit.setExitProcedure { (status, _) =>
exitStatus = Some(status)
throw new RuntimeException
}
try {
ConfigCommand.main(args)
} catch {
      case _: RuntimeException => // expected: thrown by the exit procedure installed above
} finally {
Exit.resetExitProcedure()
}
assertEquals(Some(1), exitStatus)
}
@Test
def shouldParseArgumentsForClientsEntityType() {
testArgumentParse("clients")
}
@Test
def shouldParseArgumentsForTopicsEntityType() {
testArgumentParse("topics")
}
@Test
def shouldParseArgumentsForBrokersEntityType() {
testArgumentParse("brokers")
}
def testArgumentParse(entityType: String) = {
// Should parse correctly
var createOpts = new ConfigCommandOptions(Array("--zookeeper", zkConnect,
"--entity-name", "x",
"--entity-type", entityType,
"--describe"))
createOpts.checkArgs()
// For --alter and added config
createOpts = new ConfigCommandOptions(Array("--zookeeper", zkConnect,
"--entity-name", "x",
"--entity-type", entityType,
"--alter",
"--add-config", "a=b,c=d"))
createOpts.checkArgs()
// For alter and deleted config
createOpts = new ConfigCommandOptions(Array("--zookeeper", zkConnect,
"--entity-name", "x",
"--entity-type", entityType,
"--alter",
"--delete-config", "a,b,c"))
createOpts.checkArgs()
// For alter and both added, deleted config
createOpts = new ConfigCommandOptions(Array("--zookeeper", zkConnect,
"--entity-name", "x",
"--entity-type", entityType,
"--alter",
"--add-config", "a=b,c=d",
"--delete-config", "a"))
createOpts.checkArgs()
val addedProps = ConfigCommand.parseConfigsToBeAdded(createOpts)
assertEquals(2, addedProps.size())
assertEquals("b", addedProps.getProperty("a"))
assertEquals("d", addedProps.getProperty("c"))
val deletedProps = ConfigCommand.parseConfigsToBeDeleted(createOpts)
assertEquals(1, deletedProps.size)
assertEquals("a", deletedProps.head)
createOpts = new ConfigCommandOptions(Array("--zookeeper", zkConnect,
"--entity-name", "x",
"--entity-type", entityType,
"--alter",
"--add-config", "a=b,c=,d=e,f="))
createOpts.checkArgs()
val addedProps2 = ConfigCommand.parseConfigsToBeAdded(createOpts)
assertEquals(4, addedProps2.size())
assertEquals("b", addedProps2.getProperty("a"))
assertEquals("e", addedProps2.getProperty("d"))
assertTrue(addedProps2.getProperty("c").isEmpty)
assertTrue(addedProps2.getProperty("f").isEmpty)
}
@Test(expected = classOf[IllegalArgumentException])
def shouldFailIfUnrecognisedEntityType(): Unit = {
val createOpts = new ConfigCommandOptions(Array("--zookeeper", zkConnect,
"--entity-name", "client", "--entity-type", "not-recognised", "--alter", "--add-config", "a=b,c=d"))
ConfigCommand.alterConfig(null, createOpts, new DummyAdminZkClient(zkClient))
}
@Test
def shouldAddClientConfig(): Unit = {
val createOpts = new ConfigCommandOptions(Array("--zookeeper", zkConnect,
"--entity-name", "my-client-id",
"--entity-type", "clients",
"--alter",
"--add-config", "a=b,c=d"))
class TestAdminZkClient(zkClient: KafkaZkClient) extends AdminZkClient(zkClient) {
override def changeClientIdConfig(clientId: String, configChange: Properties): Unit = {
assertEquals("my-client-id", clientId)
assertEquals("b", configChange.get("a"))
assertEquals("d", configChange.get("c"))
}
}
ConfigCommand.alterConfig(null, createOpts, new TestAdminZkClient(zkClient))
}
@Test
def shouldAddTopicConfig(): Unit = {
val createOpts = new ConfigCommandOptions(Array("--zookeeper", zkConnect,
"--entity-name", "my-topic",
"--entity-type", "topics",
"--alter",
"--add-config", "a=b,c=d"))
class TestAdminZkClient(zkClient: KafkaZkClient) extends AdminZkClient(zkClient) {
override def changeTopicConfig(topic: String, configChange: Properties): Unit = {
assertEquals("my-topic", topic)
assertEquals("b", configChange.get("a"))
assertEquals("d", configChange.get("c"))
}
}
ConfigCommand.alterConfig(null, createOpts, new TestAdminZkClient(zkClient))
}
@Test
def shouldAddBrokerQuotaConfig(): Unit = {
val alterOpts = new ConfigCommandOptions(Array("--zookeeper", zkConnect,
"--entity-name", "1",
"--entity-type", "brokers",
"--alter",
"--add-config", "leader.replication.throttled.rate=10,follower.replication.throttled.rate=20"))
class TestAdminZkClient(zkClient: KafkaZkClient) extends AdminZkClient(zkClient) {
override def changeBrokerConfig(brokerIds: Seq[Int], configChange: Properties): Unit = {
assertEquals(Seq(1), brokerIds)
assertEquals("10", configChange.get("leader.replication.throttled.rate"))
assertEquals("20", configChange.get("follower.replication.throttled.rate"))
}
}
ConfigCommand.alterConfig(null, alterOpts, new TestAdminZkClient(zkClient))
}
@Test
def shouldAddBrokerDynamicConfig(): Unit = {
val node = new Node(1, "localhost", 9092)
verifyAlterBrokerConfig(node, "1", List("--entity-name", "1"))
}
@Test
def shouldAddDefaultBrokerDynamicConfig(): Unit = {
val node = new Node(1, "localhost", 9092)
verifyAlterBrokerConfig(node, "", List("--entity-default"))
}
def verifyAlterBrokerConfig(node: Node, resourceName: String, resourceOpts: List[String]): Unit = {
val optsList = List("--bootstrap-server", "localhost:9092",
"--entity-type", "brokers",
"--alter",
"--add-config", "message.max.bytes=10") ++ resourceOpts
val alterOpts = new ConfigCommandOptions(optsList.toArray)
val brokerConfigs = mutable.Map[String, String]("num.io.threads" -> "5")
val resource = new ConfigResource(ConfigResource.Type.BROKER, resourceName)
val configEntries = util.Collections.singletonList(new ConfigEntry("num.io.threads", "5"))
val future = new KafkaFutureImpl[util.Map[ConfigResource, Config]]
future.complete(util.Collections.singletonMap(resource, new Config(configEntries)))
val describeResult: DescribeConfigsResult = EasyMock.createNiceMock(classOf[DescribeConfigsResult])
EasyMock.expect(describeResult.all()).andReturn(future).once()
val alterFuture = new KafkaFutureImpl[Void]
alterFuture.complete(null)
val alterResult: AlterConfigsResult = EasyMock.createNiceMock(classOf[AlterConfigsResult])
EasyMock.expect(alterResult.all()).andReturn(alterFuture)
val mockAdminClient = new MockAdminClient(util.Collections.singletonList(node), node) {
override def describeConfigs(resources: util.Collection[ConfigResource], options: DescribeConfigsOptions): DescribeConfigsResult = {
assertEquals(1, resources.size)
val resource = resources.iterator.next
assertEquals(ConfigResource.Type.BROKER, resource.`type`)
assertEquals(resourceName, resource.name)
describeResult
}
override def alterConfigs(configs: util.Map[ConfigResource, Config], options: AlterConfigsOptions): AlterConfigsResult = {
assertEquals(1, configs.size)
val entry = configs.entrySet.iterator.next
val resource = entry.getKey
val config = entry.getValue
assertEquals(ConfigResource.Type.BROKER, resource.`type`)
config.entries.asScala.foreach { e => brokerConfigs.put(e.name, e.value) }
alterResult
}
}
EasyMock.replay(alterResult, describeResult)
ConfigCommand.alterBrokerConfig(mockAdminClient, alterOpts, resourceName)
assertEquals(Map("message.max.bytes" -> "10", "num.io.threads" -> "5"), brokerConfigs.toMap)
EasyMock.reset(alterResult, describeResult)
}
@Test
def shouldSupportCommaSeparatedValues(): Unit = {
val createOpts = new ConfigCommandOptions(Array("--zookeeper", zkConnect,
"--entity-name", "my-topic",
"--entity-type", "topics",
"--alter",
"--add-config", "a=b,c=[d,e ,f],g=[h,i]"))
class TestAdminZkClient(zkClient: KafkaZkClient) extends AdminZkClient(zkClient) {
override def changeBrokerConfig(brokerIds: Seq[Int], configChange: Properties): Unit = {
assertEquals(Seq(1), brokerIds)
assertEquals("b", configChange.get("a"))
assertEquals("d,e ,f", configChange.get("c"))
assertEquals("h,i", configChange.get("g"))
}
override def changeTopicConfig(topic: String, configs: Properties): Unit = {}
}
ConfigCommand.alterConfig(null, createOpts, new TestAdminZkClient(zkClient))
}
@Test (expected = classOf[IllegalArgumentException])
def shouldNotUpdateBrokerConfigIfMalformedEntityName(): Unit = {
val createOpts = new ConfigCommandOptions(Array("--zookeeper", zkConnect,
"--entity-name", "1,2,3", //Don't support multiple brokers currently
"--entity-type", "brokers",
"--alter",
"--add-config", "leader.replication.throttled.rate=10"))
ConfigCommand.alterConfig(null, createOpts, new DummyAdminZkClient(zkClient))
}
@Test
def testDynamicBrokerConfigUpdateUsingZooKeeper(): Unit = {
val brokerId = "1"
val adminZkClient = new AdminZkClient(zkClient)
val alterOpts = Array("--zookeeper", zkConnect, "--entity-type", "brokers", "--alter")
def entityOpt(brokerId: Option[String]): Array[String] = {
brokerId.map(id => Array("--entity-name", id)).getOrElse(Array("--entity-default"))
}
def alterConfig(configs: Map[String, String], brokerId: Option[String],
encoderConfigs: Map[String, String] = Map.empty): Unit = {
val configStr = (configs ++ encoderConfigs).map { case (k, v) => s"$k=$v" }.mkString(",")
val addOpts = new ConfigCommandOptions(alterOpts ++ entityOpt(brokerId) ++ Array("--add-config", configStr))
ConfigCommand.alterConfig(zkClient, addOpts, adminZkClient)
}
def verifyConfig(configs: Map[String, String], brokerId: Option[String]): Unit = {
val entityConfigs = zkClient.getEntityConfigs("brokers", brokerId.getOrElse(ConfigEntityName.Default))
assertEquals(configs, entityConfigs.asScala)
}
def alterAndVerifyConfig(configs: Map[String, String], brokerId: Option[String]): Unit = {
alterConfig(configs, brokerId)
verifyConfig(configs, brokerId)
}
def deleteAndVerifyConfig(configNames: Set[String], brokerId: Option[String]): Unit = {
val deleteOpts = new ConfigCommandOptions(alterOpts ++ entityOpt(brokerId) ++
Array("--delete-config", configNames.mkString(",")))
ConfigCommand.alterConfig(zkClient, deleteOpts, adminZkClient)
verifyConfig(Map.empty, brokerId)
}
// Add config
alterAndVerifyConfig(Map("message.max.size" -> "110000"), Some(brokerId))
alterAndVerifyConfig(Map("message.max.size" -> "120000"), None)
// Change config
alterAndVerifyConfig(Map("message.max.size" -> "130000"), Some(brokerId))
alterAndVerifyConfig(Map("message.max.size" -> "140000"), None)
// Delete config
deleteAndVerifyConfig(Set("message.max.size"), Some(brokerId))
deleteAndVerifyConfig(Set("message.max.size"), None)
// Listener configs: should work only with listener name
alterAndVerifyConfig(Map("listener.name.external.ssl.keystore.location" -> "/tmp/test.jks"), Some(brokerId))
intercept[ConfigException](alterConfig(Map("ssl.keystore.location" -> "/tmp/test.jks"), Some(brokerId)))
// Per-broker config configured at default cluster-level should fail
intercept[ConfigException](alterConfig(Map("listener.name.external.ssl.keystore.location" -> "/tmp/test.jks"), None))
deleteAndVerifyConfig(Set("listener.name.external.ssl.keystore.location"), Some(brokerId))
// Password config update without encoder secret should fail
intercept[IllegalArgumentException](alterConfig(Map("listener.name.external.ssl.keystore.password" -> "secret"), Some(brokerId)))
// Password config update with encoder secret should succeed and encoded password must be stored in ZK
val configs = Map("listener.name.external.ssl.keystore.password" -> "secret", "log.cleaner.threads" -> "2")
val encoderConfigs = Map(KafkaConfig.PasswordEncoderSecretProp -> "encoder-secret")
alterConfig(configs, Some(brokerId), encoderConfigs)
val brokerConfigs = zkClient.getEntityConfigs("brokers", brokerId)
assertFalse("Encoder secret stored in ZooKeeper", brokerConfigs.contains(KafkaConfig.PasswordEncoderSecretProp))
assertEquals("2", brokerConfigs.getProperty("log.cleaner.threads")) // not encoded
val encodedPassword = brokerConfigs.getProperty("listener.name.external.ssl.keystore.password")
val passwordEncoder = ConfigCommand.createPasswordEncoder(encoderConfigs)
assertEquals("secret", passwordEncoder.decode(encodedPassword).value)
assertEquals(configs.size, brokerConfigs.size)
// Password config update with overrides for encoder parameters
val configs2 = Map("listener.name.internal.ssl.keystore.password" -> "secret2")
val encoderConfigs2 = Map(KafkaConfig.PasswordEncoderSecretProp -> "encoder-secret",
KafkaConfig.PasswordEncoderCipherAlgorithmProp -> "DES/CBC/PKCS5Padding",
KafkaConfig.PasswordEncoderIterationsProp -> "1024",
KafkaConfig.PasswordEncoderKeyFactoryAlgorithmProp -> "PBKDF2WithHmacSHA1",
KafkaConfig.PasswordEncoderKeyLengthProp -> "64")
alterConfig(configs2, Some(brokerId), encoderConfigs2)
val brokerConfigs2 = zkClient.getEntityConfigs("brokers", brokerId)
val encodedPassword2 = brokerConfigs2.getProperty("listener.name.internal.ssl.keystore.password")
assertEquals("secret2", ConfigCommand.createPasswordEncoder(encoderConfigs).decode(encodedPassword2).value)
assertEquals("secret2", ConfigCommand.createPasswordEncoder(encoderConfigs2).decode(encodedPassword2).value)
// Password config update at default cluster-level should fail
intercept[ConfigException](alterConfig(configs, None, encoderConfigs))
// Dynamic config updates using ZK should fail if broker is running.
registerBrokerInZk(brokerId.toInt)
intercept[IllegalArgumentException](alterConfig(Map("message.max.size" -> "210000"), Some(brokerId)))
intercept[IllegalArgumentException](alterConfig(Map("message.max.size" -> "220000"), None))
    // Dynamic config updates using ZK for a different broker that is not running should succeed
alterAndVerifyConfig(Map("message.max.size" -> "230000"), Some("2"))
}
@Test (expected = classOf[IllegalArgumentException])
def shouldNotUpdateBrokerConfigIfMalformedConfig(): Unit = {
val createOpts = new ConfigCommandOptions(Array("--zookeeper", zkConnect,
"--entity-name", "1",
"--entity-type", "brokers",
"--alter",
"--add-config", "a=="))
ConfigCommand.alterConfig(null, createOpts, new DummyAdminZkClient(zkClient))
}
@Test (expected = classOf[IllegalArgumentException])
def shouldNotUpdateBrokerConfigIfMalformedBracketConfig(): Unit = {
val createOpts = new ConfigCommandOptions(Array("--zookeeper", zkConnect,
"--entity-name", "1",
"--entity-type", "brokers",
"--alter",
"--add-config", "a=[b,c,d=e"))
ConfigCommand.alterConfig(null, createOpts, new DummyAdminZkClient(zkClient))
}
@Test (expected = classOf[InvalidConfigurationException])
def shouldNotUpdateBrokerConfigIfNonExistingConfigIsDeleted(): Unit = {
val createOpts = new ConfigCommandOptions(Array("--zookeeper", zkConnect,
"--entity-name", "my-topic",
"--entity-type", "topics",
"--alter",
"--delete-config", "missing_config1, missing_config2"))
ConfigCommand.alterConfig(null, createOpts, new DummyAdminZkClient(zkClient))
}
@Test
def shouldDeleteBrokerConfig(): Unit = {
val createOpts = new ConfigCommandOptions(Array("--zookeeper", zkConnect,
"--entity-name", "1",
"--entity-type", "brokers",
"--alter",
"--delete-config", "a,c"))
class TestAdminZkClient(zkClient: KafkaZkClient) extends AdminZkClient(zkClient) {
override def fetchEntityConfig(entityType: String, entityName: String): Properties = {
val properties: Properties = new Properties
properties.put("a", "b")
properties.put("c", "d")
properties.put("e", "f")
properties
}
override def changeBrokerConfig(brokerIds: Seq[Int], configChange: Properties): Unit = {
assertEquals("f", configChange.get("e"))
assertEquals(1, configChange.size())
}
}
ConfigCommand.alterConfig(null, createOpts, new TestAdminZkClient(zkClient))
}
@Test
def testScramCredentials(): Unit = {
def createOpts(user: String, config: String): ConfigCommandOptions = {
new ConfigCommandOptions(Array("--zookeeper", zkConnect,
"--entity-name", user,
"--entity-type", "users",
"--alter",
"--add-config", config))
}
def deleteOpts(user: String, mechanism: String) = new ConfigCommandOptions(Array("--zookeeper", zkConnect,
"--entity-name", user,
"--entity-type", "users",
"--alter",
"--delete-config", mechanism))
val credentials = mutable.Map[String, Properties]()
case class CredentialChange(user: String, mechanisms: Set[String], iterations: Int) extends AdminZkClient(zkClient) {
override def fetchEntityConfig(entityType: String, entityName: String): Properties = {
credentials.getOrElse(entityName, new Properties())
}
override def changeUserOrUserClientIdConfig(sanitizedEntityName: String, configChange: Properties): Unit = {
assertEquals(user, sanitizedEntityName)
assertEquals(mechanisms, configChange.keySet().asScala)
for (mechanism <- mechanisms) {
val value = configChange.getProperty(mechanism)
assertEquals(-1, value.indexOf("password="))
val scramCredential = ScramCredentialUtils.credentialFromString(value)
assertEquals(iterations, scramCredential.iterations)
if (configChange != null)
credentials.put(user, configChange)
}
}
}
val optsA = createOpts("userA", "SCRAM-SHA-256=[iterations=8192,password=abc, def]")
ConfigCommand.alterConfig(null, optsA, CredentialChange("userA", Set("SCRAM-SHA-256"), 8192))
val optsB = createOpts("userB", "SCRAM-SHA-256=[iterations=4096,password=abc, def],SCRAM-SHA-512=[password=1234=abc]")
ConfigCommand.alterConfig(null, optsB, CredentialChange("userB", Set("SCRAM-SHA-256", "SCRAM-SHA-512"), 4096))
val del256 = deleteOpts("userB", "SCRAM-SHA-256")
ConfigCommand.alterConfig(null, del256, CredentialChange("userB", Set("SCRAM-SHA-512"), 4096))
val del512 = deleteOpts("userB", "SCRAM-SHA-512")
ConfigCommand.alterConfig(null, del512, CredentialChange("userB", Set(), 4096))
}
@Test
def testQuotaConfigEntity() {
def createOpts(entityType: String, entityName: Option[String], otherArgs: Array[String]) : ConfigCommandOptions = {
val optArray = Array("--zookeeper", zkConnect,
"--entity-type", entityType)
val nameArray = entityName match {
case Some(name) => Array("--entity-name", name)
case None => Array[String]()
}
new ConfigCommandOptions(optArray ++ nameArray ++ otherArgs)
}
def checkEntity(entityType: String, entityName: Option[String], expectedEntityName: String, otherArgs: Array[String]) {
val opts = createOpts(entityType, entityName, otherArgs)
opts.checkArgs()
val entity = ConfigCommand.parseEntity(opts)
assertEquals(entityType, entity.root.entityType)
assertEquals(expectedEntityName, entity.fullSanitizedName)
}
def checkInvalidEntity(entityType: String, entityName: Option[String], otherArgs: Array[String]) {
val opts = createOpts(entityType, entityName, otherArgs)
try {
opts.checkArgs()
ConfigCommand.parseEntity(opts)
fail("Did not fail with invalid argument list")
} catch {
case _: IllegalArgumentException => // expected exception
}
}
val describeOpts = Array("--describe")
val alterOpts = Array("--alter", "--add-config", "a=b,c=d")
// <client-id> quota
val clientId = "client-1"
for (opts <- Seq(describeOpts, alterOpts)) {
checkEntity("clients", Some(clientId), clientId, opts)
checkEntity("clients", Some(""), ConfigEntityName.Default, opts)
}
checkEntity("clients", None, "", describeOpts)
checkInvalidEntity("clients", None, alterOpts)
// <user> quota
val principal = "CN=ConfigCommandTest,O=Apache,L=<default>"
val sanitizedPrincipal = Sanitizer.sanitize(principal)
assertEquals(-1, sanitizedPrincipal.indexOf('='))
assertEquals(principal, Sanitizer.desanitize(sanitizedPrincipal))
for (opts <- Seq(describeOpts, alterOpts)) {
checkEntity("users", Some(principal), sanitizedPrincipal, opts)
checkEntity("users", Some(""), ConfigEntityName.Default, opts)
}
checkEntity("users", None, "", describeOpts)
checkInvalidEntity("users", None, alterOpts)
// <user, client-id> quota
val userClient = sanitizedPrincipal + "/clients/" + clientId
def clientIdOpts(name: String) = Array("--entity-type", "clients", "--entity-name", name)
for (opts <- Seq(describeOpts, alterOpts)) {
checkEntity("users", Some(principal), userClient, opts ++ clientIdOpts(clientId))
checkEntity("users", Some(principal), sanitizedPrincipal + "/clients/" + ConfigEntityName.Default, opts ++ clientIdOpts(""))
checkEntity("users", Some(""), ConfigEntityName.Default + "/clients/" + clientId, describeOpts ++ clientIdOpts(clientId))
checkEntity("users", Some(""), ConfigEntityName.Default + "/clients/" + ConfigEntityName.Default, opts ++ clientIdOpts(""))
}
checkEntity("users", Some(principal), sanitizedPrincipal + "/clients", describeOpts ++ Array("--entity-type", "clients"))
// Both user and client-id must be provided for alter
checkInvalidEntity("users", Some(principal), alterOpts ++ Array("--entity-type", "clients"))
checkInvalidEntity("users", None, alterOpts ++ clientIdOpts(clientId))
checkInvalidEntity("users", None, alterOpts ++ Array("--entity-type", "clients"))
}
@Test
def testUserClientQuotaOpts() {
def checkEntity(expectedEntityType: String, expectedEntityName: String, args: String*) {
val opts = new ConfigCommandOptions(Array("--zookeeper", zkConnect) ++ args)
opts.checkArgs()
val entity = ConfigCommand.parseEntity(opts)
assertEquals(expectedEntityType, entity.root.entityType)
assertEquals(expectedEntityName, entity.fullSanitizedName)
}
    // <default> is a valid user principal and client-id (can be handled with URL-encoding).
checkEntity("users", Sanitizer.sanitize("<default>"),
"--entity-type", "users", "--entity-name", "<default>",
"--alter", "--add-config", "a=b,c=d")
checkEntity("clients", Sanitizer.sanitize("<default>"),
"--entity-type", "clients", "--entity-name", "<default>",
"--alter", "--add-config", "a=b,c=d")
checkEntity("users", Sanitizer.sanitize("CN=user1") + "/clients/client1",
"--entity-type", "users", "--entity-name", "CN=user1", "--entity-type", "clients", "--entity-name", "client1",
"--alter", "--add-config", "a=b,c=d")
checkEntity("users", Sanitizer.sanitize("CN=user1") + "/clients/client1",
"--entity-name", "CN=user1", "--entity-type", "users", "--entity-name", "client1", "--entity-type", "clients",
"--alter", "--add-config", "a=b,c=d")
checkEntity("users", Sanitizer.sanitize("CN=user1") + "/clients/client1",
"--entity-type", "clients", "--entity-name", "client1", "--entity-type", "users", "--entity-name", "CN=user1",
"--alter", "--add-config", "a=b,c=d")
checkEntity("users", Sanitizer.sanitize("CN=user1") + "/clients/client1",
"--entity-name", "client1", "--entity-type", "clients", "--entity-name", "CN=user1", "--entity-type", "users",
"--alter", "--add-config", "a=b,c=d")
checkEntity("users", Sanitizer.sanitize("CN=user1") + "/clients",
"--entity-type", "clients", "--entity-name", "CN=user1", "--entity-type", "users",
"--describe")
checkEntity("users", "/clients",
"--entity-type", "clients", "--entity-type", "users",
"--describe")
checkEntity("users", Sanitizer.sanitize("CN=user1") + "/clients/" + Sanitizer.sanitize("client1?@%"),
"--entity-name", "client1?@%", "--entity-type", "clients", "--entity-name", "CN=user1", "--entity-type", "users",
"--alter", "--add-config", "a=b,c=d")
}
@Test
def testQuotaDescribeEntities() {
val zkClient: KafkaZkClient = EasyMock.createNiceMock(classOf[KafkaZkClient])
def checkEntities(opts: Array[String], expectedFetches: Map[String, Seq[String]], expectedEntityNames: Seq[String]) {
val entity = ConfigCommand.parseEntity(new ConfigCommandOptions(opts :+ "--describe"))
expectedFetches.foreach {
case (name, values) => EasyMock.expect(zkClient.getAllEntitiesWithConfig(name)).andReturn(values)
}
EasyMock.replay(zkClient)
val entities = entity.getAllEntities(zkClient)
assertEquals(expectedEntityNames, entities.map(e => e.fullSanitizedName))
EasyMock.reset(zkClient)
}
val clientId = "a-client"
val principal = "CN=ConfigCommandTest.testQuotaDescribeEntities , O=Apache, L=<default>"
val sanitizedPrincipal = Sanitizer.sanitize(principal)
val userClient = sanitizedPrincipal + "/clients/" + clientId
var opts = Array("--entity-type", "clients", "--entity-name", clientId)
checkEntities(opts, Map.empty, Seq(clientId))
opts = Array("--entity-type", "clients", "--entity-default")
checkEntities(opts, Map.empty, Seq("<default>"))
opts = Array("--entity-type", "clients")
checkEntities(opts, Map("clients" -> Seq(clientId)), Seq(clientId))
opts = Array("--entity-type", "users", "--entity-name", principal)
checkEntities(opts, Map.empty, Seq(sanitizedPrincipal))
opts = Array("--entity-type", "users", "--entity-default")
checkEntities(opts, Map.empty, Seq("<default>"))
opts = Array("--entity-type", "users")
checkEntities(opts, Map("users" -> Seq("<default>", sanitizedPrincipal)), Seq("<default>", sanitizedPrincipal))
opts = Array("--entity-type", "users", "--entity-name", principal, "--entity-type", "clients", "--entity-name", clientId)
checkEntities(opts, Map.empty, Seq(userClient))
opts = Array("--entity-type", "users", "--entity-name", principal, "--entity-type", "clients", "--entity-default")
checkEntities(opts, Map.empty, Seq(sanitizedPrincipal + "/clients/<default>"))
opts = Array("--entity-type", "users", "--entity-name", principal, "--entity-type", "clients")
checkEntities(opts,
Map("users/" + sanitizedPrincipal + "/clients" -> Seq("client-4")),
Seq(sanitizedPrincipal + "/clients/client-4"))
opts = Array("--entity-type", "users", "--entity-default", "--entity-type", "clients")
checkEntities(opts,
Map("users/<default>/clients" -> Seq("client-5")),
Seq("<default>/clients/client-5"))
opts = Array("--entity-type", "users", "--entity-type", "clients")
val userMap = Map("users/" + sanitizedPrincipal + "/clients" -> Seq("client-2"))
val defaultUserMap = Map("users/<default>/clients" -> Seq("client-3"))
checkEntities(opts,
Map("users" -> Seq("<default>", sanitizedPrincipal)) ++ defaultUserMap ++ userMap,
Seq("<default>/clients/client-3", sanitizedPrincipal + "/clients/client-2"))
}
private def registerBrokerInZk(id: Int): Unit = {
zkClient.createTopLevelPaths()
val securityProtocol = SecurityProtocol.PLAINTEXT
val endpoint = new EndPoint("localhost", 9092, ListenerName.forSecurityProtocol(securityProtocol), securityProtocol)
val brokerInfo = BrokerInfo(Broker(id, Seq(endpoint), rack = None), ApiVersion.latestVersion, jmxPort = 9192)
zkClient.registerBroker(brokerInfo)
}
class DummyAdminZkClient(zkClient: KafkaZkClient) extends AdminZkClient(zkClient) {
override def changeBrokerConfig(brokerIds: Seq[Int], configs: Properties): Unit = {}
override def fetchEntityConfig(entityType: String, entityName: String): Properties = {new Properties}
override def changeClientIdConfig(clientId: String, configs: Properties): Unit = {}
override def changeUserOrUserClientIdConfig(sanitizedEntityName: String, configs: Properties): Unit = {}
override def changeTopicConfig(topic: String, configs: Properties): Unit = {}
}
}
| gf53520/kafka | core/src/test/scala/unit/kafka/admin/ConfigCommandTest.scala | Scala | apache-2.0 | 31,441 |
/*
* Copyright 2014–2018 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar
import quasar.effect.Failure
import pathy.Path.{File, Sandboxed}
import scalaz.EitherT
import scalaz.concurrent.Task
package object config {
type FsFile = FsPath[File, Sandboxed]
type CfgErr[A] = Failure[ConfigError, A]
type CfgErrT[F[_], A] = EitherT[F, ConfigError, A]
type CfgTask[A] = CfgErrT[Task, A]
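  // Illustrative sketch (not from the original sources): a CfgTask[A] unwraps to a
  // Task[ConfigError \/ A], so a successful value can be built as
  //   EitherT(Task.now(\/-(1))): CfgTask[Int]
  // (assumes scalaz.\/- is in scope).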
}
| jedesah/Quasar | connector/src/main/scala/quasar/config/package.scala | Scala | apache-2.0 | 948 |
package net.janvsmachine.fpinscala
import org.scalatest.FlatSpec
class EitherTests extends FlatSpec {
import Either._
val ex = new Exception("Test exception")
val otherEx = new Exception("Other test exception")
"An error value" should "comply with spec" in {
val l: Either[Exception, String] = Left(ex)
assert(l.map(_.length) == l)
assert(l.flatMap(v => Right(v.length)) == l)
assert(l.flatMap(v => Left(otherEx)) == l)
assert(l.map2(Right("other"))((a, b) => "other") == l)
assert(l.map2(Left(otherEx))((a, b) => "other") == l)
assert(l.orElse { Right(42) } == Right(42))
assert(l.orElse { Left(otherEx) } == Left(otherEx))
}
"A successful value" should "comply with spec" in {
val r: Either[Exception, String] = Right("value")
assert(r.map(_.length) == Right(5))
assert(r.flatMap(v => Right(v.length)) == Right(5))
assert(r.flatMap(v => Left(ex)) == Left(ex))
assert(r.map2(Right("other"))((a, b) => a + b) == Right("valueother"))
assert(r.map2(Left(ex))((a, b) => a + b) == Left(ex))
assert(r.orElse { Right(42) } == r)
assert(r.orElse { Left(otherEx) } == r)
}
}
| stettix/fp-in-scala | src/test/scala/net/janvsmachine/fpinscala/EitherTests.scala | Scala | apache-2.0 | 1,160 |
/*§
===========================================================================
Chronos
===========================================================================
Copyright (C) 2015-2016 Gianluca Costa
===========================================================================
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
===========================================================================
*/
package info.gianlucacosta.chronos.ast.expressions
import info.gianlucacosta.chronos.ast.{AstVisitor, Expression, Reference}
case class ReferenceValue(reference: Reference) extends Expression {
override def accept[T](visitor: AstVisitor[T]): T =
visitor.visit(this)
}
| giancosta86/Chronos | src/main/scala/info/gianlucacosta/chronos/ast/expressions/ReferenceValue.scala | Scala | apache-2.0 | 1,191 |
package ls
import scala.language.postfixOps
abstract class Props(resource: String) {
import scala.util.control.Exception.allCatch
trait Provider {
def get(k: String): String
}
object Env extends Provider {
def get(k: String) = System.getenv(k)
}
abstract class FallbackProvider(val fallback: Provider) extends Provider
case class JProps(resource: String) extends FallbackProvider(Env) {
lazy val props = {
val p = new java.util.Properties()
getClass().getResourceAsStream(resource) match {
case null => println("local resource %s not found. (it's okay fallback on env)" format resource)
case r => p.load(r)
}
p
}
def get(k: String) = props.getProperty(k) match {
case null => fallback.get(k)
case value => value
}
}
protected lazy val underlying = JProps(resource)
def get(name: String) = underlying.get(name) match {
case null => sys.error("undefined property %s" format name)
case value => value
}
def getInt(name: String) = allCatch.opt { get(name) toInt } match {
case None => sys.error("undefined int property %s" format name)
case Some(n) => n
}
def apply(name: String) = underlying.get(name) match {
case null => None
case value => Some(value)
}
def int(name: String) = apply(name).map(v => v.toInt)
}
object Props extends Props("/ls.properties")
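// Usage sketch (property names are hypothetical): lookups consult /ls.properties on
// the classpath first and fall back to environment variables of the same name.
//   Props.get("ls.host") // defined value, or sys.error if absent everywhere
//   Props("ls.host")     // Option[String]
//   Props.int("ls.port") // Option[Int]; throws if present but not numeric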
| softprops/ls | library/src/main/scala/props.scala | Scala | mit | 1,403 |
package slamdata.engine
import scalaz._
import scalaz.std.vector._
import scalaz.std.list._
import scalaz.std.indexedSeq._
import scalaz.std.anyVal._
import scalaz.syntax.monad._
import scalaz.std.option._
import SemanticError.{TypeError, MissingField, MissingIndex}
import NonEmptyList.nel
import Validation.{success, failure}
sealed trait Type { self =>
import Type._
import scalaz.std.option._
import scalaz.syntax.traverse._
final def & (that: Type) = Type.Product(this, that)
final def | (that: Type) = Type.Coproduct(this, that)
final def lub: Type = mapUp(self) {
case x : Coproduct => x.flatten.reduce(Type.lub)
}
final def glb: Type = mapUp(self) {
case x : Coproduct => x.flatten.reduce(Type.glb)
}
final def contains(that: Type): Boolean = Type.typecheck(self, that).fold(Function.const(false), Function.const(true))
final def objectLike: Boolean = this match {
case Const(value) => value.dataType.objectLike
case AnonField(_) => true
case NamedField(_, _) => true
case x : Product => x.flatten.toList.exists(_.objectLike)
case x : Coproduct => x.flatten.toList.forall(_.objectLike)
case _ => false
}
final def arrayType: Option[Type] = this match {
case Const(value) => value.dataType.arrayType
case AnonElem(tpe) => Some(tpe)
case IndexedElem(_, tpe) => Some(tpe)
case x : Product => x.flatten.toList.map(_.arrayType).sequenceU.map(types => types.reduce(Type.lub _))
case x : Coproduct => x.flatten.toList.map(_.arrayType).sequenceU.map(types => types.reduce(Type.lub _))
case _ => None
}
final def arrayLike: Boolean = this match {
case Const(value) => value.dataType.arrayLike
case AnonElem(_) => true
case IndexedElem(_, _) => true
case x : Product => x.flatten.toList.exists(_.arrayLike)
case x : Coproduct => x.flatten.toList.forall(_.arrayLike)
case _ => false
}
final def setLike: Boolean = this match {
case Const(value) => value.dataType.setLike
case Set(_) => true
case _ => false
}
final def objectField(field: Type): ValidationNel[SemanticError, Type] = {
val missingFieldType = Top | Bottom
if (Type.lub(field, Str) != Str) failure(nel(TypeError(Str, field), Nil))
else (field, this) match {
case (Str, Const(Data.Obj(map))) => success(map.values.map(_.dataType).foldLeft[Type](Top)(Type.lub _))
case (Const(Data.Str(field)), Const(Data.Obj(map))) =>
// TODO: import toSuccess as method on Option (via ToOptionOps)?
toSuccess(map.get(field).map(Const(_)))(nel(MissingField(field), Nil))
case (Str, AnonField(value)) => success(value)
case (Const(Data.Str(_)), AnonField(value)) => success(value)
case (Str, NamedField(name, value)) => success(value | missingFieldType)
case (Const(Data.Str(field)), NamedField(name, value)) =>
success(if (field == name) value else missingFieldType)
case (_, x : Product) => {
// Note: this is not simple recursion because of the way we interpret NamedField types
// when the field name doesn't match. Any number of those can be present, and are ignored,
// unless there is nothing else.
// Still, there's certainly a more elegant way.
val v = x.flatten.toList.flatMap( t => {
val ot = t.objectField(field)
t match {
case NamedField(_, _) if (ot == success(missingFieldType)) => Nil
case _ => ot :: Nil
}
})
v match {
case Nil => success(missingFieldType)
case _ => {
implicit val and = Type.TypeAndMonoid
v.reduce(_ +++ _)
}
}
}
case (_, x : Coproduct) => {
// Note: this is not simple recursion because of the way we interpret NamedField types
// when the field name doesn't match. Any number of those can be present, and are ignored,
// unless there is nothing else.
// Still, there's certainly a more elegant way.
val v = x.flatten.toList.flatMap( t => {
val ot = t.objectField(field)
t match {
case NamedField(_, _) if (ot == success(missingFieldType)) => Nil
case _ => ot :: Nil
}
})
v match {
case Nil => success(missingFieldType)
case _ => {
implicit val or = Type.TypeOrMonoid
(success(missingFieldType) :: v).reduce(_ +++ _)
}
}
}
case _ => failure(nel(TypeError(AnyObject, this), Nil))
}
}
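  // Behaviour sketch derived from the cases above (values are hypothetical):
  //   AnonField(Int).objectField(Str) // success(Int)
  //   Const(Data.Obj(Map("a" -> Data.Int(1)))).objectField(Const(Data.Str("a")))
  //     // success(Const(Data.Int(1)))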
final def arrayElem(index: Type): ValidationNel[SemanticError, Type] = {
if (Type.lub(index, Int) != Int) failure(nel(TypeError(Int, index), Nil))
else (index, this) match {
case (Const(Data.Int(index)), Const(Data.Arr(arr))) =>
arr.lift(index.toInt).map(data => success(Const(data))).getOrElse(failure(nel(MissingIndex(index.toInt), Nil)))
case (Int, Const(Data.Arr(arr))) => success(arr.map(_.dataType).reduce(_ | _))
case (Int, AnonElem(value)) => success(value)
case (Const(Data.Int(_)), AnonElem(value)) => success(value)
case (Int, IndexedElem(_, value)) => success(value)
case (Const(Data.Int(index1)), IndexedElem(index2, value)) if (index1.toInt == index2) => success(value)
case (_, x : Product) =>
// TODO: needs to ignore failures that are IndexedElems, similar to what's happening
// in objectField.
implicit val or = TypeOrMonoid
x.flatten.toList.map(_.arrayElem(index)).reduce(_ +++ _)
case (_, x : Coproduct) =>
implicit val lub = Type.TypeLubSemigroup
x.flatten.toList.map(_.arrayElem(index)).reduce(_ +++ _)
case _ => failure(nel(TypeError(AnyArray, this), Nil))
}
}
}
trait TypeInstances {
val TypeOrMonoid = new Monoid[Type] {
def zero = Type.Top
def append(v1: Type, v2: => Type) = (v1, v2) match {
case (Type.Top, that) => that
case (this0, Type.Top) => this0
case _ => v1 | v2
}
}
val TypeAndMonoid = new Monoid[Type] {
def zero = Type.Top
def append(v1: Type, v2: => Type) = (v1, v2) match {
case (Type.Top, that) => that
case (this0, Type.Top) => this0
case _ => v1 & v2
}
}
val TypeLubSemigroup = new Semigroup[Type] {
def append(f1: Type, f2: => Type): Type = Type.lub(f1, f2)
}
implicit val TypeShow = new Show[Type] {
override def show(v: Type) = Cord(v.toString) // TODO
}
}
case object Type extends TypeInstances {
private def fail[A](expected: Type, actual: Type, message: Option[String]): ValidationNel[TypeError, A] =
Validation.failure(NonEmptyList(TypeError(expected, actual, message)))
private def fail[A](expected: Type, actual: Type): ValidationNel[TypeError, A] = fail(expected, actual, None)
private def fail[A](expected: Type, actual: Type, msg: String): ValidationNel[TypeError, A] = fail(expected, actual, Some(msg))
private def succeed[A](v: A): ValidationNel[TypeError, A] = Validation.success(v)
def simplify(tpe: Type): Type = mapUp(tpe) {
case x : Product => {
val ts = x.flatten.toList.filter(_ != Top)
if (ts.contains(Bottom)) Bottom else Product(ts.distinct)
}
case x : Coproduct => {
val ts = x.flatten.toList.filter(_ != Bottom)
if (ts.contains(Top)) Top else Coproduct(ts.distinct)
}
case x => x
}
def glb(left: Type, right: Type): Type = (left, right) match {
case (left, right) if left == right => left
case (left, right) if left contains right => left
case (left, right) if right contains left => right
case _ => Bottom
}
def lub(left: Type, right: Type): Type = (left, right) match {
case (left, right) if left == right => left
case (left, right) if left contains right => right
case (left, right) if right contains left => left
case _ => Top
}
def typecheck(expected: Type, actual: Type): ValidationNel[TypeError, Unit] = (expected, actual) match {
    case (expected, actual) if (expected == actual) => succeed(())
case (Top, actual) => succeed(Unit)
case (Const(expected), actual) => typecheck(expected.dataType, actual)
case (expected, Const(actual)) => typecheck(expected, actual.dataType)
case (expected : Product, actual : Product) => typecheckPP(expected.flatten, actual.flatten)
case (expected : Product, actual : Coproduct) => typecheckPC(expected.flatten, actual.flatten)
case (expected : Coproduct, actual : Product) => typecheckCP(expected.flatten, actual.flatten)
case (expected : Coproduct, actual : Coproduct) => typecheckCC(expected.flatten, actual.flatten)
case (AnonField(expected), AnonField(actual)) => typecheck(expected, actual)
case (AnonField(expected), NamedField(name, actual)) => typecheck(expected, actual)
case (NamedField(name, expected), AnonField(actual)) => typecheck(expected, actual)
case (NamedField(name1, expected), NamedField(name2, actual)) if (name1 == name2) => typecheck(expected, actual)
case (AnonElem(expected), AnonElem(actual)) => typecheck(expected, actual)
case (AnonElem(expected), IndexedElem(idx, actual)) => typecheck(expected, actual)
case (IndexedElem(idx, expected), AnonElem(actual)) => typecheck(expected, actual)
case (IndexedElem(idx1, expected), IndexedElem(idx2, actual)) if (idx1 == idx2) => typecheck(expected, actual)
case (Set(expected), Set(actual)) => typecheck(expected, actual)
case (expected, actual : Coproduct) => typecheckPC(expected :: Nil, actual.flatten)
case (expected : Coproduct, actual) => typecheckCP(expected.flatten, actual :: Nil)
case (expected, actual : Product) => typecheckPP(expected :: Nil, actual.flatten)
case (expected : Product, actual) => typecheckPP(expected.flatten, actual :: Nil)
case _ => fail(expected, actual)
}
def children(v: Type): List[Type] = v match {
case Top => Nil
case Bottom => Nil
case Const(value) => value.dataType :: Nil
case Null => Nil
case Str => Nil
case Int => Nil
case Dec => Nil
case Bool => Nil
case Binary => Nil
case DateTime => Nil
case Interval => Nil
case Set(value) => value :: Nil
case AnonElem(value) => value :: Nil
case IndexedElem(index, value) => value :: Nil
case AnonField(value) => value :: Nil
case NamedField(name, value) => value :: Nil
case x : Product => x.flatten.toList
case x : Coproduct => x.flatten.toList
}
def foldMap[Z: Monoid](f: Type => Z)(v: Type): Z = Monoid[Z].append(f(v), Foldable[List].foldMap(children(v))(foldMap(f)))
def mapUp(v: Type)(f: PartialFunction[Type, Type]): Type = {
val f0 = f.orElse[Type, Type] {
case x => x
}
mapUpM[scalaz.Id.Id](v)(f0)
}
def mapUpM[F[_]: Monad](v: Type)(f: Type => F[Type]): F[Type] = {
def loop(v: Type): F[Type] = v match {
case Const(value) =>
for {
newType <- f(value.dataType)
newType2 <- if (newType != value.dataType) Monad[F].point(newType)
else f(v)
} yield newType2
case Set(value) => wrap(value, Set)
case AnonElem(value) => wrap(value, AnonElem)
case IndexedElem(idx, value) => wrap(value, IndexedElem(idx, _))
case AnonField(value) => wrap(value, AnonField)
case NamedField(name, value) => wrap(value, NamedField(name, _))
case x : Product =>
for {
xs <- Traverse[List].sequence(x.flatten.toList.map(loop _))
v2 <- f(Product(xs))
} yield v2
case x : Coproduct =>
for {
xs <- Traverse[List].sequence(x.flatten.toList.map(loop _))
v2 <- f(Coproduct(xs))
} yield v2
case _ => f(v)
}
def wrap(v0: Type, constr: Type => Type) =
for {
v1 <- loop(v0)
v2 <- f(constr(v1))
} yield v2
loop(v)
}
case object Top extends Type
case object Bottom extends Type
case class Const(value: Data) extends Type
sealed trait PrimitiveType extends Type
case object Null extends PrimitiveType
case object Str extends PrimitiveType
case object Int extends PrimitiveType
case object Dec extends PrimitiveType
case object Bool extends PrimitiveType
case object Binary extends PrimitiveType
case object DateTime extends PrimitiveType
case object Interval extends PrimitiveType
case class Set(value: Type) extends Type
case class AnonElem(value: Type) extends Type
case class IndexedElem(index: Int, value: Type) extends Type
case class AnonField(value: Type) extends Type
case class NamedField(name: String, value: Type) extends Type
case class Product(left: Type, right: Type) extends Type {
def flatten: Vector[Type] = {
def flatten0(v: Type): Vector[Type] = v match {
case Product(left, right) => flatten0(left) ++ flatten0(right)
case x => Vector(x)
}
flatten0(this)
}
override def hashCode = flatten.toSet.hashCode()
override def equals(that: Any) = that match {
case that : Product => this.flatten.toSet.equals(that.flatten.toSet)
case _ => false
}
}
object Product extends ((Type, Type) => Type) {
def apply(values: Seq[Type]): Type = {
if (values.length == 0) Top
else if (values.length == 1) values.head
else values.tail.foldLeft[Type](values.head)(_ & _)
}
}
case class Coproduct(left: Type, right: Type) extends Type {
def flatten: Vector[Type] = {
def flatten0(v: Type): Vector[Type] = v match {
case Coproduct(left, right) => flatten0(left) ++ flatten0(right)
case x => Vector(x)
}
flatten0(this)
}
override def hashCode = flatten.toSet.hashCode()
override def equals(that: Any) = that match {
case that : Coproduct => this.flatten.toSet.equals(that.flatten.toSet)
case _ => false
}
}
object Coproduct extends ((Type, Type) => Type) {
def apply(values: Seq[Type]): Type = {
if (values.length == 0) Bottom
else if (values.length == 1) values.head
else values.tail.foldLeft[Type](values.head)(_ | _)
}
}
private def exists(expected: Type, actuals: Seq[Type]): ValidationNel[TypeError, Unit] = actuals.headOption match {
case Some(head) => typecheck(expected, head) ||| exists(expected, actuals.tail)
case None => fail(expected, Product(actuals))
}
private def forall(expected: Type, actuals: Seq[Type]): ValidationNel[TypeError, Unit] = {
actuals.headOption match {
      case Some(head) => typecheck(expected, head) +++ forall(expected, actuals.tail)
      case None => Validation.success(())
}
}
private val typecheckPP = typecheck(_ +++ _, exists _)
private val typecheckPC = typecheck(_ +++ _, forall _)
private val typecheckCP = typecheck(_ ||| _, exists _)
private val typecheckCC = typecheck(_ ||| _, forall _)
private def typecheck(combine: (ValidationNel[TypeError, Unit], ValidationNel[TypeError, Unit]) => ValidationNel[TypeError, Unit],
check: (Type, Seq[Type]) => ValidationNel[TypeError, Unit]) = (expecteds: Seq[Type], actuals: Seq[Type]) => {
    expecteds.foldLeft[ValidationNel[TypeError, Unit]](Validation.success(())) {
case (acc, expected) => {
combine(acc, check(expected, actuals))
}
}
}
def makeObject(values: Iterable[(String, Type)]) = Product.apply(values.toList.map((NamedField.apply _).tupled))
def makeArray(values: List[Type]): Type = {
val consts = values.collect { case Const(data) => data }
if (consts.length == values.length) Const(Data.Arr(consts))
else Product(values.zipWithIndex.map(t => IndexedElem(t._2, t._1)))
}
val AnyArray = AnonElem(Top)
val AnyObject = AnonField(Top)
val AnySet = Set(Top)
val Numeric = Int | Dec
val Comparable = Numeric | Str | DateTime | Interval | Bool
}
| sellout/slamengine-old | src/main/scala/slamdata/engine/types.scala | Scala | agpl-3.0 | 15,979 |
package embed.randP
import com.tencent.angel.worker.task.{BaseTask, TaskContext}
import org.apache.commons.logging.LogFactory
import org.apache.hadoop.io.{LongWritable, Text}
import scala.collection.mutable.ArrayBuffer
/**
* Created by chris on 9/19/17.
*/
class RTrainTask(val ctx:TaskContext) extends BaseTask[LongWritable,Text,Text](ctx) {
private val LOG = LogFactory.getLog(classOf[RTrainTask])
override
def parse(key_in: LongWritable, value_in: Text): Text = ???
override
def preProcess(taskContext: TaskContext): Unit = {}
@throws[Exception]
def run(ctx:TaskContext) :Unit = {
val reader = ctx.getReader[LongWritable,Text]
var rows = new ArrayBuffer[Row]()
var did = 0
var N = 0
while(reader.nextKeyValue()) {
val text = reader.getCurrentValue
val row = new Row(text.toString)
if(row != null) {
rows.+= (row)
did += 1
N += row.len
}
}
reader.close()
val model = new RModel(ctx.getConf,ctx)
LOG.info(s"Feature=${model.F} Components=${model.R} "
+ s"S=${model.S} Batch=${model.batchSize} Entries=$N "
+ s"threadNum=${model.threadNum}")
val data = new Matrix(rows.length)
data.build(rows)
rows.clear()
val learner = new RLearner(ctx,model,data)
learner.scheduleInit()
learner.scheduleMultiply()
}
}
| LiteML/EmbedLib | src/main/scala/embed/randP/RTrainTask.scala | Scala | apache-2.0 | 1,353 |
package com.twitter.finagle.serverset2
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import org.scalatest.FunSuite
@RunWith(classOf[JUnitRunner])
class ServiceDiscovererTest extends FunSuite {
def ep(port: Int) = Endpoint(Array(null), "localhost", port, Int.MinValue, Endpoint.Status.Alive, port.toString)
test("ServiceDiscoverer.zipWithWeights") {
val port1 = 80 // not bound
val port2 = 53 // ditto
val ents = Seq[Entry](ep(port1), ep(port2), ep(3), ep(4))
val v1 = Vector(Seq(
Descriptor(Selector.Host("localhost", port1), 1.1, 1),
Descriptor(Selector.Host("localhost", port2), 1.4, 1),
Descriptor(Selector.Member("3"), 3.1, 1)))
val v2 = Vector(Seq(Descriptor(Selector.Member(port2.toString), 2.0, 1)))
val vecs = Set(v1, v2)
assert(ServiceDiscoverer.zipWithWeights(ents, vecs).toSet === Set(
ep(port1) -> 1.1,
ep(port2) -> 2.8,
ep(3) -> 3.1,
ep(4) -> 1.0))
}
}
| kingtang/finagle | finagle-serversets/src/test/scala/com/twitter/finagle/serverset2/ServiceDiscovererTest.scala | Scala | apache-2.0 | 971 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.api
import java.nio._
import kafka.utils._
import kafka.api.ApiUtils._
import kafka.cluster.BrokerEndPoint
import kafka.controller.LeaderIsrAndControllerEpoch
import kafka.network.{BoundedByteBufferSend, RequestChannel}
import kafka.common.ErrorMapping
import kafka.network.RequestChannel.Response
import collection.Set
object LeaderAndIsr {
val initialLeaderEpoch: Int = 0
val initialZKVersion: Int = 0
val NoLeader = -1
val LeaderDuringDelete = -2
}
case class LeaderAndIsr(var leader: Int, var leaderEpoch: Int, var isr: List[Int], var zkVersion: Int) {
def this(leader: Int, isr: List[Int]) = this(leader, LeaderAndIsr.initialLeaderEpoch, isr, LeaderAndIsr.initialZKVersion)
override def toString(): String = {
Json.encode(Map("leader" -> leader, "leader_epoch" -> leaderEpoch, "isr" -> isr))
}
}
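// Illustrative rendering only (the exact output depends on kafka.utils.Json and on
// map ordering): new LeaderAndIsr(1, List(1, 2)).toString might produce
//   {"leader":1,"leader_epoch":0,"isr":[1,2]}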
object PartitionStateInfo {
def readFrom(buffer: ByteBuffer): PartitionStateInfo = {
val controllerEpoch = buffer.getInt
val leader = buffer.getInt
val leaderEpoch = buffer.getInt
val isrSize = buffer.getInt
val isr = for(i <- 0 until isrSize) yield buffer.getInt
val zkVersion = buffer.getInt
val replicationFactor = buffer.getInt
val replicas = for(i <- 0 until replicationFactor) yield buffer.getInt
PartitionStateInfo(LeaderIsrAndControllerEpoch(LeaderAndIsr(leader, leaderEpoch, isr.toList, zkVersion), controllerEpoch),
replicas.toSet)
}
}
case class PartitionStateInfo(leaderIsrAndControllerEpoch: LeaderIsrAndControllerEpoch,
allReplicas: Set[Int]) {
def replicationFactor = allReplicas.size
def writeTo(buffer: ByteBuffer) {
buffer.putInt(leaderIsrAndControllerEpoch.controllerEpoch)
buffer.putInt(leaderIsrAndControllerEpoch.leaderAndIsr.leader)
buffer.putInt(leaderIsrAndControllerEpoch.leaderAndIsr.leaderEpoch)
buffer.putInt(leaderIsrAndControllerEpoch.leaderAndIsr.isr.size)
leaderIsrAndControllerEpoch.leaderAndIsr.isr.foreach(buffer.putInt(_))
buffer.putInt(leaderIsrAndControllerEpoch.leaderAndIsr.zkVersion)
buffer.putInt(replicationFactor)
allReplicas.foreach(buffer.putInt(_))
}
def sizeInBytes(): Int = {
val size =
4 /* epoch of the controller that elected the leader */ +
4 /* leader broker id */ +
4 /* leader epoch */ +
4 /* number of replicas in isr */ +
4 * leaderIsrAndControllerEpoch.leaderAndIsr.isr.size /* replicas in isr */ +
4 /* zk version */ +
4 /* replication factor */ +
allReplicas.size * 4
size
}
override def toString(): String = {
val partitionStateInfo = new StringBuilder
partitionStateInfo.append("(LeaderAndIsrInfo:" + leaderIsrAndControllerEpoch.toString)
partitionStateInfo.append(",ReplicationFactor:" + replicationFactor + ")")
partitionStateInfo.append(",AllReplicas:" + allReplicas.mkString(",") + ")")
partitionStateInfo.toString()
}
}
object LeaderAndIsrRequest {
val CurrentVersion = 0.shortValue
val IsInit: Boolean = true
val NotInit: Boolean = false
val DefaultAckTimeout: Int = 1000
def readFrom(buffer: ByteBuffer): LeaderAndIsrRequest = {
val versionId = buffer.getShort
val correlationId = buffer.getInt
val clientId = readShortString(buffer)
val controllerId = buffer.getInt
val controllerEpoch = buffer.getInt
val partitionStateInfosCount = buffer.getInt
val partitionStateInfos = new collection.mutable.HashMap[(String, Int), PartitionStateInfo]
for(i <- 0 until partitionStateInfosCount){
val topic = readShortString(buffer)
val partition = buffer.getInt
val partitionStateInfo = PartitionStateInfo.readFrom(buffer)
partitionStateInfos.put((topic, partition), partitionStateInfo)
}
val leadersCount = buffer.getInt
var leaders = Set[BrokerEndPoint]()
for (i <- 0 until leadersCount)
leaders += BrokerEndPoint.readFrom(buffer)
new LeaderAndIsrRequest(versionId, correlationId, clientId, controllerId, controllerEpoch, partitionStateInfos.toMap, leaders)
}
}
case class LeaderAndIsrRequest (versionId: Short,
correlationId: Int,
clientId: String,
controllerId: Int,
controllerEpoch: Int,
partitionStateInfos: Map[(String, Int), PartitionStateInfo],
leaders: Set[BrokerEndPoint])
extends RequestOrResponse(Some(RequestKeys.LeaderAndIsrKey)) {
def this(partitionStateInfos: Map[(String, Int), PartitionStateInfo], leaders: Set[BrokerEndPoint], controllerId: Int,
controllerEpoch: Int, correlationId: Int, clientId: String) = {
this(LeaderAndIsrRequest.CurrentVersion, correlationId, clientId,
controllerId, controllerEpoch, partitionStateInfos, leaders)
}
def writeTo(buffer: ByteBuffer) {
buffer.putShort(versionId)
buffer.putInt(correlationId)
writeShortString(buffer, clientId)
buffer.putInt(controllerId)
buffer.putInt(controllerEpoch)
buffer.putInt(partitionStateInfos.size)
for((key, value) <- partitionStateInfos){
writeShortString(buffer, key._1)
buffer.putInt(key._2)
value.writeTo(buffer)
}
buffer.putInt(leaders.size)
leaders.foreach(_.writeTo(buffer))
}
def sizeInBytes(): Int = {
var size =
2 /* version id */ +
4 /* correlation id */ +
(2 + clientId.length) /* client id */ +
4 /* controller id */ +
4 /* controller epoch */ +
4 /* number of partitions */
for((key, value) <- partitionStateInfos)
size += (2 + key._1.length) /* topic */ + 4 /* partition */ + value.sizeInBytes /* partition state info */
size += 4 /* number of leader brokers */
for(broker <- leaders)
size += broker.sizeInBytes /* broker info */
size
}
override def toString(): String = {
describe(true)
}
override def handleError(e: Throwable, requestChannel: RequestChannel, request: RequestChannel.Request): Unit = {
val responseMap = partitionStateInfos.map {
case (topicAndPartition, partitionAndState) => (topicAndPartition, ErrorMapping.codeFor(e.getClass.asInstanceOf[Class[Throwable]]))
}
val errorResponse = LeaderAndIsrResponse(correlationId, responseMap)
requestChannel.sendResponse(new Response(request, new BoundedByteBufferSend(errorResponse)))
}
override def describe(details: Boolean): String = {
val leaderAndIsrRequest = new StringBuilder
leaderAndIsrRequest.append("Name:" + this.getClass.getSimpleName)
leaderAndIsrRequest.append(";Version:" + versionId)
leaderAndIsrRequest.append(";Controller:" + controllerId)
leaderAndIsrRequest.append(";ControllerEpoch:" + controllerEpoch)
leaderAndIsrRequest.append(";CorrelationId:" + correlationId)
leaderAndIsrRequest.append(";ClientId:" + clientId)
leaderAndIsrRequest.append(";Leaders:" + leaders.mkString(","))
if(details)
leaderAndIsrRequest.append(";PartitionState:" + partitionStateInfos.mkString(","))
leaderAndIsrRequest.toString()
}
}
| crashlytics/kafka | core/src/main/scala/kafka/api/LeaderAndIsrRequest.scala | Scala | apache-2.0 | 7,967 |
class C {
class W
def getZ(w: W): Int = 123
def getZ(s: String): String = "text"
}
class A(f: C) {
private val w = new f.W
/*start*/f.getZ(w)/*end*/
}
//Int | ilinum/intellij-scala | testdata/typeInference/dependent/PrivateClassParameter.scala | Scala | apache-2.0 | 168 |
package controllers
import play.api.mvc.{Controller, Action}
import play.api.data.Form
import play.api.data.Forms._
import play.api.libs.json.Json
import models.Member
import models.CustomData
object ApiController extends Controller {
def exist(category: String, name: String) = Action {
val x = category match {
case "uname" => Right(Member.selectByUname(_))
case "email" => Right(Member.selectByEmail(_))
case _ => Left("no such request point.")
}
x.right.map { f =>
f(name) match {
case Some(member) => (Json.toJson(Map("exist" -> 1)))
case None => (Json.toJson(Map("exist" -> 0)))
}
} match {
case Right(json) => Ok(json)
case Left(message) => BadRequest(Json.toJson(Map("error" -> message)))
}
}
val cssJsForm = Form(tuple(
"token" -> text,
"purpose" -> text,
"contentType" -> text,
"content" -> text
))
def cssOrJsPost = Action {
implicit request => {
val (token, purpose, contentType, content) = cssJsForm.bindFromRequest.get
val uname = request.session.get("uname").getOrElse("")
val memberId = Member.selectByUname(uname).get.id
try {
if (token != (request.session.get("token").getOrElse(""))) throw new Exception("CSRFトークンが一致しません。")
contentType match {
case "css" => CustomData.saveCss(memberId.toLong, purpose, content)
case "js" => CustomData.saveJs(memberId.toLong, purpose, content)
case _ => throw new Exception("contentType が不正です。")
}
Ok(Json.toJson(Map("success" -> Json.toJson(1))))
} catch {
case e: Exception =>
BadRequest(Json.toJson(Map("success" -> Json.toJson(0), "message" -> Json.toJson("エラー: " + e))))
}
}
}
def css(purpose: String, name: String) = Action { getCssOrJs(purpose, name, "css") }
def js(purpose: String, name: String) = Action { getCssOrJs(purpose, name, "js") }
def jsPost = TODO
private def getCssOrJs(purpose: String, name: String, contentType: String) = {
    // TODO: returning the error as JSON would be better than throwing an exception here
    if (purpose != "list" && purpose != "page") throw new Exception("purpose must be either list or page.")
    if (contentType != "css" && contentType != "js") throw new Exception("contentType must be either css or js.")
val memberId = Member.selectByUname(name).get.id
Ok(Json.toJson(
Map(
"success" -> Json.toJson(1),
"code" -> Json.toJson(
if (contentType == "css") CustomData.loadCss(memberId, purpose) else CustomData.loadJs(memberId, purpose)
)
)))
}
}
| mahata/webnikki | src/app/controllers/ApiController.scala | Scala | mit | 2,788 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.streaming.api.python
import java.io.{ObjectInputStream, ObjectOutputStream}
import java.lang.reflect.Proxy
import java.util.{ArrayList => JArrayList, List => JList}
import scala.collection.JavaConverters._
import scala.language.existentials
import py4j.Py4JException
import org.apache.spark.SparkException
import org.apache.spark.api.java._
import org.apache.spark.internal.Logging
import org.apache.spark.rdd.RDD
import org.apache.spark.storage.StorageLevel
import org.apache.spark.streaming.{Duration, Interval, StreamingContext, Time}
import org.apache.spark.streaming.api.java._
import org.apache.spark.streaming.dstream._
import org.apache.spark.util.Utils
/**
* Interface for Python callback function which is used to transform RDDs
*/
private[python] trait PythonTransformFunction {
def call(time: Long, rdds: JList[_]): JavaRDD[Array[Byte]]
/**
* Get the failure, if any, in the last call to `call`.
*
* @return the failure message if there was a failure, or `null` if there was no failure.
*/
def getLastFailure: String
}
/**
* Interface for Python Serializer to serialize PythonTransformFunction
*/
private[python] trait PythonTransformFunctionSerializer {
def dumps(id: String): Array[Byte]
def loads(bytes: Array[Byte]): PythonTransformFunction
/**
* Get the failure, if any, in the last call to `dumps` or `loads`.
*
* @return the failure message if there was a failure, or `null` if there was no failure.
*/
def getLastFailure: String
}
/**
* Wraps a PythonTransformFunction (which is a Python object accessed through Py4J)
* so that it looks like a Scala function and can be transparently serialized and
* deserialized by Java.
*/
private[python] class TransformFunction(@transient var pfunc: PythonTransformFunction)
extends function.Function2[JList[JavaRDD[_]], Time, JavaRDD[Array[Byte]]] {
def apply(rdd: Option[RDD[_]], time: Time): Option[RDD[Array[Byte]]] = {
val rdds = List(rdd.map(JavaRDD.fromRDD(_)).orNull).asJava
Option(callPythonTransformFunction(time.milliseconds, rdds)).map(_.rdd)
}
def apply(rdd: Option[RDD[_]], rdd2: Option[RDD[_]], time: Time): Option[RDD[Array[Byte]]] = {
val rdds = List(rdd.map(JavaRDD.fromRDD(_)).orNull, rdd2.map(JavaRDD.fromRDD(_)).orNull).asJava
Option(callPythonTransformFunction(time.milliseconds, rdds)).map(_.rdd)
}
// for function.Function2
def call(rdds: JList[JavaRDD[_]], time: Time): JavaRDD[Array[Byte]] = {
callPythonTransformFunction(time.milliseconds, rdds)
}
private def callPythonTransformFunction(time: Long, rdds: JList[_]): JavaRDD[Array[Byte]] = {
val resultRDD = pfunc.call(time, rdds)
val failure = pfunc.getLastFailure
if (failure != null) {
throw new SparkException("An exception was raised by Python:\n" + failure)
}
resultRDD
}
private def writeObject(out: ObjectOutputStream): Unit = Utils.tryOrIOException {
val bytes = PythonTransformFunctionSerializer.serialize(pfunc)
out.writeInt(bytes.length)
out.write(bytes)
}
private def readObject(in: ObjectInputStream): Unit = Utils.tryOrIOException {
val length = in.readInt()
val bytes = new Array[Byte](length)
in.readFully(bytes)
pfunc = PythonTransformFunctionSerializer.deserialize(bytes)
}
}
/**
* Helpers for PythonTransformFunctionSerializer
*
 * PythonTransformFunctionSerializer is logically a singleton that happens to be
 * implemented as a Python object.
*/
private[python] object PythonTransformFunctionSerializer {
/**
* A serializer in Python, used to serialize PythonTransformFunction
*/
private var serializer: PythonTransformFunctionSerializer = _
/*
* Register a serializer from Python, should be called during initialization
*/
def register(ser: PythonTransformFunctionSerializer): Unit = synchronized {
serializer = ser
}
def serialize(func: PythonTransformFunction): Array[Byte] = synchronized {
require(serializer != null, "Serializer has not been registered!")
// get the id of PythonTransformFunction in py4j
val h = Proxy.getInvocationHandler(func.asInstanceOf[Proxy])
val f = h.getClass().getDeclaredField("id")
f.setAccessible(true)
val id = f.get(h).asInstanceOf[String]
val results = serializer.dumps(id)
val failure = serializer.getLastFailure
if (failure != null) {
throw new SparkException("An exception was raised by Python:\n" + failure)
}
results
}
def deserialize(bytes: Array[Byte]): PythonTransformFunction = synchronized {
require(serializer != null, "Serializer has not been registered!")
val pfunc = serializer.loads(bytes)
val failure = serializer.getLastFailure
if (failure != null) {
throw new SparkException("An exception was raised by Python:\n" + failure)
}
pfunc
}
}
/**
* Helper functions, which are called from Python via Py4J.
*/
private[streaming] object PythonDStream {
/**
   * PythonTransformFunctionSerializer.register() cannot be accessed directly via Py4J;
   * attempting to do so fails with "Py4JError: PythonTransformFunctionSerializerregister does not exist in the JVM".
*/
def registerSerializer(ser: PythonTransformFunctionSerializer): Unit = {
PythonTransformFunctionSerializer.register(ser)
}
/**
   * Helper function for DStream.foreachRDD(); it cannot be named `foreachRDD`,
   * as that would confuse py4j.
*/
def callForeachRDD(jdstream: JavaDStream[Array[Byte]], pfunc: PythonTransformFunction): Unit = {
    val func = new TransformFunction(pfunc)
jdstream.dstream.foreachRDD((rdd, time) => func(Some(rdd), time))
}
/**
* convert list of RDD into queue of RDDs, for ssc.queueStream()
*/
def toRDDQueue(rdds: JArrayList[JavaRDD[Array[Byte]]]): java.util.Queue[JavaRDD[Array[Byte]]] = {
val queue = new java.util.LinkedList[JavaRDD[Array[Byte]]]
rdds.asScala.foreach(queue.add)
queue
}
/**
* Stop [[StreamingContext]] if the Python process crashes (E.g., OOM) in case the user cannot
* stop it in the Python side.
*/
def stopStreamingContextIfPythonProcessIsDead(e: Throwable): Unit = {
// These two special messages are from:
// scalastyle:off
// https://github.com/bartdag/py4j/blob/5cbb15a21f857e8cf334ce5f675f5543472f72eb/py4j-java/src/main/java/py4j/CallbackClient.java#L218
// https://github.com/bartdag/py4j/blob/5cbb15a21f857e8cf334ce5f675f5543472f72eb/py4j-java/src/main/java/py4j/CallbackClient.java#L340
// scalastyle:on
if (e.isInstanceOf[Py4JException] &&
("Cannot obtain a new communication channel" == e.getMessage ||
"Error while obtaining a new communication channel" == e.getMessage)) {
// Start a new thread to stop StreamingContext to avoid deadlock.
new Thread("Stop-StreamingContext") with Logging {
setDaemon(true)
override def run(): Unit = {
logError(
"Cannot connect to Python process. It's probably dead. Stopping StreamingContext.", e)
StreamingContext.getActive().foreach(_.stop(stopSparkContext = false))
}
}.start()
}
}
}
/**
* Base class for PythonDStream with some common methods
*/
private[python] abstract class PythonDStream(
parent: DStream[_],
pfunc: PythonTransformFunction)
extends DStream[Array[Byte]] (parent.ssc) {
val func = new TransformFunction(pfunc)
override def dependencies: List[DStream[_]] = List(parent)
override def slideDuration: Duration = parent.slideDuration
val asJavaDStream: JavaDStream[Array[Byte]] = JavaDStream.fromDStream(this)
}
/**
* Transformed DStream in Python.
*/
private[python] class PythonTransformedDStream (
parent: DStream[_],
pfunc: PythonTransformFunction)
extends PythonDStream(parent, pfunc) {
override def compute(validTime: Time): Option[RDD[Array[Byte]]] = {
val rdd = parent.getOrCompute(validTime)
if (rdd.isDefined) {
func(rdd, validTime)
} else {
None
}
}
}
/**
* Transformed from two DStreams in Python.
*/
private[python] class PythonTransformed2DStream(
parent: DStream[_],
parent2: DStream[_],
pfunc: PythonTransformFunction)
extends DStream[Array[Byte]] (parent.ssc) {
val func = new TransformFunction(pfunc)
override def dependencies: List[DStream[_]] = List(parent, parent2)
override def slideDuration: Duration = parent.slideDuration
override def compute(validTime: Time): Option[RDD[Array[Byte]]] = {
val empty: RDD[_] = ssc.sparkContext.emptyRDD
val rdd1 = parent.getOrCompute(validTime).getOrElse(empty)
val rdd2 = parent2.getOrCompute(validTime).getOrElse(empty)
func(Some(rdd1), Some(rdd2), validTime)
}
val asJavaDStream: JavaDStream[Array[Byte]] = JavaDStream.fromDStream(this)
}
/**
* similar to StateDStream
*/
private[python] class PythonStateDStream(
parent: DStream[Array[Byte]],
reduceFunc: PythonTransformFunction,
initialRDD: Option[RDD[Array[Byte]]])
extends PythonDStream(parent, reduceFunc) {
def this(
parent: DStream[Array[Byte]],
reduceFunc: PythonTransformFunction) = this(parent, reduceFunc, None)
def this(
parent: DStream[Array[Byte]],
reduceFunc: PythonTransformFunction,
initialRDD: JavaRDD[Array[Byte]]) = this(parent, reduceFunc, Some(initialRDD.rdd))
super.persist(StorageLevel.MEMORY_ONLY)
override val mustCheckpoint = true
override def compute(validTime: Time): Option[RDD[Array[Byte]]] = {
val lastState = getOrCompute(validTime - slideDuration)
val rdd = parent.getOrCompute(validTime)
if (rdd.isDefined) {
func(lastState.orElse(initialRDD), rdd, validTime)
} else {
lastState
}
}
}
/**
* similar to ReducedWindowedDStream
*/
private[python] class PythonReducedWindowedDStream(
parent: DStream[Array[Byte]],
preduceFunc: PythonTransformFunction,
@transient private val pinvReduceFunc: PythonTransformFunction,
_windowDuration: Duration,
_slideDuration: Duration)
extends PythonDStream(parent, preduceFunc) {
super.persist(StorageLevel.MEMORY_ONLY)
override val mustCheckpoint: Boolean = true
val invReduceFunc: TransformFunction = new TransformFunction(pinvReduceFunc)
def windowDuration: Duration = _windowDuration
override def slideDuration: Duration = _slideDuration
override def parentRememberDuration: Duration = rememberDuration + windowDuration
override def compute(validTime: Time): Option[RDD[Array[Byte]]] = {
val currentTime = validTime
val current = new Interval(currentTime - windowDuration, currentTime)
val previous = current - slideDuration
// _____________________________
// | previous window _________|___________________
// |___________________| current window | --------------> Time
// |_____________________________|
//
// |________ _________| |________ _________|
// | |
// V V
// old RDDs new RDDs
//
val previousRDD = getOrCompute(previous.endTime)
    // For a small window, reducing once (full recompute) is better than reducing twice (subtract + add).
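    // (Added note) The incremental branch below computes
    //   newWindow = previousWindow - oldRDDs + newRDDs
    // using the inverse-reduce function to subtract RDDs that slid out of the
    // window. It only pays off once the window spans at least five slide
    // intervals; otherwise the full recompute in the else-branch is cheaper.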
if (pinvReduceFunc != null && previousRDD.isDefined
&& windowDuration >= slideDuration * 5) {
// subtract the values from old RDDs
val oldRDDs = parent.slice(previous.beginTime + parent.slideDuration, current.beginTime)
val subtracted = if (oldRDDs.size > 0) {
invReduceFunc(previousRDD, Some(ssc.sc.union(oldRDDs)), validTime)
} else {
previousRDD
}
// add the RDDs of the reduced values in "new time steps"
val newRDDs = parent.slice(previous.endTime + parent.slideDuration, current.endTime)
if (newRDDs.size > 0) {
func(subtracted, Some(ssc.sc.union(newRDDs)), validTime)
} else {
subtracted
}
} else {
// Get the RDDs of the reduced values in current window
val currentRDDs = parent.slice(current.beginTime + parent.slideDuration, current.endTime)
if (currentRDDs.size > 0) {
func(None, Some(ssc.sc.union(currentRDDs)), validTime)
} else {
None
}
}
}
}
| maropu/spark | streaming/src/main/scala/org/apache/spark/streaming/api/python/PythonDStream.scala | Scala | apache-2.0 | 12,991 |
package utils.pageobjects.tests
import org.specs2.mutable._
import utils.WithApplication
import utils.pageobjects.{PageObjectsContext, CircumstancesPageFactory, UnknownPage, ClaimPageFactory}
import utils.pageobjects.s_eligibility.GBenefitsPage
import utils.pageobjects.circumstances.start_of_process.GCircsYourDetailsPage
/**
* @author Jorge Migueis
* Date: 12/07/2013
*/
class PageFactorySpec extends Specification {
section("unit")
"The PageFactory" should {
"Return an UnknownPage if it does not recognise title" in new WithApplication with MockPageContext {
val newPage = ClaimPageFactory buildPageFromUrl("/unknown",PageObjectsContext(browser))
newPage must beAnInstanceOf[UnknownPage]
}
"Return a BenefitPage if provided Benefits page title" in new WithApplication with MockPageContext {
val newPage = ClaimPageFactory buildPageFromUrl(GBenefitsPage.url,PageObjectsContext(browser))
newPage must beAnInstanceOf[GBenefitsPage]
}
"Return an AboutYouPage if provided AboutYouPage page title" in new WithApplication with MockPageContext {
val newPage = CircumstancesPageFactory buildPageFromUrl(GCircsYourDetailsPage.url,PageObjectsContext(browser))
newPage must beAnInstanceOf[GCircsYourDetailsPage]
}
}
section("unit")
}
| Department-for-Work-and-Pensions/ClaimCapture | c3/test/utils/pageobjects/tests/PageFactorySpec.scala | Scala | mit | 1,385 |
object main extends App {
/*
* Sealed classes:
* A sealed class cannot have any new subclasses added
* expect the ones defined in the same file.
*
*/
sealed abstract class Expr
case class Var(name: String) extends Expr
case class Number(num: Double) extends Expr
case class UnOp(operator: String, arg: Expr) extends Expr
case class BinOp(operator: String, left: Expr, right: Expr) extends Expr
/*
* Defining a pattern that does not take into consideration
* all possibilities.
* Scala compile will warn about a non-exhaustive match pattern.
*/
def describe(e: Expr): String = e match {
case Number(_) => "a number"
case Var(_) => "a variable"
}
/*
* The warning can be silenced:
*/
def describe2(e: Expr): String = (e: @unchecked) match {
case Number(_) => "a number"
case Var(_) => "a variable"
}
println(describe(Var("X")))
println(describe(Number(10.0)))
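  /*
   * (Added demonstration) The @unchecked annotation only silences the
   * compile-time warning; an unhandled case still fails at runtime:
   */
  try describe2(UnOp("-", Number(1.0)))
  catch { case e: MatchError => println("MatchError, as expected: " + e) }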
} | arcyfelix/Courses | 18-10-18-Programming-in-Scala-by-Martin-Odersky-Lex-Spoon-and-Bill-Venners/43-SealedClasses/src/main.scala | Scala | apache-2.0 | 952 |
package models.user
import play.api.libs.json._
import play.api.libs.functional.syntax._
import reactivemongo.bson._
import services.dao.UtilBson
case class ProviderUser(
id: String,
socialType: String,
token:Option[SkimboToken],
username: Option[String] = None,
name: Option[String] = None,
description: Option[String] = None,
avatar: Option[String] = None)
object ProviderUser {
implicit val writer = (
(__ \\ "id").write[String] and
(__ \\ "socialType").write[String] and
(__ \\ "token").write[Option[SkimboToken]] and
(__ \\ "username").write[Option[String]] and
(__ \\ "name").write[Option[String]] and
(__ \\ "description").write[Option[String]] and
(__ \\ "avatar").write[Option[String]]
)(unlift(ProviderUser.unapply))
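  // (Added sketch) With this Writes in implicit scope, serialization is simply:
  //   Json.toJson(providerUser)  // {"id": ..., "socialType": ..., "token": ..., ...}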
def toBSON(distant: ProviderUser) = {
BSONDocument(
"id" -> BSONString(distant.id),
"social" -> BSONString(distant.socialType),
"token" -> SkimboToken.toBSON(distant.token.getOrElse(SkimboToken(""))))
}
def fromBSON(d: BSONDocument) = {
ProviderUser(
d.getAs[String]("id").get,
d.getAs[String]("social").get,
SkimboToken.fromBSON(d.getAs[BSONDocument]("token").get)
)
}
} | Froggies/Skimbo | app/models/user/ProviderUser.scala | Scala | agpl-3.0 | 1,206 |
// Code generated by sbt-mavgen. Manual edits will be overwritten
package scavlink.message.enums
/**
* Navigation filter mode
*/
object UalbertaNavMode extends Enumeration {
val _UNKNOWN = Value(0)
val AHRS_INIT = Value(1)
/**
* AHRS mode
*/
val AHRS = Value(2)
/**
* INS/GPS initialization mode
*/
val INS_GPS_INIT = Value(3)
/**
* INS/GPS mode
*/
val INS_GPS = Value(4)
}
| nickolasrossi/scavlink | src/main/scala/scavlink/message/enums/UalbertaNavMode.scala | Scala | mit | 411 |
package inloopio.math.algebra
/**
* The basic interface including numerous convenience functions <p/> NOTE: All implementing classes must have a
* constructor that takes an int for cardinality and a no-arg constructor that can be used for marshalling the Writable
* instance <p/> NOTE: Implementations may choose to reuse the Vector.Element in the Iterable methods
*/
trait Vector extends Iterable[Vector.Element] with Cloneable {
type Element = Vector.Element
/**
* @return a formatted String suitable for output
*/
def asFormatString: String
/**
* Assign the value to all elements of the receiver
*
* @param value a Double value
* @return the modified receiver
*/
def assign(value: Double): Vector
/**
* Assign the values to the receiver
*
* @param values a Double[] of values
* @return the modified receiver
* @throws CardinalityException if the cardinalities differ
*/
def assign(values: Array[Double]): Vector
/**
* Assign the other vector values to the receiver
*
* @param other a Vector
* @return the modified receiver
* @throws CardinalityException if the cardinalities differ
*/
def assign(other: Vector): Vector
/**
* Apply the function to each element of the receiver
*
* @param function a Double => Double to apply
* @return the modified receiver
*/
def assign(function: Double => Double): Vector
/**
* Apply the function to each element of the receiver and the corresponding element of the other argument
*
* @param other a Vector containing the second arguments to the function
* @param function a (Double, Double) => Double to apply
* @return the modified receiver
* @throws CardinalityException if the cardinalities differ
*/
def assign(other: Vector, function: (Double, Double) => Double): Vector
/**
* Apply the function to each element of the receiver, using the y value as the second argument of the (Double, Double) => Double
*
* @param f a (Double, Double) => Double to be applied
* @param y a Double value to be argument to the function
* @return the modified receiver
*/
def assign(f: (Double, Double) => Double, y: Double): Vector
/**
* Return the cardinality of the recipient (the maximum number of values)
*
* @return an int
*/
def size: Int
/**
* @return true iff this implementation should be considered dense -- that it explicitly
* represents every value
*/
def isDense: Boolean
/**
* @return true iff this implementation should be considered to be iterable in index order in an efficient way.
* In particular this implies that {@link #iterator()} and {@link #iterateNonZero()} return elements
* in ascending order by index.
*/
def isSequentialAccess: Boolean
/**
* Return a copy of the recipient
*
* @return a new Vector
*/
override def clone: Vector = {
    // The Scala compiler resolves calls to `clone` made outside the protected
    // scope to the protected Object.clone rather than this override, reporting
    // errors such as "method clone in class Object cannot be accessed in ..."
    // and "Access to protected method clone not permitted because ...".
    // To bypass that, concrete implementations must override this method; the
    // default implementation simply refuses with the following statement:
throw new CloneNotSupportedException
}
/**
   * Iterates over all elements. <p/> NOTE: Implementations may choose to reuse the Element returned for performance
* reasons, so if you need a copy of it, you should call {@link #getElement(int)} for the given index
*
* @return An {@link Iterator} over all elements
*/
def iterator: Iterator[Element]
/**
* Iterates over all non-zero elements. <p/> NOTE: Implementations may choose to reuse the Element returned for
* performance reasons, so if you need a copy of it, you should call {@link #getElement(int)} for the given index
*
* @return An {@link Iterator} over all non-zero elements
*/
def iterateNonZero: Iterator[Element]
/**
* Return an object of Vector.Element representing an element of this Vector. Useful when designing new iterator
* types.
*
* @param index Index of the Vector.Element required
* @return The Vector.Element Object
*/
def getElement(index: Int): Element
/**
* Return a new vector containing the values of the recipient divided by the argument
*
* @param x a Double value
* @return a new Vector
*/
def divide(x: Double): Vector
/**
* Return the dot product of the recipient and the argument
*
* @param x a Vector
* @return a new Vector
* @throws CardinalityException if the cardinalities differ
*/
def dot(x: Vector): Double
/**
* Return the value at the given index
*
* @param index an int index
* @return the Double at the index
* @throws IndexException if the index is out of bounds
*/
def get(index: Int): Double
/**
* Return the value at the given index, without checking bounds
*
* @param index an int index
* @return the Double at the index
*/
def apply(index: Int): Double
/**
* Return an empty vector of the same underlying class as the receiver
*
* @return a Vector
*/
def like(): Vector
/**
* Return a new vector containing the element by element difference of the recipient and the argument
*
* @param x a Vector
* @return a new Vector
* @throws CardinalityException if the cardinalities differ
*/
def minus(x: Vector): Vector
/**
* Return a new vector containing the normalized (L_2 norm) values of the recipient
*
* @return a new Vector
*/
def normalize: Vector
/**
* Return a new Vector containing the normalized (L_power norm) values of the recipient. <p/> See
* http://en.wikipedia.org/wiki/Lp_space <p/> Technically, when 0 < power < 1, we don't have a norm, just a metric,
* but we'll overload this here. <p/> Also supports power == 0 (number of non-zero elements) and power = {@link
* Double#POSITIVE_INFINITY} (max element). Again, see the Wikipedia page for more info
*
* @param power The power to use. Must be >= 0. May also be {@link Double#POSITIVE_INFINITY}. See the Wikipedia link
* for more on this.
* @return a new Vector x such that norm(x, power) == 1
*/
def normalize(power: Double): Vector
/**
* Return a new vector containing the log(1 + entry)/ L_2 norm values of the recipient
*
* @return a new Vector
*/
def logNormalize: Vector
/**
* Return a new Vector with a normalized value calculated as log_power(1 + entry)/ L_power norm. <p/>
*
* @param power The power to use. Must be > 1. Cannot be {@link Double#POSITIVE_INFINITY}.
* @return a new Vector
*/
def logNormalize(power: Double): Vector
/**
   * Return the k-norm of the vector. <p/> See http://en.wikipedia.org/wiki/Lp_space <p/> Technically, when
   * 0 < power < 1, we don't have a norm, just a metric, but we'll overload this here. Also supports power == 0 (number of
* non-zero elements) and power = {@link Double#POSITIVE_INFINITY} (max element). Again, see the Wikipedia page for
* more info.
*
* @param power The power to use.
* @see #normalize(Double)
*/
def norm(power: Double): Double
/** @return The minimum value in the Vector */
def minValue: Double
/** @return The index of the minimum value */
def minValueIndex: Int
/** @return The maximum value in the Vector */
def maxValue: Double
/** @return The index of the maximum value */
def maxValueIndex: Int
/**
* Return a new vector containing the sum of each value of the recipient and the argument
*
* @param x a Double
* @return a new Vector
*/
def plus(x: Double): Vector
/**
* Return a new vector containing the element by element sum of the recipient and the argument
*
* @param x a Vector
* @return a new Vector
* @throws CardinalityException if the cardinalities differ
*/
def plus(x: Vector): Vector
/**
* Set the value at the given index
*
* @param index an int index into the receiver
* @param value a Double value to set
* @throws IndexException if the index is out of bounds
*/
def set(index: Int, value: Double)
/**
* Set the value at the given index, without checking bounds
*
* @param index an int index into the receiver
* @param value a Double value to set
*/
def update(index: Int, value: Double)
/**
* Return the number of values in the recipient which are not the default value. For instance, for a
* sparse vector, this would be the number of non-zero values.
*
* @return an int
*/
def getNumNondefaultElements: Int
/**
* Return a new vector containing the product of each value of the recipient and the argument
*
* @param x a Double argument
* @return a new Vector
*/
def times(x: Double): Vector
/**
* Return a new vector containing the element-wise product of the recipient and the argument
*
* @param x a Vector argument
* @return a new Vector
* @throws CardinalityException if the cardinalities differ
*/
def times(x: Vector): Vector
/**
* Return a new vector containing the subset of the recipient
*
* @param offset an int offset into the receiver
* @param length the cardinality of the desired result
* @return a new Vector
* @throws CardinalityException if the length is greater than the cardinality of the receiver
* @throws IndexException if the offset is negative or the offset+length is outside of the receiver
*/
def viewPart(offset: Int, length: Int): Vector
/**
* Return the sum of all the elements of the receiver
*
* @return a Double
*/
def zSum: Double
/**
* Return the cross product of the receiver and the other vector
*
* @param other another Vector
* @return a Matrix
*/
def cross(other: Vector): Matrix
/*
* Need stories for these but keeping them here for now.
*/
// void getNonZeros(IntArrayList jx, DoubleArrayList values)
// void foreachNonZero(IntDoubleFunction f)
// (Double, Double) => Double map)
// NewVector assign(Vector y, (Double, Double) => Double function, IntArrayList
// nonZeroIndexes)
/**
* Examples speak louder than words: aggregate(plus, pow(2)) is another way to say
   * getLengthSquared(), aggregate(max, abs) is norm(Double.POSITIVE_INFINITY). To sum all of the positive values,
* aggregate(plus, max(0)).
* @param aggregator used to combine the current value of the aggregation with the result of map.apply(nextValue)
* @param map a function to apply to each element of the vector in turn before passing to the aggregator
* @return the final aggregation
*/
def aggregate(aggregator: (Double, Double) => Double, map: Double => Double): Double
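  // (Added examples, assuming some concrete Vector `v`):
  //   v.aggregate(_ + _, x => x * x)              // == v.getLengthSquared
  //   v.aggregate(math.max, math.abs)             // == v.norm(Double.PositiveInfinity)
  //   v.aggregate(_ + _, x => math.max(x, 0.0))   // sum of the positive values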
/**
* <p>Generalized inner product - take two vectors, iterate over them both, using the combiner to combine together
* (and possibly map in some way) each pair of values, which are then aggregated with the previous accumulated
* value in the combiner.</p>
* <p>
* Example: dot(other) could be expressed as aggregate(other, Plus, Times), and kernelized inner products (which
* are symmetric on the indices) work similarly.
* @param other a vector to aggregate in combination with
* @param aggregator
* @param combiner
* @return the final aggregation
*/
def aggregate(other: Vector, aggregator: (Double, Double) => Double, combiner: (Double, Double) => Double): Double
/** Return the sum of squares of all elements in the vector. Square root of this value is the length of the vector. */
def getLengthSquared: Double
/** Get the square of the distance between this vector and the other vector. */
def getDistanceSquared(v: Vector): Double
}
object Vector {
/**
* A holder for information about a specific item in the Vector. <p/> When using with an Iterator, the implementation
* may choose to reuse this element, so you may need to make a copy if you want to keep it
*/
trait Element {
/** @return the value of this vector element. */
def get: Double
/** @return the index of this vector element. */
def index: Int
/** @param value Set the current element to value. */
def set(value: Double)
}
} | dcaoyuan/inloopio-libs | inloopio-math/src/main/scala/inloopio/math/algebra/Vector.scala | Scala | bsd-3-clause | 12,352 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import sbt._
import Keys._
import Process._
import scala.xml.{Node, Elem}
import scala.xml.transform.{RewriteRule, RuleTransformer}
object KafkaHttpBuild extends Build {
val buildNumber = SettingKey[String]("build-number", "Build number defaults to $BUILD_NUMBER environment variable")
val releaseName = SettingKey[String]("release-name", "the full name of this release")
val commonSettings = Seq(
organization := "com.rackspace",
pomExtra :=
<parent>
      <groupId>com.rackspace</groupId>
<artifactId>rackspace</artifactId>
<version>10</version>
</parent>
<licenses>
<license>
<name>Apache 2</name>
<url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
<distribution>repo</distribution>
</license>
</licenses>,
scalacOptions ++= Seq("-deprecation", "-unchecked", "-g:none"),
crossScalaVersions := Seq("2.9.1", "2.9.2"),
scalaVersion := "2.9.2",
version := "0.0.1",
publishTo := Some("Apache Maven Repo" at "https://repository.apache.org/service/local/staging/deploy/maven2"),
credentials += Credentials(Path.userHome / ".m2" / ".credentials"),
buildNumber := System.getProperty("build.number", ""),
version <<= (buildNumber, version) { (build, version) => if (build == "") version else version + "+" + build},
releaseName <<= (name, version, scalaVersion) {(name, version, scalaVersion) => name + "_" + scalaVersion + "-" + version},
javacOptions ++= Seq("-Xlint:unchecked", "-source", "1.5"),
parallelExecution in Test := false, // Prevent tests from overrunning each other
libraryDependencies ++= Seq(
"log4j" % "log4j" % "1.2.15",
"net.sf.jopt-simple" % "jopt-simple" % "3.2",
"org.slf4j" % "slf4j-simple" % "1.6.4",
"org.eclipse.jetty" % "jetty-server" % "8.1.14.v20131031",
"org.eclipse.jetty" % "jetty-servlet" % "8.1.14.v20131031",
"org.mongodb" % "mongo-java-driver" % "2.11.3",
"org.apache.kafka" % "kafka_2.9.2" % "0.8.0",
"org.slf4j" % "slf4j-api" % "1.6.4",
"org.slf4j" % "slf4j-simple" % "1.6.4"
),
    // The issue: going from log4j 1.2.14 to 1.2.15, the developers added features that pull in
    // dependencies on various sun and javax packages, so those modules are excluded below.
ivyXML := <dependencies>
<exclude module="javax"/>
<exclude module="jmxri"/>
<exclude module="jmxtools"/>
<exclude module="mail"/>
<exclude module="jms"/>
<dependency org="org.apache.kafka" name="kafka_2.9.2" rev="0.8.0"></dependency>
<dependency org="org.apache.zookeeper" name="zookeeper" rev="3.3.4">
<exclude org="log4j" module="log4j"/>
<exclude org="jline" module="jline"/>
</dependency>
<dependency org="com.timgroup" name="java-statsd-client" rev="2.0.0"/>
<dependency org="org.eclipse.jetty" name="jetty-server" rev="8.1.14.v20131031">
</dependency>
<dependency org="org.eclipse.jetty" name="jetty-servlet" rev="8.1.14.v20131031">
</dependency>
<dependency org="org.mongodb" name="mongo-java-driver" rev="2.11.3">
</dependency>
</dependencies>
)
libraryDependencies <+= scalaVersion("org.scala-lang" % "scala-compiler" % _ )
val release = TaskKey[Unit]("release", "Creates a deployable release directory file with dependencies, config, and scripts.")
val releaseTask = release <<= ( packageBin in Compile, dependencyClasspath in Runtime, exportedProducts in Compile,
target, releaseName) map { (packageBin, deps, products, target, releaseName) =>
// NOTE: explicitly exclude sbt-launch.jar dep here, because it can use different scala version and if copied to jars folder will break the build
// Found the hard way :-(
val jarFiles = deps.files.filter(f => !products.files.contains(f) && f.getName.endsWith(".jar") && f.getName != "sbt-launch.jar")
val destination = target / "RELEASE" / releaseName
IO.copyFile(packageBin, destination / packageBin.getName)
IO.copy(jarFiles.map { f => (f, destination / "libs" / f.getName) })
IO.copyDirectory(file("config"), destination / "config")
IO.copyDirectory(file("bin"), destination / "bin")
for {file <- (destination / "bin").listFiles} { file.setExecutable(true, true) }
}
lazy val kafkaHttp = Project(id = "KafkaHttp", base = file(".")).settings((commonSettings ++ releaseTask): _*)
}
| mailgun/kafka-http | project/Build.scala | Scala | apache-2.0 | 5,288 |
package com.warningrc.test.scalatest
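// (Added notes) This file demonstrates Scala's scoped access qualifiers:
//   private[spark]        -- visible anywhere inside the enclosing `spark` package
//   protected[navigation] -- protected, and also visible inside `navigation`
//   private[Navigator]    -- visible only within the Navigator class itself
//   private[this]         -- object-private: accessible only on the same instance
//   private[launch]       -- visible only inside the `launch` package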
package spark {
package navigation {
private[spark] class Navigator {
protected[navigation] def useStarChart(): Unit = {
}
class LegOfJourney {
private[Navigator] val distance = 100
}
private[this] var speed = 200
}
}
package launch {
import navigation._
object Vehicle {
private[launch] val guide = new Navigator
}
}
}
class ScalaPackageAdvanced {
import ScalaPackageAdvanced._
private[ScalaPackageAdvanced] def canMakeItTrue = power > 10001
private def thisName = name
}
object ScalaPackageAdvanced {
private[ScalaPackageAdvanced] def power = 10000
private def name = "name"
def makeItTrue(p: ScalaPackageAdvanced): Boolean = {
println(p.thisName)
p.canMakeItTrue
}
}
| warningrc/learn-java | scala-test/src/test/scala/com/warningrc/test/scalatest/ScalaPackageAdvanced.scala | Scala | apache-2.0 | 898 |
class SameBytecode {
def foo(a: Int, b: String) = 0
def foo(a: Int, b: Any) = 0
def a = foo(0, "")
def b = foo(a = 0, "")
def c = foo(0, b = "")
def d = foo(a = 0, b = "")
}
| scala/scala | test/files/jvm/named-args-in-order/SameBytecode.scala | Scala | apache-2.0 | 187 |
package org.jetbrains.plugins.scala
package lang
package psi
package stubs
import com.intellij.psi.stubs.StubElement
import org.jetbrains.plugins.scala.lang.psi.api.base.types.ScTypeElement
import org.jetbrains.plugins.scala.lang.psi.api.base.{ScIdList, ScPatternList}
import org.jetbrains.plugins.scala.lang.psi.api.expr.ScExpression
import org.jetbrains.plugins.scala.lang.psi.api.statements.ScVariable
/**
* User: Alexander Podkhalyuzin
* Date: 17.10.2008
*/
trait ScVariableStub extends StubElement[ScVariable] with ScMemberOrLocal {
def isDeclaration: Boolean
def getNames: Array[String]
def getBodyText: String
def getTypeText: String
def getBindingsContainerText: String
def getTypeElement: Option[ScTypeElement]
def getBodyExpr: Option[ScExpression]
def getIdsContainer: Option[ScIdList]
def getPatternsContainer: Option[ScPatternList]
}
| triggerNZ/intellij-scala | src/org/jetbrains/plugins/scala/lang/psi/stubs/ScVariableStub.scala | Scala | apache-2.0 | 871 |
package scala.pickling.binary.`case`.`class`.int.string
import org.scalatest.FunSuite
import scala.pickling._, scala.pickling.Defaults._, binary._
import reflect.runtime.{universe => ru}
import ru._
case class Person(name: String, age: Int)
class BinaryCaseClassIntStringTest extends FunSuite {
test("main") {
val p = Person("Jim", 43)
val pickle = p.pickle
assert(pickle.value.mkString("[", ",", "]") === "[0,0,0,50,115,99,97,108,97,46,112,105,99,107,108,105,110,103,46,98,105,110,97,114,121,46,99,97,115,101,46,99,108,97,115,115,46,105,110,116,46,115,116,114,105,110,103,46,80,101,114,115,111,110,0,0,0,3,74,105,109,0,0,0,43]")
val up = pickle.unpickle[Person]
assert(p === up)
}
}
| phaller/pickling | core/src/test/scala/pickling/run/binary-case-class-int-string.scala | Scala | bsd-3-clause | 711 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.util
import scala.collection.mutable.ArrayBuffer
import org.scalatest.{BeforeAndAfterEach, PrivateMethodTester}
import org.apache.spark.SparkFunSuite
import org.apache.spark.internal.config.Tests.TEST_USE_COMPRESSED_OOPS_KEY
class DummyClass1 {}
class DummyClass2 {
val x: Int = 0
}
class DummyClass3 {
val x: Int = 0
val y: Double = 0.0
}
class DummyClass4(val d: DummyClass3) {
val x: Int = 0
}
// dummy class to show class field blocks alignment.
class DummyClass5 extends DummyClass1 {
val x: Boolean = true
}
class DummyClass6 extends DummyClass5 {
val y: Boolean = true
}
class DummyClass7 {
val x: DummyClass1 = new DummyClass1
}
object DummyString {
def apply(str: String) : DummyString = new DummyString(str.toArray)
}
class DummyString(val arr: Array[Char]) {
override val hashCode: Int = 0
// JDK-7 has an extra hash32 field http://hg.openjdk.java.net/jdk7u/jdk7u6/jdk/rev/11987e85555f
@transient val hash32: Int = 0
}
class DummyClass8 extends KnownSizeEstimation {
val x: Int = 0
override def estimatedSize: Long = 2015
}
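// (Added note) KnownSizeEstimation is an escape hatch: for such objects,
// SizeEstimator.estimate returns estimatedSize directly instead of walking the
// object graph reflectively -- see the last test in this suite.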
class SizeEstimatorSuite
extends SparkFunSuite
with BeforeAndAfterEach
with PrivateMethodTester
with ResetSystemProperties {
override def beforeEach() {
// Set the arch to 64-bit and compressedOops to true to get a deterministic test-case
super.beforeEach()
System.setProperty("os.arch", "amd64")
System.setProperty(TEST_USE_COMPRESSED_OOPS_KEY, "true")
}
override def afterEach(): Unit = {
super.afterEach()
}
test("simple classes") {
assertResult(16)(SizeEstimator.estimate(new DummyClass1))
assertResult(16)(SizeEstimator.estimate(new DummyClass2))
assertResult(24)(SizeEstimator.estimate(new DummyClass3))
assertResult(24)(SizeEstimator.estimate(new DummyClass4(null)))
assertResult(48)(SizeEstimator.estimate(new DummyClass4(new DummyClass3)))
}
test("primitive wrapper objects") {
assertResult(16)(SizeEstimator.estimate(java.lang.Boolean.TRUE))
assertResult(16)(SizeEstimator.estimate(java.lang.Byte.valueOf("1")))
assertResult(16)(SizeEstimator.estimate(java.lang.Character.valueOf('1')))
assertResult(16)(SizeEstimator.estimate(java.lang.Short.valueOf("1")))
assertResult(16)(SizeEstimator.estimate(java.lang.Integer.valueOf(1)))
assertResult(24)(SizeEstimator.estimate(java.lang.Long.valueOf(1)))
assertResult(16)(SizeEstimator.estimate(java.lang.Float.valueOf(1.0f)))
assertResult(24)(SizeEstimator.estimate(java.lang.Double.valueOf(1.0)))
}
test("class field blocks rounding") {
assertResult(16)(SizeEstimator.estimate(new DummyClass5))
assertResult(24)(SizeEstimator.estimate(new DummyClass6))
}
// NOTE: The String class definition varies across JDK versions (1.6 vs. 1.7) and vendors
// (Sun vs IBM). Use a DummyString class to make tests deterministic.
test("strings") {
assertResult(40)(SizeEstimator.estimate(DummyString("")))
assertResult(48)(SizeEstimator.estimate(DummyString("a")))
assertResult(48)(SizeEstimator.estimate(DummyString("ab")))
assertResult(56)(SizeEstimator.estimate(DummyString("abcdefgh")))
}
test("primitive arrays") {
assertResult(32)(SizeEstimator.estimate(new Array[Byte](10)))
assertResult(40)(SizeEstimator.estimate(new Array[Char](10)))
assertResult(40)(SizeEstimator.estimate(new Array[Short](10)))
assertResult(56)(SizeEstimator.estimate(new Array[Int](10)))
assertResult(96)(SizeEstimator.estimate(new Array[Long](10)))
assertResult(56)(SizeEstimator.estimate(new Array[Float](10)))
assertResult(96)(SizeEstimator.estimate(new Array[Double](10)))
assertResult(4016)(SizeEstimator.estimate(new Array[Int](1000)))
assertResult(8016)(SizeEstimator.estimate(new Array[Long](1000)))
}
test("object arrays") {
// Arrays containing nulls should just have one pointer per element
assertResult(56)(SizeEstimator.estimate(new Array[String](10)))
assertResult(56)(SizeEstimator.estimate(new Array[AnyRef](10)))
// For object arrays with non-null elements, each object should take one pointer plus
// however many bytes that class takes. (Note that Array.fill calls the code in its
// second parameter separately for each object, so we get distinct objects.)
assertResult(216)(SizeEstimator.estimate(Array.fill(10)(new DummyClass1)))
assertResult(216)(SizeEstimator.estimate(Array.fill(10)(new DummyClass2)))
assertResult(296)(SizeEstimator.estimate(Array.fill(10)(new DummyClass3)))
assertResult(56)(SizeEstimator.estimate(Array(new DummyClass1, new DummyClass2)))
    // Past size 100, we sample 100 elements, but we should still get the right size.
assertResult(28016)(SizeEstimator.estimate(Array.fill(1000)(new DummyClass3)))
val arr = new Array[Char](100000)
assertResult(200016)(SizeEstimator.estimate(arr))
assertResult(480032)(SizeEstimator.estimate(Array.fill(10000)(new DummyString(arr))))
val buf = new ArrayBuffer[DummyString]()
for (i <- 0 until 5000) {
buf += new DummyString(new Array[Char](10))
}
assertResult(340016)(SizeEstimator.estimate(buf.toArray))
for (i <- 0 until 5000) {
buf += new DummyString(arr)
}
assertResult(683912)(SizeEstimator.estimate(buf.toArray))
// If an array contains the *same* element many times, we should only count it once.
val d1 = new DummyClass1
// 10 pointers plus 8-byte object
assertResult(72)(SizeEstimator.estimate(Array.fill(10)(d1)))
// 100 pointers plus 8-byte object
assertResult(432)(SizeEstimator.estimate(Array.fill(100)(d1)))
// Same thing with huge array containing the same element many times. Note that this won't
// return exactly 4032 because it can't tell that *all* the elements will equal the first
// one it samples, but it should be close to that.
// TODO: If we sample 100 elements, this should always be 4176 ?
val estimatedSize = SizeEstimator.estimate(Array.fill(1000)(d1))
assert(estimatedSize >= 4000, "Estimated size " + estimatedSize + " should be more than 4000")
assert(estimatedSize <= 4200, "Estimated size " + estimatedSize + " should be less than 4200")
}
test("32-bit arch") {
System.setProperty("os.arch", "x86")
val initialize = PrivateMethod[Unit]('initialize)
SizeEstimator invokePrivate initialize()
assertResult(40)(SizeEstimator.estimate(DummyString("")))
assertResult(48)(SizeEstimator.estimate(DummyString("a")))
assertResult(48)(SizeEstimator.estimate(DummyString("ab")))
assertResult(56)(SizeEstimator.estimate(DummyString("abcdefgh")))
}
// NOTE: The String class definition varies across JDK versions (1.6 vs. 1.7) and vendors
// (Sun vs IBM). Use a DummyString class to make tests deterministic.
test("64-bit arch with no compressed oops") {
System.setProperty("os.arch", "amd64")
System.setProperty(TEST_USE_COMPRESSED_OOPS_KEY, "false")
val initialize = PrivateMethod[Unit]('initialize)
SizeEstimator invokePrivate initialize()
assertResult(56)(SizeEstimator.estimate(DummyString("")))
assertResult(64)(SizeEstimator.estimate(DummyString("a")))
assertResult(64)(SizeEstimator.estimate(DummyString("ab")))
assertResult(72)(SizeEstimator.estimate(DummyString("abcdefgh")))
// primitive wrapper classes
assertResult(24)(SizeEstimator.estimate(java.lang.Boolean.TRUE))
assertResult(24)(SizeEstimator.estimate(java.lang.Byte.valueOf("1")))
assertResult(24)(SizeEstimator.estimate(java.lang.Character.valueOf('1')))
assertResult(24)(SizeEstimator.estimate(java.lang.Short.valueOf("1")))
assertResult(24)(SizeEstimator.estimate(java.lang.Integer.valueOf(1)))
assertResult(24)(SizeEstimator.estimate(java.lang.Long.valueOf(1)))
assertResult(24)(SizeEstimator.estimate(java.lang.Float.valueOf(1.0f)))
assertResult(24)(SizeEstimator.estimate(java.lang.Double.valueOf(1.0)))
}
test("class field blocks rounding on 64-bit VM without useCompressedOops") {
assertResult(24)(SizeEstimator.estimate(new DummyClass5))
assertResult(32)(SizeEstimator.estimate(new DummyClass6))
}
test("check 64-bit detection for s390x arch") {
System.setProperty("os.arch", "s390x")
val initialize = PrivateMethod[Unit]('initialize)
SizeEstimator invokePrivate initialize()
// Class should be 32 bytes on s390x if recognised as 64 bit platform
assertResult(32)(SizeEstimator.estimate(new DummyClass7))
}
test("SizeEstimation can provide the estimated size") {
// DummyClass8 provides its size estimation.
assertResult(2015)(SizeEstimator.estimate(new DummyClass8))
assertResult(20206)(SizeEstimator.estimate(Array.fill(10)(new DummyClass8)))
}
}
| pgandhi999/spark | core/src/test/scala/org/apache/spark/util/SizeEstimatorSuite.scala | Scala | apache-2.0 | 9,565 |
package org.jetbrains.plugins.scala
package script
import com.intellij.execution.configurations.{RunConfiguration, ConfigurationType, ConfigurationFactory}
import com.intellij.openapi.project.Project
import javax.swing.Icon
import config.ScalaFacet
/**
* User: Alexander Podkhalyuzin
* Date: 04.02.2009
*/
class ScalaScriptRunConfigurationFactory(val typez: ConfigurationType) extends ConfigurationFactory(typez) {
  def createTemplateConfiguration(project: Project): RunConfiguration = {
    val configuration = new ScalaScriptRunConfiguration(project, this, "")
    initDefault(configuration)
    configuration
  }
  override def createConfiguration(name: String, template: RunConfiguration): RunConfiguration = {
    val configuration = super.createConfiguration(name, template).asInstanceOf[ScalaScriptRunConfiguration]
    ScalaFacet.findModulesIn(template.getProject).headOption.foreach {
      configuration.setModule _
    }
    configuration
  }
private def initDefault(configuration: ScalaScriptRunConfiguration): Unit = {
}
} | consulo/consulo-scala | src/org/jetbrains/plugins/scala/script/ScalaScriptRunConfigurationFactory.scala | Scala | apache-2.0 | 1,081 |
package org.jetbrains.plugins.scala
package lang
package refactoring
import com.intellij.lang.refactoring.RefactoringSupportProvider
import com.intellij.psi.PsiElement
import com.intellij.refactoring.RefactoringActionHandler
import com.intellij.refactoring.changeSignature.ChangeSignatureHandler
import org.jetbrains.plugins.scala.lang.psi.api.base.ScFieldId
import org.jetbrains.plugins.scala.lang.psi.api.base.patterns.ScReferencePattern
import org.jetbrains.plugins.scala.lang.psi.api.statements._
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef.ScTypeDefinition
import org.jetbrains.plugins.scala.lang.refactoring.changeSignature.ScalaChangeSignatureHandler
import org.jetbrains.plugins.scala.lang.refactoring.extractMethod.ScalaExtractMethodHandler
import org.jetbrains.plugins.scala.lang.refactoring.introduceField.ScalaIntroduceFieldFromExpressionHandler
import org.jetbrains.plugins.scala.lang.refactoring.introduceParameter.ScalaIntroduceParameterHandler
import org.jetbrains.plugins.scala.lang.refactoring.introduceVariable.ScalaIntroduceVariableHandler
/**
* User: Alexander Podkhalyuzin
* Date: 29.03.2009
*/
class ScalaRefactoringSupportProvider extends RefactoringSupportProvider {
override def isInplaceRenameAvailable(element: PsiElement, context: PsiElement) = {
false // handled by ScalaInplaceRenameHandler
}
override def getIntroduceConstantHandler: RefactoringActionHandler = null
override def getIntroduceVariableHandler: RefactoringActionHandler = new ScalaIntroduceVariableHandler
override def getIntroduceFieldHandler: RefactoringActionHandler = new ScalaIntroduceFieldFromExpressionHandler
override def getIntroduceParameterHandler: RefactoringActionHandler = new ScalaIntroduceParameterHandler
override def isSafeDeleteAvailable(element: PsiElement): Boolean = element match {
case _: ScTypeDefinition | _: ScFunction | _: ScFieldId | _: ScReferencePattern => true
case _ => false
}
override def getExtractMethodHandler: RefactoringActionHandler = new ScalaExtractMethodHandler
override def getChangeSignatureHandler: ChangeSignatureHandler = new ScalaChangeSignatureHandler
}
| triggerNZ/intellij-scala | src/org/jetbrains/plugins/scala/lang/refactoring/ScalaRefactoringSupportProvider.scala | Scala | apache-2.0 | 2,167 |
package io.getquill.context.sql.norm
import io.getquill.ast._
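/**
 * (Added sketch) Pushes a `Map` over a raw infix query into the infix's query
 * argument, informally:
 *
 *   Map(Infix(parts, q :: params), x, p)  ==>  Infix(parts, Map(q, x, p) :: params)
 *
 * so the mapping applies to the inner query rather than to the opaque infix node.
 */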
object ExpandMappedInfix {
def apply(q: Ast): Ast = {
Transform(q) {
case Map(Infix("" :: parts, (q: Query) :: params, pure, quat), x, p) =>
Infix("" :: parts, Map(q, x, p) :: params, pure, quat)
}
}
}
| getquill/quill | quill-sql-portable/src/main/scala/io/getquill/sql/norm/ExpandMappedInfix.scala | Scala | apache-2.0 | 292 |
import io.floyd.events.EventsActor
import akka.testkit.TestActorRef
class TestEventsActor extends BaseUnitTestActor {
"EventsActor" should "increment value when create a child actor" in {
val actorRef = TestActorRef[EventsActor]
val actor = actorRef.underlyingActor
actorRef ! self
actor.nextValue.next() should be (2)
receiveN(2)
}
"EventsActor.createNameOfStreamer" should "give several stream consecutive names" in {
val actorRef = TestActorRef[EventsActor]
val actor = actorRef.underlyingActor
actor.createNameOfStreamer should be ("stream1")
actor.createNameOfStreamer should be ("stream2")
}
} | floyd-io/floyd-scala | src/test/scala/events/TestEventsActor.scala | Scala | lgpl-3.0 | 648 |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package xerial.silk.frame.weaver
import java.util.Properties
import xerial.core.log.Logger
import xerial.silk.frame.Database
object SQLite {
def memoryDatabase = SQLite(":memory:")
}
case class SQLite(name: String) extends Database {
override def toString = name
}
object SQLiteWeaver {
case class Config(jdbcProperties: Properties = new Properties())
}
/**
*
*/
class SQLiteWeaver extends Weaver with StateStore with JDBCWeaver with Logger {
import SQLiteWeaver._
override type Config = SQLiteWeaver.Config
override val config: Config = Config()
protected val jdbcDriverName = "org.sqlite.JDBC"
protected def jdbcUrl(databaseName: String): String = s"jdbc:sqlite:${databaseName}"
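  // (Added note) e.g. jdbcUrl("test.db") == "jdbc:sqlite:test.db"; the in-memory
  // database above maps to "jdbc:sqlite::memory:".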
protected def jdbcProperties = config.jdbcProperties
}
| xerial/silk | silk-frame/src/main/scala/xerial/silk/frame/weaver/SQLiteWeaver.scala | Scala | apache-2.0 | 1,327 |
package moe.pizza.eveapi.endpoints
import moe.pizza.eveapi.generated.corp
import moe.pizza.eveapi._
import org.http4s.client.Client
class Corp(baseurl: String, apikey: Option[XmlApiKey])(implicit c: Client) {
def AccountBalance() =
new ApiRequest[corp.AccountBalance.Eveapi](baseurl, "corp/AccountBalance.xml.aspx", apikey).apply().map { r =>
new XMLApiResponse(r.currentTime.toDateTime, r.cachedUntil.toDateTime, r.result.rowset.row)
}
def ContactList() =
new ApiRequest[corp.ContactList.Eveapi](baseurl, "corp/ContactList.xml.aspx", apikey).apply().map { r =>
new XMLApiResponse(r.currentTime.toDateTime, r.cachedUntil.toDateTime, r.result.rowset)
}
  def Standings() =
    new ApiRequest[corp.Standings.Eveapi](baseurl, "corp/Standings.xml.aspx", apikey).apply().map { r =>
      new XMLApiResponse(r.currentTime.toDateTime, r.cachedUntil.toDateTime, r.result.corporationNPCStandings.rowset)
    }
}
| xxpizzaxx/pizza-eveapi | src/main/scala/moe/pizza/eveapi/endpoints/Corp.scala | Scala | mit | 935 |
package rere.ql.queries
import org.scalatest.FlatSpec
import rere.ql.types._
class DatabaseQueriesTest extends FlatSpec with ReqlMatchers {
import rere.ql.queries.all.r
import rere.ql.queries.db._
import rere.ql.queries.math._
import rere.ql.queries.values._
behavior of "DatabaseQueries"
it should "provide .db operator on r" in {
r.db("test") shouldBe subtypeOf [ReqlDatabase] and serializedTo("""[14,["test"]]""")
}
it should "provide .dbCreate operator on r" in {
r.dbCreate("superheroes") shouldBe
subtypeOf [ReqlDatabaseCreationResult] and serializedTo("""[57,["superheroes"]]""")
r.dbCreate(r.expr("super").add("heroes")) shouldBe
subtypeOf [ReqlDatabaseCreationResult] and serializedTo("""[57,[[24,["super","heroes"]]]]""")
}
it should "provide .dbDrop operator on r" in {
r.dbDrop("superheroes") shouldBe
subtypeOf [ReqlDatabaseDroppingResult] and serializedTo("""[58,["superheroes"]]""")
r.dbDrop(r.expr("super").add("heroes")) shouldBe
subtypeOf [ReqlDatabaseDroppingResult] and serializedTo("""[58,[[24,["super","heroes"]]]]""")
}
it should "provide .dbList operator on r" in {
r.dbList() shouldBe subtypeOf [ReqlArray[ReqlString]] and serializedTo("""[59,[]]""")
}
}
| pbaun/rere | modules/ql/src/test/scala/rere/ql/queries/DatabaseQueriesTest.scala | Scala | apache-2.0 | 1,264 |
package akka.persistence.pg.journal
import java.util.concurrent.TimeUnit
import akka.actor.{ActorRef, ActorSystem, Status}
import akka.persistence.pg.PluginConfig
import akka.persistence.pg.journal.StoreActor.{Store, StoreSuccess}
import akka.pattern.ask
import akka.util.Timeout
import scala.concurrent.{ExecutionContext, Future}
trait WriteStrategy {
def pluginConfig: PluginConfig
lazy val driver = pluginConfig.pgPostgresProfile
import driver.api._
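  /** Persists the given actions and signals the notifier once the written events are available to readers. */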
def store(actions: Seq[DBIO[_]],
notifier: Notifier)
(implicit executionContext: ExecutionContext): Future[Unit]
def system: ActorSystem
}
class SingleThreadedBatchWriteStrategy(override val pluginConfig: PluginConfig,
override val system: ActorSystem) extends WriteStrategy {
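  // Funnels every batch through a single StoreActor, so writes are stored strictly sequentially.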
import driver.api._
  implicit val timeout: Timeout = Timeout(10, TimeUnit.SECONDS)
private val eventStoreActor: ActorRef = system.actorOf(StoreActor.props(pluginConfig))
override def store(actions: Seq[DBIO[_]],
notifier: Notifier)
(implicit executionContext: ExecutionContext): Future[Unit] = {
    (eventStoreActor ? Store(actions)).flatMap {
      case StoreSuccess => Future.successful(())
      case Status.Failure(t) => Future.failed(t)
    }.map { _ =>
      notifier.eventsAvailable()
    }
}
}
/**
 * This write strategy can lead to missing events; it is only useful as a benchmarking baseline.
 *
 * @param pluginConfig the journal plugin configuration
 * @param system       the actor system, used here for logging
 */
class TransactionalWriteStrategy(override val pluginConfig: PluginConfig,
override val system: ActorSystem) extends WriteStrategy {
  system.log.warning(
    """
      |!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
      |!
      |! TransactionalWriteStrategy is configured:
      |!
      |! A possible, and even likely, consequence is that some events might be
      |! missed while reading. This strategy is only useful for benchmarking!
      |! Use with caution, YOLO !!!
      |!
      |!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
    """.stripMargin)
import pluginConfig.pgPostgresProfile.api._
def store(actions: Seq[DBIO[_]],
notifier: Notifier)
(implicit executionContext: ExecutionContext): Future[Unit] = {
pluginConfig.database.run {
DBIO.seq(actions:_*).transactionally
}.map { _ =>
notifier.eventsAvailable()
}
}
}
class TableLockingWriteStrategy(override val pluginConfig: PluginConfig,
override val system: ActorSystem) extends WriteStrategy {
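  // Serializes concurrent writers at the database level by locking the journal table
  // in SHARE ROW EXCLUSIVE mode for the duration of the transaction.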
import pluginConfig.pgPostgresProfile.api._
def store(actions: Seq[DBIO[_]],
notifier: Notifier)
(implicit executionContext: ExecutionContext): Future[Unit] = {
pluginConfig.database.run {
DBIO.seq((sqlu"""lock table #${pluginConfig.fullJournalTableName} in share row exclusive mode"""
+: actions):_*).transactionally
}.map { _ =>
notifier.eventsAvailable()
}
}
}
class RowIdUpdatingStrategy(override val pluginConfig: PluginConfig,
override val system: ActorSystem) extends WriteStrategy {
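  // Commits the events first, then delegates row-id assignment (and the subsequent
  // notification of readers) to the RowIdUpdater actor.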
import driver.api._
private val rowIdUpdater: ActorRef = system.actorOf(RowIdUpdater.props(pluginConfig), "AkkaPgRowIdUpdater")
def store(actions: Seq[DBIO[_]], notifier: Notifier)
(implicit executionContext: ExecutionContext): Future[Unit] = {
pluginConfig.database
.run(DBIO.seq(actions:_*).transactionally)
.map { _ => rowIdUpdater ! RowIdUpdater.UpdateRowIds(notifier) }
}
}
| kwark/akka-persistence-postgresql | modules/akka-persistence-pg/src/main/scala/akka/persistence/pg/journal/WriteStrategy.scala | Scala | mit | 4,266 |
/**
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*
*/
package com.ksmpartners.ernie.engine.report
import org.testng.annotations.{ BeforeMethod, Test }
import org.testng.Assert
import com.ksmpartners.ernie.model.{ ReportEntity, DefinitionEntity, ReportType }
import scala.collection._
import org.joda.time.DateTime
import org.slf4j.{ LoggerFactory, Logger }
import com.ksmpartners.ernie.util.TestLogger
import java.io.{ ByteArrayInputStream, File }
class MemoryReportManagerTest extends TestLogger {
private var reportManager: MemoryReportManager = new MemoryReportManager
private val log: Logger = LoggerFactory.getLogger("com.ksmpartners.ernie.engine.report.MemoryReportManagerTest")
@BeforeMethod
def setup() {
reportManager = new MemoryReportManager
reportManager.putDefinition("def_1", "DEF_1".getBytes, new DefinitionEntity(DateTime.now(), "def_1", "default", null, "", null, null))
reportManager.putDefinition("def_2", "DEF_2".getBytes, new DefinitionEntity(DateTime.now(), "def_2", "default", null, "", null, null))
reportManager.putDefinition("def_3", "DEF_3".getBytes, new DefinitionEntity(DateTime.now(), "def_3", "default", null, "", null, null))
reportManager.putDefinition("def_4", "DEF_4".getBytes, new DefinitionEntity(DateTime.now(), "def_4", "default", null, "", null, null))
reportManager.putDefinition("def_5", "DEF_5".getBytes, new DefinitionEntity(DateTime.now(), "def_5", "default", null, "", null, null))
reportManager.putReport("rpt_1", "RPT_1".getBytes, new ReportEntity(DateTime.now(), DateTime.now(), "rpt_1", "def_1", "default", null, ReportType.PDF, null, null))
reportManager.putReport("rpt_2", "RPT_2".getBytes, new ReportEntity(DateTime.now(), DateTime.now(), "rpt_2", "def_2", "default", null, ReportType.PDF, null, null))
reportManager.putReport("rpt_3", "RPT_3".getBytes, new ReportEntity(DateTime.now(), DateTime.now(), "rpt_3", "def_3", "default", null, ReportType.PDF, null, null))
reportManager.putReport("rpt_4", "RPT_4".getBytes, new ReportEntity(DateTime.now(), DateTime.now(), "rpt_4", "def_4", "default", null, ReportType.PDF, null, null))
reportManager.putReport("rpt_5", "RPT_5".getBytes, new ReportEntity(DateTime.now(), DateTime.now(), "rpt_5", "def_5", "default", null, ReportType.PDF, null, null))
}
@Test
def testPutReport() {
var entity = new mutable.HashMap[String, Any]()
entity += (ReportManager.rptId -> "rpt_6")
entity += (ReportManager.sourceDefId -> "def_6")
entity += (ReportManager.reportType -> ReportType.CSV)
entity += (ReportManager.createdUser -> "default")
var params = new mutable.HashMap[String, String]()
params += ("PARAM_1" -> "VAL_1")
params += ("PARAM_2" -> "VAL_2")
params += ("PARAM_3" -> "VAL_3")
entity += (ReportManager.paramMap -> params)
val bosR = reportManager.putReport(entity)
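    // The report only becomes visible to the manager once the returned stream is closed.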
Assert.assertFalse(reportManager.hasReport("rpt_6"))
bosR.close()
Assert.assertTrue(reportManager.hasReport("rpt_6"))
val report = reportManager.getReport("rpt_6").get
Assert.assertNotNull(report.getParams)
Assert.assertNotNull(report.getCreatedDate)
Assert.assertNotNull(report.getRetentionDate)
}
@Test
def testPutDefinition() {
    var entity = new mutable.HashMap[String, Any]()
entity += (ReportManager.defId -> "def_6")
entity += (ReportManager.createdUser -> "default")
val paramList = List("PARAM_1", "PARAM_2", "PARAM_3")
entity += (ReportManager.paramNames -> paramList)
val put = reportManager.putDefinition(entity)
val bosD = put._2
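    // As with reports, the definition is only registered once the stream is closed.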
Assert.assertFalse(reportManager.hasDefinition(put._1.getDefId))
bosD.close()
Assert.assertTrue(reportManager.hasDefinition(put._1.getDefId))
val definition = reportManager.getDefinition(put._1.getDefId).get
Assert.assertNotNull(definition.getParamNames)
Assert.assertNotNull(definition.getCreatedDate)
val (dE, s) = reportManager.putDefinition(new DefinitionEntity(DateTime.now(), "def_6", "default", null, "", null, null))
val file = new File(Thread.currentThread.getContextClassLoader.getResource("test_def.rptdesign").getPath)
val xl = scala.xml.XML.loadFile(file)
val len = xl.toString.length
org.apache.commons.io.CopyUtils.copy(new ByteArrayInputStream(xl.toString.getBytes), s)
s.close()
val res = reportManager.getDefinitionContent(dE.getDefId)
Assert.assertTrue(res.isDefined)
try {
val xml = scala.xml.XML.load(res.get)
Assert.assertEquals(xml.toString.length, len)
} catch {
      case _: Throwable => Assert.fail("definition content could not be parsed as XML")
}
}
@Test
def testUpdateDefinition() {
var defn = reportManager.getDefinition("def_5")
Assert.assertTrue(defn.isDefined)
var entity = defn.get.getEntity
val prev = entity.getCreatedUser
entity.setCreatedUser(prev + "1")
reportManager.updateDefinitionEntity("def_5", entity)
defn = reportManager.getDefinition("def_5")
Assert.assertTrue(defn.get.getCreatedUser == prev + "1")
}
  @Test
def testGet() {
val buf: Array[Byte] = new Array(5)
reportManager.getDefinitionContent("def_1").get.read(buf)
Assert.assertEquals(buf, "DEF_1".getBytes)
reportManager.getReportContent("rpt_1").get.read(buf)
Assert.assertEquals(buf, "RPT_1".getBytes)
}
@Test
def testHas() {
Assert.assertTrue(reportManager.hasDefinition("def_1"))
Assert.assertTrue(reportManager.hasDefinition("def_2"))
Assert.assertTrue(reportManager.hasDefinition("def_3"))
Assert.assertTrue(reportManager.hasDefinition("def_4"))
Assert.assertTrue(reportManager.hasDefinition("def_5"))
Assert.assertTrue(reportManager.hasReport("rpt_1"))
Assert.assertTrue(reportManager.hasReport("rpt_2"))
Assert.assertTrue(reportManager.hasReport("rpt_3"))
Assert.assertTrue(reportManager.hasReport("rpt_4"))
Assert.assertTrue(reportManager.hasReport("rpt_5"))
Assert.assertFalse(reportManager.hasReport("def_1"))
Assert.assertFalse(reportManager.hasReport("def_2"))
Assert.assertFalse(reportManager.hasReport("def_3"))
Assert.assertFalse(reportManager.hasReport("def_4"))
Assert.assertFalse(reportManager.hasReport("def_5"))
Assert.assertFalse(reportManager.hasDefinition("rpt_1"))
Assert.assertFalse(reportManager.hasDefinition("rpt_2"))
Assert.assertFalse(reportManager.hasDefinition("rpt_3"))
Assert.assertFalse(reportManager.hasDefinition("rpt_4"))
Assert.assertFalse(reportManager.hasDefinition("rpt_5"))
}
@Test(dependsOnMethods = Array("testUpdateDefinition"))
def testDelete() {
reportManager.deleteDefinition("def_1")
reportManager.deleteDefinition("def_2")
reportManager.deleteDefinition("def_3")
reportManager.deleteDefinition("def_4")
reportManager.deleteDefinition("def_5")
reportManager.deleteReport("rpt_1")
reportManager.deleteReport("rpt_2")
reportManager.deleteReport("rpt_3")
reportManager.deleteReport("rpt_4")
reportManager.deleteReport("rpt_5")
Assert.assertFalse(reportManager.hasReport("def_1"))
Assert.assertFalse(reportManager.hasReport("def_2"))
Assert.assertFalse(reportManager.hasReport("def_3"))
Assert.assertFalse(reportManager.hasReport("def_4"))
Assert.assertFalse(reportManager.hasReport("def_5"))
Assert.assertFalse(reportManager.hasDefinition("rpt_1"))
Assert.assertFalse(reportManager.hasDefinition("rpt_2"))
Assert.assertFalse(reportManager.hasDefinition("rpt_3"))
Assert.assertFalse(reportManager.hasDefinition("rpt_4"))
Assert.assertFalse(reportManager.hasDefinition("rpt_5"))
}
@Test
def testGetAll() {
Assert.assertEquals(reportManager.getAllDefinitionIds.sortWith({ (x, y) => x < y }),
List("def_1", "def_2", "def_3", "def_4", "def_5"))
Assert.assertEquals(reportManager.getAllReportIds.sortWith({ (x, y) => x < y }),
List("rpt_1", "rpt_2", "rpt_3", "rpt_4", "rpt_5"))
}
@Test
def missingReportOrDefinitionReturnsNone() {
Assert.assertEquals(reportManager.getReport("FAIL"), None)
Assert.assertEquals(reportManager.getDefinition("FAIL"), None)
}
@Test(expectedExceptions = Array(classOf[IllegalArgumentException]))
def missingDefIdThrowsException() {
var entity = new mutable.HashMap[String, Any]()
entity += (ReportManager.createdUser -> "default")
reportManager.putDefinition(entity)
}
@Test(expectedExceptions = Array(classOf[IllegalArgumentException]))
def missingDefCreatedUserThrowsException() {
var entity = new mutable.HashMap[String, Any]()
entity += (ReportManager.defId -> "def_6")
reportManager.putDefinition(entity)
}
@Test(expectedExceptions = Array(classOf[IllegalArgumentException]))
def missingRptIdThrowsException() {
var entity = new mutable.HashMap[String, Any]()
entity += (ReportManager.sourceDefId -> "def_6")
entity += (ReportManager.reportType -> ReportType.CSV)
entity += (ReportManager.createdUser -> "default")
reportManager.putReport(entity)
}
@Test(expectedExceptions = Array(classOf[IllegalArgumentException]))
def missingSourceDefIdThrowsException() {
var entity = new mutable.HashMap[String, Any]()
entity += (ReportManager.rptId -> "rpt_6")
entity += (ReportManager.reportType -> ReportType.CSV)
entity += (ReportManager.createdUser -> "default")
reportManager.putReport(entity)
}
@Test(expectedExceptions = Array(classOf[IllegalArgumentException]))
def missingReportTypeThrowsException() {
var entity = new mutable.HashMap[String, Any]()
entity += (ReportManager.rptId -> "rpt_6")
entity += (ReportManager.sourceDefId -> "def_6")
entity += (ReportManager.createdUser -> "default")
reportManager.putReport(entity)
}
@Test(expectedExceptions = Array(classOf[IllegalArgumentException]))
def missingRptCreatedUserThrowsException() {
var entity = new mutable.HashMap[String, Any]()
entity += (ReportManager.rptId -> "rpt_6")
entity += (ReportManager.sourceDefId -> "def_6")
entity += (ReportManager.reportType -> ReportType.CSV)
reportManager.putReport(entity)
}
}
| ksmpartners/ernie | ernie-engine/src/test/scala/com/ksmpartners/ernie/engine/report/MemoryReportManagerTest.scala | Scala | apache-2.0 | 10,654 |
package com.twitter.finagle.oauth2
import com.twitter.finagle.http.{HeaderMap, ParamMap}
import org.scalatest.FlatSpec
import org.scalatest.Matchers._
class ClientCredentialFetcherSpec extends FlatSpec {
it should "fetch Basic64" in {
val request = new Request.Authorization(
HeaderMap("Authorization" -> "Basic Y2xpZW50X2lkX3ZhbHVlOmNsaWVudF9zZWNyZXRfdmFsdWU="),
ParamMap()
)
val Some(c) = request.clientCredential
c.clientId should be ("client_id_value")
c.clientSecret should be ("client_secret_value")
}
it should "fetch Basic64 by case insensitive" in {
val request = new Request.Authorization(
HeaderMap("authorization" -> "Basic Y2xpZW50X2lkX3ZhbHVlOmNsaWVudF9zZWNyZXRfdmFsdWU="),
ParamMap()
)
val Some(c) = request.clientCredential
c.clientId should be ("client_id_value")
c.clientSecret should be ("client_secret_value")
}
it should "fetch empty client_secret" in {
val request = new Request.Authorization(
HeaderMap("Authorization" -> "Basic Y2xpZW50X2lkX3ZhbHVlOg=="),
ParamMap()
)
val Some(c) = request.clientCredential
c.clientId should be ("client_id_value")
c.clientSecret should be ("")
}
it should "not fetch no Authorization key in header" in {
val request = new Request.Authorization(
HeaderMap("authorizatio" -> "Basic Y2xpZW50X2lkX3ZhbHVlOmNsaWVudF9zZWNyZXRfdmFsdWU="),
ParamMap()
)
request.clientCredential should be (None)
}
it should "not fetch invalidate Base64" in {
val request = new Request.Authorization(
HeaderMap("Authorization" -> "Basic basic"),
ParamMap()
)
request.clientCredential should be (None)
}
it should "fetch parameter" in {
val request = new Request.Authorization(
HeaderMap(),
ParamMap("client_id" -> "client_id_value", "client_secret" -> "client_secret_value")
)
val Some(c) = request.clientCredential
c.clientId should be ("client_id_value")
c.clientSecret should be ("client_secret_value")
}
it should "omit client_secret" in {
val request = new Request.Authorization(
HeaderMap(),
ParamMap("client_id" -> "client_id_value")
)
val Some(c) = request.clientCredential
c.clientId should be ("client_id_value")
c.clientSecret should be ("")
}
it should "not fetch missing parameter" in {
val request = new Request.Authorization(
HeaderMap(),
ParamMap("client_secret" -> "client_secret_value")
)
request.clientCredential should be (None)
}
it should "not fetch invalid parameter" in {
val request = new Request.Authorization(HeaderMap("Authorization" -> ""), ParamMap())
request.clientCredential should be (None)
}
}
| finagle/finagle-oauth2 | src/test/scala/com/twitter/finagle/oauth2/ClientCredentialFetcherSpec.scala | Scala | apache-2.0 | 2,756 |