code | repo_name | path | language | license | size
---|---|---|---|---|---
/*
* Copyright 2017 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.ct600.accounts
import org.mockito.Mockito._
import org.scalatest.mock.MockitoSugar
import org.scalatest.{Matchers, WordSpec}
import uk.gov.hmrc.ct.accounts.frs10x.boxes._
import uk.gov.hmrc.ct.accounts.frs10x.retriever.Frs10xDirectorsBoxRetriever
import uk.gov.hmrc.ct.box.CtValidation
import uk.gov.hmrc.ct.box.retriever.FilingAttributesBoxValueRetriever
import uk.gov.hmrc.ct.{CompaniesHouseFiling, HMRCFiling, MicroEntityFiling, StatutoryAccountsFiling}
class AC8023Spec extends WordSpec with MockitoSugar with Matchers {
private trait TestBoxRetriever extends Frs10xDirectorsBoxRetriever with FilingAttributesBoxValueRetriever
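// Expected behaviour, as exercised by the cases below: AC8023 is mandatory for
// HMRC micro-entity filings (HMRC-only or joint) and must not be set otherwise.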
"AC8023 validate" should {
"for HMRC Only micro entity filing" when {
"return errors when AC8023 is empty" in {
val mockBoxRetriever = mock[TestBoxRetriever]
when(mockBoxRetriever.hmrcFiling()).thenReturn(HMRCFiling(true))
when(mockBoxRetriever.companiesHouseFiling()).thenReturn(CompaniesHouseFiling(false))
when(mockBoxRetriever.microEntityFiling()).thenReturn(MicroEntityFiling(true))
when(mockBoxRetriever.statutoryAccountsFiling()).thenReturn(StatutoryAccountsFiling(false))
AC8023(None).validate(mockBoxRetriever) shouldBe Set(CtValidation(Some("AC8023"), "error.AC8023.required"))
}
"validate successfully when AC8023 is true" in {
val mockBoxRetriever = mock[TestBoxRetriever]
when(mockBoxRetriever.hmrcFiling()).thenReturn(HMRCFiling(true))
when(mockBoxRetriever.companiesHouseFiling()).thenReturn(CompaniesHouseFiling(false))
when(mockBoxRetriever.microEntityFiling()).thenReturn(MicroEntityFiling(true))
when(mockBoxRetriever.statutoryAccountsFiling()).thenReturn(StatutoryAccountsFiling(false))
AC8023(Some(true)).validate(mockBoxRetriever) shouldBe Set()
}
"validate successfully when AC8023 is false" in {
val mockBoxRetriever = mock[TestBoxRetriever]
when(mockBoxRetriever.hmrcFiling()).thenReturn(HMRCFiling(true))
when(mockBoxRetriever.companiesHouseFiling()).thenReturn(CompaniesHouseFiling(false))
when(mockBoxRetriever.microEntityFiling()).thenReturn(MicroEntityFiling(true))
when(mockBoxRetriever.statutoryAccountsFiling()).thenReturn(StatutoryAccountsFiling(false))
AC8023(Some(false)).validate(mockBoxRetriever) shouldBe Set()
}
}
"for Joint micro entity filing" when {
"return errors when AC8023 is empty" in {
val mockBoxRetriever = mock[TestBoxRetriever]
when(mockBoxRetriever.hmrcFiling()).thenReturn(HMRCFiling(true))
when(mockBoxRetriever.companiesHouseFiling()).thenReturn(CompaniesHouseFiling(true))
when(mockBoxRetriever.microEntityFiling()).thenReturn(MicroEntityFiling(true))
when(mockBoxRetriever.statutoryAccountsFiling()).thenReturn(StatutoryAccountsFiling(false))
AC8023(None).validate(mockBoxRetriever) shouldBe Set(CtValidation(Some("AC8023"), "error.AC8023.required"))
}
"validate successfully when AC8023 is true" in {
val mockBoxRetriever = mock[TestBoxRetriever]
when(mockBoxRetriever.hmrcFiling()).thenReturn(HMRCFiling(true))
when(mockBoxRetriever.companiesHouseFiling()).thenReturn(CompaniesHouseFiling(true))
when(mockBoxRetriever.microEntityFiling()).thenReturn(MicroEntityFiling(true))
when(mockBoxRetriever.statutoryAccountsFiling()).thenReturn(StatutoryAccountsFiling(false))
AC8023(Some(true)).validate(mockBoxRetriever) shouldBe Set()
}
"validate successfully when AC8023 is false" in {
val mockBoxRetriever = mock[TestBoxRetriever]
when(mockBoxRetriever.hmrcFiling()).thenReturn(HMRCFiling(true))
when(mockBoxRetriever.companiesHouseFiling()).thenReturn(CompaniesHouseFiling(true))
when(mockBoxRetriever.microEntityFiling()).thenReturn(MicroEntityFiling(true))
when(mockBoxRetriever.statutoryAccountsFiling()).thenReturn(StatutoryAccountsFiling(false))
AC8023(Some(false)).validate(mockBoxRetriever) shouldBe Set()
}
}
"for CoHo Only micro entity filing" when {
"validate successfully when AC8023 is empty" in {
val mockBoxRetriever = mock[TestBoxRetriever]
when(mockBoxRetriever.hmrcFiling()).thenReturn(HMRCFiling(false))
when(mockBoxRetriever.companiesHouseFiling()).thenReturn(CompaniesHouseFiling(true))
when(mockBoxRetriever.microEntityFiling()).thenReturn(MicroEntityFiling(true))
when(mockBoxRetriever.statutoryAccountsFiling()).thenReturn(StatutoryAccountsFiling(false))
AC8023(None).validate(mockBoxRetriever) shouldBe Set()
}
"cannot exist when AC8023 is true" in {
val mockBoxRetriever = mock[TestBoxRetriever]
when(mockBoxRetriever.hmrcFiling()).thenReturn(HMRCFiling(false))
when(mockBoxRetriever.companiesHouseFiling()).thenReturn(CompaniesHouseFiling(true))
when(mockBoxRetriever.microEntityFiling()).thenReturn(MicroEntityFiling(true))
when(mockBoxRetriever.statutoryAccountsFiling()).thenReturn(StatutoryAccountsFiling(false))
AC8023(Some(true)).validate(mockBoxRetriever) shouldBe Set(CtValidation(Some("AC8023"), "error.AC8023.cannot.exist"))
}
"cannot exist when AC8023 is false" in {
val mockBoxRetriever = mock[TestBoxRetriever]
when(mockBoxRetriever.hmrcFiling()).thenReturn(HMRCFiling(false))
when(mockBoxRetriever.companiesHouseFiling()).thenReturn(CompaniesHouseFiling(true))
when(mockBoxRetriever.microEntityFiling()).thenReturn(MicroEntityFiling(true))
when(mockBoxRetriever.statutoryAccountsFiling()).thenReturn(StatutoryAccountsFiling(false))
AC8023(Some(false)).validate(mockBoxRetriever) shouldBe Set(CtValidation(Some("AC8023"), "error.AC8023.cannot.exist"))
}
}
"for HMRC Only statutory filing" when {
"validate successfully when AC8023 is empty" in {
val mockBoxRetriever = mock[TestBoxRetriever]
when(mockBoxRetriever.hmrcFiling()).thenReturn(HMRCFiling(true))
when(mockBoxRetriever.companiesHouseFiling()).thenReturn(CompaniesHouseFiling(false))
when(mockBoxRetriever.microEntityFiling()).thenReturn(MicroEntityFiling(false))
when(mockBoxRetriever.statutoryAccountsFiling()).thenReturn(StatutoryAccountsFiling(true))
AC8023(None).validate(mockBoxRetriever) shouldBe Set()
}
"validate successfully when AC8023 is true" in {
val mockBoxRetriever = mock[TestBoxRetriever]
when(mockBoxRetriever.hmrcFiling()).thenReturn(HMRCFiling(true))
when(mockBoxRetriever.companiesHouseFiling()).thenReturn(CompaniesHouseFiling(false))
when(mockBoxRetriever.microEntityFiling()).thenReturn(MicroEntityFiling(false))
when(mockBoxRetriever.statutoryAccountsFiling()).thenReturn(StatutoryAccountsFiling(true))
AC8023(Some(true)).validate(mockBoxRetriever) shouldBe Set(CtValidation(Some("AC8023"), "error.AC8023.cannot.exist"))
}
"validate successfully when AC8023 is false" in {
val mockBoxRetriever = mock[TestBoxRetriever]
when(mockBoxRetriever.hmrcFiling()).thenReturn(HMRCFiling(true))
when(mockBoxRetriever.companiesHouseFiling()).thenReturn(CompaniesHouseFiling(false))
when(mockBoxRetriever.microEntityFiling()).thenReturn(MicroEntityFiling(false))
when(mockBoxRetriever.statutoryAccountsFiling()).thenReturn(StatutoryAccountsFiling(true))
AC8023(Some(false)).validate(mockBoxRetriever) shouldBe Set(CtValidation(Some("AC8023"), "error.AC8023.cannot.exist"))
}
}
"for Joint statutory filing" when {
"return errors when AC8023 is empty" in {
val mockBoxRetriever = mock[TestBoxRetriever]
when(mockBoxRetriever.hmrcFiling()).thenReturn(HMRCFiling(true))
when(mockBoxRetriever.companiesHouseFiling()).thenReturn(CompaniesHouseFiling(true))
when(mockBoxRetriever.microEntityFiling()).thenReturn(MicroEntityFiling(false))
when(mockBoxRetriever.statutoryAccountsFiling()).thenReturn(StatutoryAccountsFiling(true))
AC8023(None).validate(mockBoxRetriever) shouldBe Set()
}
"validate successfully when AC8023 is true" in {
val mockBoxRetriever = mock[TestBoxRetriever]
when(mockBoxRetriever.hmrcFiling()).thenReturn(HMRCFiling(true))
when(mockBoxRetriever.companiesHouseFiling()).thenReturn(CompaniesHouseFiling(true))
when(mockBoxRetriever.microEntityFiling()).thenReturn(MicroEntityFiling(false))
when(mockBoxRetriever.statutoryAccountsFiling()).thenReturn(StatutoryAccountsFiling(true))
AC8023(Some(true)).validate(mockBoxRetriever) shouldBe Set(CtValidation(Some("AC8023"), "error.AC8023.cannot.exist"))
}
"validate successfully when AC8023 is false" in {
val mockBoxRetriever = mock[TestBoxRetriever]
when(mockBoxRetriever.hmrcFiling()).thenReturn(HMRCFiling(true))
when(mockBoxRetriever.companiesHouseFiling()).thenReturn(CompaniesHouseFiling(true))
when(mockBoxRetriever.microEntityFiling()).thenReturn(MicroEntityFiling(false))
when(mockBoxRetriever.statutoryAccountsFiling()).thenReturn(StatutoryAccountsFiling(true))
AC8023(Some(false)).validate(mockBoxRetriever) shouldBe Set(CtValidation(Some("AC8023"), "error.AC8023.cannot.exist"))
}
}
"for CoHo Only statutory filing" when {
"validate successfully when AC8023 is empty" in {
val mockBoxRetriever = mock[TestBoxRetriever]
when(mockBoxRetriever.hmrcFiling()).thenReturn(HMRCFiling(false))
when(mockBoxRetriever.companiesHouseFiling()).thenReturn(CompaniesHouseFiling(true))
when(mockBoxRetriever.microEntityFiling()).thenReturn(MicroEntityFiling(false))
when(mockBoxRetriever.statutoryAccountsFiling()).thenReturn(StatutoryAccountsFiling(true))
AC8023(None).validate(mockBoxRetriever) shouldBe Set()
}
"cannot exist when AC8023 is true" in {
val mockBoxRetriever = mock[TestBoxRetriever]
when(mockBoxRetriever.hmrcFiling()).thenReturn(HMRCFiling(false))
when(mockBoxRetriever.companiesHouseFiling()).thenReturn(CompaniesHouseFiling(true))
when(mockBoxRetriever.microEntityFiling()).thenReturn(MicroEntityFiling(false))
when(mockBoxRetriever.statutoryAccountsFiling()).thenReturn(StatutoryAccountsFiling(true))
AC8023(Some(true)).validate(mockBoxRetriever) shouldBe Set(CtValidation(Some("AC8023"), "error.AC8023.cannot.exist"))
}
"cannot exist when AC8023 is false" in {
val mockBoxRetriever = mock[TestBoxRetriever]
when(mockBoxRetriever.hmrcFiling()).thenReturn(HMRCFiling(false))
when(mockBoxRetriever.companiesHouseFiling()).thenReturn(CompaniesHouseFiling(true))
when(mockBoxRetriever.microEntityFiling()).thenReturn(MicroEntityFiling(false))
when(mockBoxRetriever.statutoryAccountsFiling()).thenReturn(StatutoryAccountsFiling(true))
AC8023(Some(false)).validate(mockBoxRetriever) shouldBe Set(CtValidation(Some("AC8023"), "error.AC8023.cannot.exist"))
}
}
}
}
| pncampbell/ct-calculations | src/test/scala/uk/gov/hmrc/ct/ct600/accounts/AC8023Spec.scala | Scala | apache-2.0 | 11,848 |
object Dna {
val RnaMap = Map("G" -> "C", "C" -> "G", "T" -> "A", "A" -> "U")
def apply() = new Dna()
}
class Dna() {
def toRna(s: String): String =
s.split("")
.map(Dna.RnaMap.getOrElse(_, ""))
.mkString("")
}
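// Usage example, derived from the mapping above: each DNA nucleotide maps to
// its RNA complement, so Dna().toRna("GCTA") == "CGAU".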
| daewon/til | exercism/scala/rna-transcription/src/main/scala/dna.scala | Scala | mpl-2.0 | 235 |
package bifrost.transaction.serialization
import bifrost.serialization.Serializer
import bifrost.transaction.bifrostTransaction.BifrostTransaction.Nonce
import bifrost.transaction.bifrostTransaction._
import bifrost.transaction.box.proposition.{Constants25519, PublicKey25519Proposition}
import bifrost.transaction.proof.Signature25519
import com.google.common.primitives.{Bytes, Ints, Longs}
import io.iohk.iodb.ByteArrayWrapper
import scorex.crypto.signatures.Curve25519
import scala.util.Try
object ProgramTransactionCompanion extends Serializer[ProgramTransaction] {
val typeBytes: Array[Byte] = "ProgramTransaction".getBytes
val prefixBytes: Array[Byte] = Ints.toByteArray(typeBytes.length) ++ typeBytes
override def toBytes(m: ProgramTransaction): Array[Byte] = {
prefixBytes ++
(m match {
case cc: ProgramCreation => ProgramCreationCompanion.toChildBytes(cc)
case cme: ProgramMethodExecution => ProgramMethodExecutionCompanion.toChildBytes(cme)
})
}
override def parseBytes(bytes: Array[Byte]): Try[ProgramTransaction] = Try {
val typeLength = Ints.fromByteArray(bytes.take(Ints.BYTES))
val typeStr = new String(bytes.slice(Ints.BYTES, Ints.BYTES + typeLength))
/* Grab the rest of the bytes, which should begin similarly (with sub-type) */
val newBytes = bytes.slice(Ints.BYTES + typeLength, bytes.length)
val newTypeLength = Ints.fromByteArray(newBytes.take(Ints.BYTES))
val newTypeStr = new String(newBytes.slice(Ints.BYTES, Ints.BYTES + newTypeLength))
newTypeStr match {
case "ProgramCreation" => ProgramCreationCompanion.parseBytes(newBytes).get
case "ProgramMethodExecution" => ProgramMethodExecutionCompanion.parseBytes(newBytes).get
}
}
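/*
 * Common byte layout written by commonToBytes below (field widths come from
 * the referenced constants, e.g. Longs.BYTES, Ints.BYTES,
 * Constants25519.PubKeyLength and Curve25519.SignatureLength):
 *   timestamp | owner pubKey | signature | feeBox count | (nonce, amount)* | fee
 */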
def commonToBytes(m: ProgramTransaction): Array[Byte] = {
Bytes.concat(
Longs.toByteArray(m.timestamp),
m.owner.pubKeyBytes,
m.signatures.head._2.bytes,
Ints.toByteArray(m.preFeeBoxes.head._2.length),
m.preFeeBoxes.head._2.foldLeft(Array[Byte]())((a,b) => a ++ Longs.toByteArray(b._1) ++ Longs.toByteArray(b._2)),
Longs.toByteArray(m.fees.head._2)
)
}
//noinspection ScalaStyle
def commonParseBytes(bytes: Array[Byte]): (
PublicKey25519Proposition,
Map[PublicKey25519Proposition, Signature25519],
Map[PublicKey25519Proposition, IndexedSeq[(Nonce, Long)]],
Map[PublicKey25519Proposition, Long],
Long
) = {
var numReadBytes = 0
val timestamp: Long = Longs.fromByteArray(bytes.slice(0, Longs.BYTES))
numReadBytes += Longs.BYTES
val owner = PublicKey25519Proposition(bytes.slice(numReadBytes, numReadBytes + Constants25519.PubKeyLength))
numReadBytes += Constants25519.PubKeyLength
val signatures: Map[PublicKey25519Proposition, Signature25519] = {
val sig = Signature25519(bytes.slice(numReadBytes, numReadBytes + Curve25519.SignatureLength))
Map(owner -> sig)
}
numReadBytes += Curve25519.SignatureLength
val feePreBoxesLength = Ints.fromByteArray(bytes.slice(numReadBytes, numReadBytes + Ints.BYTES))
numReadBytes += Ints.BYTES
val preBoxes: IndexedSeq[(Nonce, Long)] = (0 until feePreBoxesLength).map { _ =>
val nonce: Nonce = Longs.fromByteArray(bytes.slice(numReadBytes, numReadBytes + Longs.BYTES))
numReadBytes += Longs.BYTES
val amount = Longs.fromByteArray(bytes.slice(numReadBytes, numReadBytes + Longs.BYTES))
numReadBytes += Longs.BYTES
nonce -> amount
}
val feePreBoxes: Map[PublicKey25519Proposition, IndexedSeq[(Nonce, Long)]] = Map(owner -> preBoxes)
val fees: Map[PublicKey25519Proposition, Long] = Map(owner -> Longs.fromByteArray(bytes.slice(numReadBytes, numReadBytes + Longs.BYTES)))
(owner, signatures, feePreBoxes, fees, timestamp)
}
}
| Topl/Project-Bifrost | src/main/scala/bifrost/transaction/serialization/ProgramTransactionCompanion.scala | Scala | mpl-2.0 | 3,915 |
package autolift.scalaz
import scalaz.{Functor, Apply}
import autolift.{LiftAp, LiftApSyntax}
trait ScalazLiftAp[Obj, Fn] extends LiftAp[Obj, Fn]
object ScalazLiftAp extends LowPriorityScalazLiftAp {
def apply[Obj, Fn](implicit lift: ScalazLiftAp[Obj, Fn]): Aux[Obj, Fn, lift.Out] = lift
implicit def base[F[_], A, B](implicit ap: Apply[F]): Aux[F[A], F[A => B], F[B]] =
new ScalazLiftAp[F[A], F[A => B]]{
type Out = F[B]
def apply(fa: F[A], f: F[A => B]) = ap.ap(fa)(f)
}
}
trait LowPriorityScalazLiftAp {
type Aux[Obj, Fn, Out0] = ScalazLiftAp[Obj, Fn]{ type Out = Out0 }
implicit def recur[F[_], G, Fn](implicit functor: Functor[F], lift: LiftAp[G, Fn]): Aux[F[G], Fn, F[lift.Out]] =
new ScalazLiftAp[F[G], Fn]{
type Out = F[lift.Out]
def apply(fg: F[G], f: Fn) = functor.map(fg){ g: G => lift(g, f) }
}
}
final class LiftedAp[A, B, F[_]](protected val f: F[A => B])(implicit ap: Apply[F]){
def andThen[C >: B, D](lf: LiftedAp[C, D, F]) = new LiftedAp(ap.ap(f)(
ap.map(lf.f){
y: (C => D) => { x: (A => B) => x andThen y }
}
))
def compose[C, D <: A](lf: LiftedAp[C, D, F]) = lf andThen this
def map[C](g: B => C): LiftedAp[A, C, F] = new LiftedAp(ap.map(f){ _ andThen g })
def apply[That](that: That)(implicit lift: LiftAp[That, F[A => B]]): lift.Out = lift(that, f)
}
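// Hypothetical usage sketch (imports and values assumed, not part of this file):
//   import scalaz.std.option._, scalaz.std.list._
//   val inc = liftAp(Option((x: Int) => x + 1))
//   inc(List(Option(1), Option(2))) // recurs through List, aps through Option
//   // expected result: List(Option(2), Option(3))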
trait LiftApPackage extends LiftApSyntax{
implicit def mkAp[Obj, Fn](implicit lift: ScalazLiftAp[Obj, Fn]): ScalazLiftAp.Aux[Obj, Fn, lift.Out] = lift
implicit def liftedApFunctor[A, F[_]] = new Functor[LiftedAp[A, ?, F]]{
def map[B, C](lap: LiftedAp[A, B, F])(f: B => C) = lap map f
}
def liftAp[A, B, F[_]](f: F[A => B])(implicit ap: Apply[F]) = new LiftedAp(f)
} | wheaties/AutoLifts | autolift-scalaz/src/main/scala/autolift/scalaz/LiftAp.scala | Scala | apache-2.0 | 1,738 |
/*
* (c) Copyright 2016 Hewlett Packard Enterprise Development LP
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cogx.platform.cpumemory.readerwriter
import cogx.cogmath.algebra.real.Matrix
import cogx.cogmath.geometry.Shape
/** The interface for writing a matrix field on the CPU.
*
* @author Greg Snider
*/
trait MatrixFieldWriter extends FieldWriter
{
/** Set the shape of the matrix field for writing.
*
* If the field already has a defined shape, this does nothing. Subclasses
* that allow field shape to be defined must override this method.
*
* @param fieldShape The desired shape of the scalar field for writing
*/
def setShape(fieldShape: Shape, vectorShape: Shape) {}
/** Write `out` to a 0D matrix field. */
def write(out: Matrix): Unit
/** Write `out` to a 1D matrix field at (`col`). */
def write(col: Int, out: Matrix): Unit
/** Write `out` to a 2D matrix field at (`row`, `col`). */
def write(row: Int, col: Int, out: Matrix): Unit
/** Write `out` to a 3D matrix field at (`layer`, `row`, `col`). */
def write(layer: Int, row: Int, col: Int, out: Matrix): Unit
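// Hypothetical usage sketch (Shape and Matrix constructor signatures assumed):
//   writer.setShape(Shape(2, 3), Shape(4, 4)) // 2x3 field of 4x4 matrices
//   writer.write(0, 0, new Matrix(4, 4))      // write into (row 0, col 0)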
} | hpe-cct/cct-core | src/main/scala/cogx/platform/cpumemory/readerwriter/MatrixFieldWriter.scala | Scala | apache-2.0 | 1,645 |
package org.jetbrains.plugins.scala.lang.parser.parsing.types
import org.jetbrains.plugins.scala.lang.lexer.ScalaTokenTypes
import org.jetbrains.plugins.scala.lang.parser.{ParserNode, ScalaElementTypes}
import org.jetbrains.plugins.scala.lang.parser.parsing.builder.ScalaPsiBuilder
import org.jetbrains.plugins.scala.lang.parser.util.ParserUtils
/**
* @author Alexander Podkhalyuzin
*/
/*
* Types ::= Type {',' Type}
*/
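/*
 * Illustrative example: `Int, String` inside a tuple type such as
 * `(Int, String)` parses as TYPES (isTuple = true), while a lone `Int`
 * parses without the TYPES wrapper.
 */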
object Types extends Types {
override protected def `type` = ParamType
}
trait Types extends ParserNode {
protected def `type`: ParamType
def parse(builder: ScalaPsiBuilder): (Boolean, Boolean) = {
var isTuple = false
def typesParse() = if (`type`.parseInner(builder)) {
true
} else if (builder.getTokenType == ScalaTokenTypes.tUNDER) {
builder.advanceLexer()
true
} else {
false
}
val typesMarker = builder.mark
if (!typesParse) {
typesMarker.drop()
return (false,isTuple)
}
var exit = true
while (exit && builder.getTokenType == ScalaTokenTypes.tCOMMA && !ParserUtils.eatTrailingComma(builder, ScalaTokenTypes.tRPARENTHESIS)) {
isTuple = true
builder.advanceLexer() //Ate ,
if (!typesParse) {
exit = false
//builder error ScalaBundle.message("wrong.type",new Array[Object](0))
}
}
if (isTuple) typesMarker.done(ScalaElementTypes.TYPES)
else typesMarker.drop()
return (true,isTuple)
}
} | gtache/intellij-lsp | intellij-lsp-dotty/src/org/jetbrains/plugins/scala/lang/parser/parsing/types/Types.scala | Scala | apache-2.0 | 1,456 |
// 20.6: Question: instead of an abstract type member, why not just use a type parameter?
class Food
abstract class Animal[SuitableFood <: Food] {
def eat(food:SuitableFood)
}
// Cows
class Grass extends Food
class Cow extends Animal[Grass] {
override def eat(food: Grass) {
}
}
// One difference using type parameter instead of abstract
// type member is that you must specify the type parameter
// whenever refering to the Animal type
// For example
//scala> class Fish extends Food
//defined class Fish
//
//scala> val bessy:Animal = new Cow
//<console>:11: error: class Animal takes type parameters
// val bessy:Animal = new Cow
// ^
//
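// For comparison, a sketch of the abstract-type-member version (hypothetical
// names, shown in comments to keep this file's focus on type parameters):
//   class Food2
//   abstract class Animal2 {
//     type SuitableFood <: Food2
//     def eat(food: SuitableFood)
//   }
//   class Grass2 extends Food2
//   class Cow2 extends Animal2 {
//     type SuitableFood = Grass2
//     override def eat(food: Grass2) {}
//   }
// Here `val bessy: Animal2 = new Cow2` compiles, because Animal2 takes no
// type parameters.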
| machristie/ProgrammingInScala | ch20/Animal.scala | Scala | unlicense | 675 |
/*
Copyright 2012 Twitter, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.twitter.scalding.parquet.thrift
import org.apache.parquet.cascading.{ ParquetTBaseScheme, ParquetValueScheme }
import cascading.scheme.Scheme
import com.twitter.scalding._
import com.twitter.scalding.parquet.{ StrictColumnProjectionString, DeprecatedColumnProjectionString, HasColumnProjection, HasFilterPredicate }
import com.twitter.scalding.source.{ DailySuffixSource, HourlySuffixSource }
import java.io.Serializable
import org.apache.thrift.{ TBase, TFieldIdEnum }
object ParquetThrift extends Serializable {
type ThriftBase = TBase[_ <: TBase[_, _], _ <: TFieldIdEnum]
}
trait ParquetThriftBase[T] extends FileSource with SingleMappable[T] with TypedSink[T] with LocalTapSource with HasFilterPredicate with HasColumnProjection {
def mf: Manifest[T]
def config: ParquetValueScheme.Config[T] = {
val config = new ParquetValueScheme.Config[T].withRecordClass(mf.runtimeClass.asInstanceOf[Class[T]])
val configWithFp = withFilter match {
case Some(fp) => config.withFilterPredicate(fp)
case None => config
}
val configWithProjection = columnProjectionString match {
case Some(s @ DeprecatedColumnProjectionString(_)) => configWithFp.withProjectionString(s.asSemicolonString)
case Some(s @ StrictColumnProjectionString(_)) => configWithFp.withStrictProjectionString(s.asSemicolonString)
case None => configWithFp
}
configWithProjection
}
override def setter[U <: T] = TupleSetter.asSubSetter[T, U](TupleSetter.singleSetter[T])
}
trait ParquetThrift[T <: ParquetThrift.ThriftBase] extends ParquetThriftBase[T] {
override def hdfsScheme = {
// See docs in Parquet346TBaseScheme
val scheme = new Parquet346TBaseScheme[T](this.config)
HadoopSchemeInstance(scheme.asInstanceOf[Scheme[_, _, _, _, _]])
}
}
/**
* When Using these sources or creating subclasses of them, you can
* provide a filter predicate and / or a set of fields (columns) to keep (project).
*
* The filter predicate will be pushed down to the input format, potentially
* making the filter significantly more efficient than a filter applied to
* a TypedPipe (parquet push-down filters can skip reading entire chunks of data off disk).
*
* For data with a large schema (many fields / columns), providing the set of columns
* you intend to use can also make your job significantly more efficient (parquet column projection
* push-down will skip reading unused columns from disk).
* The columns are specified in the format described here:
* https://github.com/apache/parquet-mr/blob/master/parquet_cascading.md#21-projection-pushdown-with-thriftscrooge-records
*
* These settings are defined in the traits [[com.twitter.scalding.parquet.HasFilterPredicate]]
* and [[com.twitter.scalding.parquet.HasColumnProjection]]
*
* Here are two ways you can use these in a parquet source:
*
* {{{
* class MyParquetSource(dr: DateRange) extends DailySuffixParquetThrift("/a/path", dr)
*
* val mySourceFilteredAndProjected = new MyParquetSource(dr) {
* override val withFilter: Option[FilterPredicate] = Some(myFp)
* override val withColumnProjections: Set[String] = Set("a.b.c", "x.y")
* }
* }}}
*
* The other way is to add these as constructor arguments:
*
* {{{
* class MyParquetSource(
* dr: DateRange,
* override val withFilter: Option[FilterPredicate] = None
* override val withColumnProjections: Set[String] = Set()
* ) extends DailySuffixParquetThrift("/a/path", dr)
*
* val mySourceFilteredAndProjected = new MyParquetSource(dr, Some(myFp), Set("a.b.c", "x.y"))
* }}}
*/
class DailySuffixParquetThrift[T <: ParquetThrift.ThriftBase](
path: String,
dateRange: DateRange)(implicit override val mf: Manifest[T])
extends DailySuffixSource(path, dateRange) with ParquetThrift[T]
class HourlySuffixParquetThrift[T <: ParquetThrift.ThriftBase](
path: String,
dateRange: DateRange)(implicit override val mf: Manifest[T])
extends HourlySuffixSource(path, dateRange) with ParquetThrift[T]
class FixedPathParquetThrift[T <: ParquetThrift.ThriftBase](paths: String*)(implicit override val mf: Manifest[T])
extends FixedPathSource(paths: _*) with ParquetThrift[T]
| benpence/scalding | scalding-parquet/src/main/scala/com/twitter/scalding/parquet/thrift/ParquetThrift.scala | Scala | apache-2.0 | 4,733 |
/**
 * Illustrates loading Hadoop KeyValueTextInputFormat data in Scala,
 * converting each (Text, Text) record into a pair of Strings.
 */
package com.oreilly.learningsparkexamples.scala
import org.apache.spark._
import org.apache.hadoop.mapred.KeyValueTextInputFormat
import org.apache.hadoop.io.Text
object LoadKeyValueTextInput {
def main(args: Array[String]) {
if (args.length < 2) {
println("Usage: [sparkmaster] [inputfile]")
sys.exit(1)
}
val master = args(0)
val inputFile = args(1)
val sc = new SparkContext(master, "LoadKeyValueTextInput", System.getenv("SPARK_HOME"))
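// KeyValueTextInputFormat treats each line as a record split on the first tab:
// e.g. the line "pandas\tare cute" yields key "pandas" and value "are cute".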
val input = sc.hadoopFile[Text, Text, KeyValueTextInputFormat](inputFile).map{
case (x, y) => (x.toString, y.toString)
}
println(input.collect().toList)
}
}
| holdenk/learning-spark-examples | src/main/scala/com/oreilly/learningsparkexamples/scala/LoadKeyValueTextInput.scala | Scala | mit | 907 |
package com.raquo.domtypes.generic.builders
import com.raquo.domtypes.generic.defs.styles.{keywords, units}
/**
* This trait contains functionality for creating CSS Styles and Style Setters.
*
* @tparam Prop Representation of a DOM CSS style property, e.g. "background"
* @tparam Setter Representation of a key-value pair, for a specific style property and its value
*/
trait StylePropBuilder[Prop[_], Setter[_], DerivedProp[_], LengthNum] {
type AutoStyle[V] = Prop[V] with keywords.AutoStyle[Setter[V]]
type AlignContentStyle = Prop[String] with keywords.AlignContentStyle[Setter[String]]
type BackgroundAttachmentStyle = Prop[String] with keywords.BackgroundAttachmentStyle[Setter[String]]
type BackgroundSizeStyle = Prop[String] with keywords.BackgroundSizeStyle[Setter[String], DerivedProp, LengthNum]
type BackfaceVisibilityStyle = Prop[String] with keywords.BackfaceVisibilityStyle[Setter[String]]
type BorderCollapseStyle = Prop[String] with keywords.BorderCollapseStyle[Setter[String]]
type BoxSizingStyle = Prop[String] with keywords.BoxSizingStyle[Setter[String]]
type ClearStyle = Prop[String] with keywords.ClearStyle[Setter[String]]
type ColorStyle = Prop[String] with keywords.ColorStyle[Setter[String]]
type CursorStyle = Prop[String] with keywords.CursorStyle[Setter[String]]
type DirectionStyle = Prop[String] with keywords.DirectionStyle[Setter[String]]
type DisplayStyle = Prop[String] with keywords.DisplayStyle[Setter[String]]
type EmptyCellsStyle = Prop[String] with keywords.EmptyCellsStyle[Setter[String]]
type FlexWrapStyle = Prop[String] with keywords.FlexWrapStyle[Setter[String]]
type FlexDirectionStyle = Prop[String] with keywords.FlexDirectionStyle[Setter[String]]
type FlexPositionStyle = Prop[String] with keywords.FlexPositionStyle[Setter[String]]
type FloatStyle = Prop[String] with keywords.FloatStyle[Setter[String]]
type FontSizeStyle = Prop[String] with keywords.FontSizeStyle[Setter[String], DerivedProp, LengthNum]
type FontStyleStyle = Prop[String] with keywords.FontStyleStyle[Setter[String]]
type FontWeightStyle = Prop[String] with keywords.FontWeightStyle[Setter[String]]
type JustifyContentStyle = Prop[String] with keywords.JustifyContentStyle[Setter[String]]
type LengthStyle = Prop[String] with units.LengthUnits[DerivedProp, LengthNum] with StyleStringValueBuilder[Setter[String]]
type LineStyle = Prop[String] with keywords.LineStyle[Setter[String]]
type ListStylePositionStyle = Prop[String] with keywords.ListStylePositionStyle[Setter[String]]
type ListStyleTypeStyle = Prop[String] with keywords.ListStyleTypeStyle[Setter[String]]
type MaxLengthStyle = Prop[String] with keywords.MinMaxLengthStyle[Setter[String], DerivedProp, LengthNum] with keywords.NoneStyle[Setter[String]]
type MinLengthStyle = Prop[String] with keywords.MinMaxLengthStyle[Setter[String], DerivedProp, LengthNum] with keywords.AutoStyle[Setter[String]]
type NoneStyle[V] = Prop[V] with keywords.NoneStyle[Setter[V]]
type NormalStyle[V] = Prop[V] with keywords.NormalStyle[Setter[V]]
type OverflowStyle = Prop[String] with keywords.OverflowStyle[Setter[String]]
type OverflowWrapStyle = Prop[String] with keywords.OverflowWrapStyle[Setter[String]]
type PaddingBoxSizingStyle = Prop[String] with keywords.PaddingBoxSizingStyle[Setter[String]]
type PageBreakStyle = Prop[String] with keywords.PageBreakStyle[Setter[String]]
type PointerEventsStyle = Prop[String] with keywords.PointerEventsStyle[Setter[String]]
type PositionStyle = Prop[String] with keywords.PositionStyle[Setter[String]]
type TableLayoutStyle = Prop[String] with keywords.TableLayoutStyle[Setter[String]]
type TextAlignStyle = Prop[String] with keywords.TextAlignStyle[Setter[String]]
type TextDecorationStyle = Prop[String] with keywords.TextDecorationStyle[Setter[String]]
type TextOverflowStyle = Prop[String] with keywords.TextOverflowStyle[Setter[String]]
type TextTransformStyle = Prop[String] with keywords.TextTransformStyle[Setter[String]]
type TextUnderlinePositionStyle = Prop[String] with keywords.TextUnderlinePositionStyle[Setter[String]]
type TimeStyle = Prop[String] with units.TimeUnits[DerivedProp] with StyleStringValueBuilder[Setter[String]]
type UrlStyle = Prop[String] with units.UrlUnits[DerivedProp] with StyleStringValueBuilder[Setter[String]]
type VerticalAlignStyle = Prop[String] with keywords.VerticalAlignStyle[Setter[String], DerivedProp, LengthNum]
type VisibilityStyle = Prop[String] with keywords.VisibilityStyle[Setter[String]]
type WhiteSpaceStyle = Prop[String] with keywords.WhiteSpaceStyle[Setter[String]]
type WordBreakStyle = Prop[String] with keywords.WordBreakStyle[Setter[String]]
// -- Basic style types --
protected def stringStyle(key: String): Prop[String] with StyleStringValueBuilder[Setter[String]]
protected def intStyle(key: String): Prop[Int] with units.CalcUnits[DerivedProp] with StyleStringValueBuilder[Setter[Int]]
protected def doubleStyle(key: String): Prop[Double] with units.CalcUnits[DerivedProp] with StyleStringValueBuilder[Setter[Double]]
// -- Shared custom types --
protected def autoStyle[V](key: String): AutoStyle[V]
protected def colorStyle(key: String): ColorStyle
protected def colorUrlStyle(key: String): ColorStyle with UrlStyle
protected def flexPositionStyle(key: String): FlexPositionStyle
protected def lengthStyle(key: String): LengthStyle
protected def lengthAutoStyle(key: String): LengthStyle with AutoStyle[String]
protected def lengthNormalStyle(key: String): LengthStyle with NormalStyle[String]
protected def lineStyle(key: String): LineStyle
protected def maxLengthStyle(key: String): MaxLengthStyle
protected def minLengthStyle(key: String): MinLengthStyle
protected def noneStyle[V](key: String): NoneStyle[V]
protected def normalStyle[V](key: String): NormalStyle[V]
protected def overflowStyle(key: String): OverflowStyle
protected def paddingBoxSizingStyle(key: String): PaddingBoxSizingStyle
protected def pageBreakStyle(key: String): PageBreakStyle
protected def textAlignStyle(key: String): TextAlignStyle
protected def timeStyle(key: String): TimeStyle
protected def urlStyle(key: String): UrlStyle
protected def urlNoneStyle(key: String): UrlStyle with NoneStyle[String]
// -- Unique custom types --
protected def alignContentStyle(key: String): AlignContentStyle
protected def backgroundAttachmentStyle(key: String): BackgroundAttachmentStyle
protected def backgroundSizeStyle(key: String): BackgroundSizeStyle
protected def backfaceVisibilityStyle(key: String): BackfaceVisibilityStyle
protected def borderCollapse(key: String): BorderCollapseStyle
protected def boxSizingStyle(key: String): BoxSizingStyle
protected def clearStyle(key: String): ClearStyle
protected def cursorStyle(key: String): CursorStyle
protected def directionStyle(key: String): DirectionStyle
protected def displayStyle(key: String): DisplayStyle
protected def emptyCellsStyle(key: String): EmptyCellsStyle
protected def flexWrapStyle(key: String): FlexWrapStyle
protected def flexDirectionStyle(key: String): FlexDirectionStyle
protected def floatStyle(key: String): FloatStyle
protected def fontSizeStyle(key: String): FontSizeStyle
protected def fontStyleStyle(key: String): FontStyleStyle
protected def fontWeightStyle(key: String): FontWeightStyle
protected def justifyContentStyle(key: String): JustifyContentStyle
protected def listStylePositionStyle(key: String): ListStylePositionStyle
protected def listStyleTypeStyle(key: String): ListStyleTypeStyle
protected def overflowWrapStyle(key: String): OverflowWrapStyle
protected def pointerEventsStyle(key: String): PointerEventsStyle
protected def positionStyle(key: String): PositionStyle
protected def tableLayoutStyle(key: String): TableLayoutStyle
protected def textDecorationStyle(key: String): TextDecorationStyle
protected def textOverflowStyle(key: String): TextOverflowStyle
protected def textTransformStyle(key: String): TextTransformStyle
protected def textUnderlinePositionStyle(key: String): TextUnderlinePositionStyle
protected def verticalAlignStyle(key: String): VerticalAlignStyle
protected def visibilityStyle(key: String): VisibilityStyle
protected def whiteSpaceStyle(key: String): WhiteSpaceStyle
protected def wordBreakStyle(key: String): WordBreakStyle
}
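// Sketch of what a concrete builder is expected to provide (hypothetical
// property wiring; the real definitions live in the implementing traits):
//   lazy val color: ColorStyle = colorStyle("color")
//   lazy val width: LengthStyle with AutoStyle[String] = lengthAutoStyle("width")
//   lazy val display: DisplayStyle = displayStyle("display")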
| raquo/scala-dom-types | shared/src/main/scala/com/raquo/domtypes/generic/builders/StylePropBuilder.scala | Scala | mit | 8,544 |
/**
* Copyright (c) 2007-2011 Eric Torreborre <[email protected]>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
* documentation files (the "Software"), to deal in the Software without restriction, including without limitation
* the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
* and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all copies or substantial portions of
* the Software. Neither the name of specs nor the names of its contributors may be used to endorse or promote
* products derived from this software without specific prior written permission.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
* TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
* CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
package org.specs.specification
import org.specs._
class specificationContextSpec extends SpecificationWithJUnit {
"A specification context" can {
"be used to specify the actions before the specification" in {
specificationMustDo("beforeSpec")
}
"be used to specify the actions after the specification" in {
specificationMustDo("afterSpec")
}
"be used to specify the actions before a sus" in {
specificationMustDo("beforeSus")
}
"be used to specify the actions after a sus" in {
specificationMustDo("afterSus")
}
"be used to specify the actions before an example" in {
specificationMustDo("beforeExample")
}
"be used to specify the actions after an example" in {
specificationMustDo("afterExample")
}
"be used to specify the actions around an example" in {
specificationMustDo("around")
}
"be used to specify the all actions" in {
specificationMustDo("beforeSpec",
"beforeSus",
"beforeExample",
"around",
"afterExample",
"afterSus",
"afterSpec"
)
}
}
"A specification context" should {
"not use around actions on a sus but only on examples and subexamples" in {
val spec = new TestedSpecification("beforeSus", "beforeExample", "around", "afterExample", "afterSus") {
"A system with a nested example" should {
"have one example with a subexample" in {
"this is a subexample" in { 1 must_== 1 }
}
}
}
specificationMustDo(spec,
"beforeSus",
"beforeExample", "around", "afterExample",
"afterSus")
}
}
def specificationMustDo(s: String*): Any = {
val spec = new SimpleSpecification(ContextParams(s:_*))
specificationMustDo(spec)
}
def specificationMustDo(spec: TestedSpecification): Any = {
specificationMustDo(spec: TestedSpecification, spec.params.values:_*)
}
def specificationMustDo(spec: TestedSpecification, expected: String*): Any = {
noDetailedDiffs()
spec.reportSpecs
spec.out.toList aka spec.messages.mkString("\n") must_== expected.toList.map(_+"_ok")
}
}
import org.specs.io.mock._
case class ContextParams(values: String*) {
def contain(s: String) = values.contains(s)
}
class TestedSpecification(val params: ContextParams) extends Specification with MockOutput {
def this(values: String*) = this(ContextParams(values:_*))
val out = new scala.collection.mutable.ListBuffer[String]
new SpecContext {
if (params.contain("beforeSpec")) beforeSpec(out.append("beforeSpec_ok"))
if (params.contain("beforeSus")) beforeSus(out.append("beforeSus_ok"))
if (params.contain("beforeExample")) before(out.append("beforeExample_ok"))
def output(a: =>Any) = { out.append("around_ok"); a }
if (params.contain("around")) aroundExpectations(output(_))
if (params.contain("afterExample")) after(out.append("afterExample_ok"))
if (params.contain("afterSus")) afterSus(out.append("afterSus_ok"))
if (params.contain("afterSpec")) afterSpec(out.append("afterSpec_ok"))
}
}
class SimpleSpecification(params: ContextParams) extends TestedSpecification(params) {
"this system" should { "have one example" in { 1 must_== 1 } }
}
| stuhood/specs | src/test/scala/org/specs/specification/specificationContextSpec.scala | Scala | mit | 4,801 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.hive
import java.io.File
import org.apache.spark.sql.{AnalysisException, Dataset, QueryTest, SaveMode}
import org.apache.spark.sql.catalyst.analysis.NoSuchTableException
import org.apache.spark.sql.catalyst.parser.ParseException
import org.apache.spark.sql.execution.columnar.InMemoryTableScanExec
import org.apache.spark.sql.execution.datasources.{CatalogFileIndex, HadoopFsRelation, LogicalRelation}
import org.apache.spark.sql.execution.datasources.parquet.ParquetFileFormat
import org.apache.spark.sql.hive.test.TestHiveSingleton
import org.apache.spark.sql.test.SQLTestUtils
import org.apache.spark.sql.types.StructType
import org.apache.spark.storage.RDDBlockId
import org.apache.spark.util.Utils
class CachedTableSuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
import hiveContext._
def rddIdOf(tableName: String): Int = {
val plan = table(tableName).queryExecution.sparkPlan
plan.collect {
case InMemoryTableScanExec(_, _, relation) =>
relation.cachedColumnBuffers.id
case _ =>
fail(s"Table $tableName is not cached\n" + plan)
}.head
}
def isMaterialized(rddId: Int): Boolean = {
val maybeBlock = sparkContext.env.blockManager.get(RDDBlockId(rddId, 0))
maybeBlock.foreach(_ => sparkContext.env.blockManager.releaseLock(RDDBlockId(rddId, 0)))
maybeBlock.nonEmpty
}
test("cache table") {
val preCacheResults = sql("SELECT * FROM src").collect().toSeq
cacheTable("src")
assertCached(sql("SELECT * FROM src"))
checkAnswer(
sql("SELECT * FROM src"),
preCacheResults)
assertCached(sql("SELECT * FROM src s"))
checkAnswer(
sql("SELECT * FROM src s"),
preCacheResults)
uncacheTable("src")
assertCached(sql("SELECT * FROM src"), 0)
}
test("cache invalidation") {
sql("CREATE TABLE cachedTable(key INT, value STRING)")
sql("INSERT INTO TABLE cachedTable SELECT * FROM src")
checkAnswer(sql("SELECT * FROM cachedTable"), table("src").collect().toSeq)
cacheTable("cachedTable")
checkAnswer(sql("SELECT * FROM cachedTable"), table("src").collect().toSeq)
sql("INSERT INTO TABLE cachedTable SELECT * FROM src")
checkAnswer(
sql("SELECT * FROM cachedTable"),
table("src").collect().toSeq ++ table("src").collect().toSeq)
sql("DROP TABLE cachedTable")
}
test("Drop cached table") {
sql("CREATE TABLE cachedTableTest(a INT)")
cacheTable("cachedTableTest")
sql("SELECT * FROM cachedTableTest").collect()
sql("DROP TABLE cachedTableTest")
intercept[AnalysisException] {
sql("SELECT * FROM cachedTableTest").collect()
}
}
test("DROP nonexistant table") {
sql("DROP TABLE IF EXISTS nonexistantTable")
}
test("uncache of nonexistant tables") {
// make sure table doesn't exist
intercept[NoSuchTableException](spark.table("nonexistantTable"))
intercept[NoSuchTableException] {
spark.catalog.uncacheTable("nonexistantTable")
}
intercept[NoSuchTableException] {
sql("UNCACHE TABLE nonexistantTable")
}
sql("UNCACHE TABLE IF EXISTS nonexistantTable")
}
test("no error on uncache of non-cached table") {
val tableName = "newTable"
withTable(tableName) {
sql(s"CREATE TABLE $tableName(a INT)")
// no error will be reported in the following three ways to uncache a table.
spark.catalog.uncacheTable(tableName)
sql("UNCACHE TABLE newTable")
sparkSession.table(tableName).unpersist()
}
}
test("'CACHE TABLE' and 'UNCACHE TABLE' HiveQL statement") {
sql("CACHE TABLE src")
assertCached(table("src"))
assert(spark.catalog.isCached("src"), "Table 'src' should be cached")
sql("UNCACHE TABLE src")
assertCached(table("src"), 0)
assert(!spark.catalog.isCached("src"), "Table 'src' should not be cached")
}
test("CACHE TABLE tableName AS SELECT * FROM anotherTable") {
withTempView("testCacheTable") {
sql("CACHE TABLE testCacheTable AS SELECT * FROM src")
assertCached(table("testCacheTable"))
val rddId = rddIdOf("testCacheTable")
assert(
isMaterialized(rddId),
"Eagerly cached in-memory table should have already been materialized")
uncacheTable("testCacheTable")
assert(!isMaterialized(rddId), "Uncached in-memory table should have been unpersisted")
}
}
test("CACHE TABLE tableName AS SELECT ...") {
withTempView("testCacheTable") {
sql("CACHE TABLE testCacheTable AS SELECT key FROM src LIMIT 10")
assertCached(table("testCacheTable"))
val rddId = rddIdOf("testCacheTable")
assert(
isMaterialized(rddId),
"Eagerly cached in-memory table should have already been materialized")
uncacheTable("testCacheTable")
assert(!isMaterialized(rddId), "Uncached in-memory table should have been unpersisted")
}
}
test("CACHE LAZY TABLE tableName") {
sql("CACHE LAZY TABLE src")
assertCached(table("src"))
val rddId = rddIdOf("src")
assert(
!isMaterialized(rddId),
"Lazily cached in-memory table shouldn't be materialized eagerly")
sql("SELECT COUNT(*) FROM src").collect()
assert(
isMaterialized(rddId),
"Lazily cached in-memory table should have been materialized")
uncacheTable("src")
assert(!isMaterialized(rddId), "Uncached in-memory table should have been unpersisted")
}
test("CACHE TABLE with Hive UDF") {
withTempView("udfTest") {
sql("CACHE TABLE udfTest AS SELECT * FROM src WHERE floor(key) = 1")
assertCached(table("udfTest"))
uncacheTable("udfTest")
}
}
test("REFRESH TABLE also needs to recache the data (data source tables)") {
val tempPath: File = Utils.createTempDir()
tempPath.delete()
table("src").write.mode(SaveMode.Overwrite).parquet(tempPath.toString)
sql("DROP TABLE IF EXISTS refreshTable")
sparkSession.catalog.createExternalTable("refreshTable", tempPath.toString, "parquet")
checkAnswer(
table("refreshTable"),
table("src").collect())
// Cache the table.
sql("CACHE TABLE refreshTable")
assertCached(table("refreshTable"))
// Append new data.
table("src").write.mode(SaveMode.Append).parquet(tempPath.toString)
// We are still using the old data.
assertCached(table("refreshTable"))
checkAnswer(
table("refreshTable"),
table("src").collect())
// Refresh the table.
sql("REFRESH TABLE refreshTable")
// We are using the new data.
assertCached(table("refreshTable"))
checkAnswer(
table("refreshTable"),
table("src").union(table("src")).collect())
// Drop the table and create it again.
sql("DROP TABLE refreshTable")
sparkSession.catalog.createExternalTable("refreshTable", tempPath.toString, "parquet")
// It is not cached.
assert(!isCached("refreshTable"), "refreshTable should not be cached.")
// Refresh the table. REFRESH TABLE command should not make an uncached
// table cached.
sql("REFRESH TABLE refreshTable")
checkAnswer(
table("refreshTable"),
table("src").union(table("src")).collect())
// It is not cached.
assert(!isCached("refreshTable"), "refreshTable should not be cached.")
sql("DROP TABLE refreshTable")
Utils.deleteRecursively(tempPath)
}
test("SPARK-15678: REFRESH PATH") {
val tempPath: File = Utils.createTempDir()
tempPath.delete()
table("src").write.mode(SaveMode.Overwrite).parquet(tempPath.toString)
sql("DROP TABLE IF EXISTS refreshTable")
sparkSession.catalog.createExternalTable("refreshTable", tempPath.toString, "parquet")
checkAnswer(
table("refreshTable"),
table("src").collect())
// Cache the table.
sql("CACHE TABLE refreshTable")
assertCached(table("refreshTable"))
// Append new data.
table("src").write.mode(SaveMode.Append).parquet(tempPath.toString)
// We are still using the old data.
assertCached(table("refreshTable"))
checkAnswer(
table("refreshTable"),
table("src").collect())
// Refresh the table.
sql(s"REFRESH ${tempPath.toString}")
// We are using the new data.
assertCached(table("refreshTable"))
checkAnswer(
table("refreshTable"),
table("src").union(table("src")).collect())
// Drop the table and create it again.
sql("DROP TABLE refreshTable")
sparkSession.catalog.createExternalTable("refreshTable", tempPath.toString, "parquet")
// It is not cached.
assert(!isCached("refreshTable"), "refreshTable should not be cached.")
// Refresh the table. REFRESH command should not make an uncached
// table cached.
sql(s"REFRESH ${tempPath.toString}")
checkAnswer(
table("refreshTable"),
table("src").union(table("src")).collect())
// It is not cached.
assert(!isCached("refreshTable"), "refreshTable should not be cached.")
sql("DROP TABLE refreshTable")
Utils.deleteRecursively(tempPath)
}
test("Cache/Uncache Qualified Tables") {
withTempDatabase { db =>
withTempView("cachedTable") {
sql(s"CREATE TABLE $db.cachedTable STORED AS PARQUET AS SELECT 1")
sql(s"CACHE TABLE $db.cachedTable")
assertCached(spark.table(s"$db.cachedTable"))
activateDatabase(db) {
assertCached(spark.table("cachedTable"))
sql("UNCACHE TABLE cachedTable")
assert(!spark.catalog.isCached("cachedTable"), "Table 'cachedTable' should not be cached")
sql(s"CACHE TABLE cachedTable")
assert(spark.catalog.isCached("cachedTable"), "Table 'cachedTable' should be cached")
}
sql(s"UNCACHE TABLE $db.cachedTable")
assert(!spark.catalog.isCached(s"$db.cachedTable"),
"Table 'cachedTable' should not be cached")
}
}
}
test("Cache Table As Select - having database name") {
withTempDatabase { db =>
withTempView("cachedTable") {
val e = intercept[ParseException] {
sql(s"CACHE TABLE $db.cachedTable AS SELECT 1")
}.getMessage
assert(e.contains("It is not allowed to add database prefix ") &&
e.contains("to the table name in CACHE TABLE AS SELECT"))
}
}
}
test("SPARK-11246 cache parquet table") {
sql("CREATE TABLE cachedTable STORED AS PARQUET AS SELECT 1")
cacheTable("cachedTable")
val sparkPlan = sql("SELECT * FROM cachedTable").queryExecution.sparkPlan
assert(sparkPlan.collect { case e: InMemoryTableScanExec => e }.size === 1)
sql("DROP TABLE cachedTable")
}
test("cache a table using CatalogFileIndex") {
withTable("test") {
sql("CREATE TABLE test(i int) PARTITIONED BY (p int) STORED AS parquet")
val tableMeta = spark.sharedState.externalCatalog.getTable("default", "test")
val catalogFileIndex = new CatalogFileIndex(spark, tableMeta, 0)
val dataSchema = StructType(tableMeta.schema.filterNot { f =>
tableMeta.partitionColumnNames.contains(f.name)
})
val relation = HadoopFsRelation(
location = catalogFileIndex,
partitionSchema = tableMeta.partitionSchema,
dataSchema = dataSchema,
bucketSpec = None,
fileFormat = new ParquetFileFormat(),
options = Map.empty)(sparkSession = spark)
val plan = LogicalRelation(relation, catalogTable = Some(tableMeta))
spark.sharedState.cacheManager.cacheQuery(Dataset.ofRows(spark, plan))
assert(spark.sharedState.cacheManager.lookupCachedData(plan).isDefined)
val sameCatalog = new CatalogFileIndex(spark, tableMeta, 0)
val sameRelation = HadoopFsRelation(
location = sameCatalog,
partitionSchema = tableMeta.partitionSchema,
dataSchema = dataSchema,
bucketSpec = None,
fileFormat = new ParquetFileFormat(),
options = Map.empty)(sparkSession = spark)
val samePlan = LogicalRelation(sameRelation, catalogTable = Some(tableMeta))
assert(spark.sharedState.cacheManager.lookupCachedData(samePlan).isDefined)
}
}
}
| Panos-Bletsos/spark-cost-model-optimizer | sql/hive/src/test/scala/org/apache/spark/sql/hive/CachedTableSuite.scala | Scala | apache-2.0 | 12,894 |
package transactional
object MegaBench extends Benchmark {
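// Transactional[T] is a context function type (Scala 3): code of this type can
// summon the enclosing Transaction implicitly, as thisTransaction does below.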
type Transactional[T] = Transaction ?=> T
def transaction[T](op: Transactional[T]): T = {
implicit val trans: Transaction = new Transaction
val res = op
trans.commit()
res
}
def thisTransaction: Transactional[Transaction] = implicitly[Transaction]
abstract class Op {
def f(x: Int): Transactional[Int]
}
class Op0 extends Op {
def f(x: Int): Transactional[Int] = {
thisTransaction.println("0th step")
x
}
}
class Op1 extends Op {
def f(x: Int): Transactional[Int] = {
thisTransaction.println("first step")
x + 1
}
}
class Op2 extends Op {
def f(x: Int): Transactional[Int] = {
thisTransaction.println("second step")
x + 2
}
}
class Op3 extends Op {
def f(x: Int): Transactional[Int] = {
thisTransaction.println("third step")
x + 3
}
}
val op = Array[Op](new Op0, new Op1, new Op2, new Op3)
def f(x: Int, n: Int): Transactional[Int] = {
thisTransaction.println("fourth step")
if (n > 0) f(op(n % 4).f(x), n - 1)
else {
if (x % 2 != 0) thisTransaction.abort()
x
}
}
def run(): Int = {
transaction {
val res = f(7, 10)
assert(!thisTransaction.isAborted)
assert(res == 22)
res
}
}
}
object ImplicitMega extends Runner("megamorphic", MegaBench, 22)
| dotty-staging/dotty | tests/bench/transactional/ImplicitMega.scala | Scala | apache-2.0 | 1,412 |
/*
* Copyright (C) 2005, The OpenURP Software.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.openurp.rd.achievement.model
import org.beangle.data.model.LongId
import org.beangle.data.model.pojo.Named
import org.openurp.base.model.User
/** Textbook editor. */
class TextbookEditor extends LongId with Named {
/** Textbook achievement. */
var achievement: TextbookAchievement = _
/** Whether this editor is the chief editor. */
var chief: Boolean = _
/** Corresponding on-campus user, if any. */
var user: Option[User] = _
/** Ordering among chief and contributing editors. */
var idx: Int = _
}
| openurp/api | rd/src/main/scala/org/openurp/rd/achievement/model/TextbookEditor.scala | Scala | lgpl-3.0 | 1,174 |
/*
* Created on 2010/08/07
* Copyright (c) 2010-2014, Wei-ju Wu.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of Wei-ju Wu nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*/
package org.zmpp.zcode
import javax.swing._
import javax.swing.text.StyleConstants
import javax.swing.text.MutableAttributeSet
import java.awt.{FlowLayout,BorderLayout,GridLayout,Color,Font,Dimension,Graphics2D}
import java.awt.{Rectangle}
import java.awt.event._
import java.io.{FileOutputStream, FileInputStream}
import scala.collection.JavaConversions._
import java.util.concurrent.{Executors, TimeUnit}
/*
* Implementation of the standard screen model using Swing components.
*/
// The V1-V3 status bar. This is implemented as a separate component
// because there is at least 1 game (Seastalker) which has both status
// bar and two windows
class StatusBar extends JPanel(new GridLayout(1, 2)) {
private val objectLabel = new JLabel(" ")
private val scoreLabel = new JLabel(" ")
private val left = new JPanel(new FlowLayout(FlowLayout.LEFT))
private val right = new JPanel(new FlowLayout(FlowLayout.RIGHT))
this.add(left)
this.add(right)
left.add(objectLabel)
right.add(scoreLabel)
def set(objectName: String, scoreOrTime: String) {
objectLabel.setText(objectName)
scoreLabel.setText(scoreOrTime)
}
}
// Define _two_ color tables, one for background colors
// and one for foreground tables. The reason is that some
// games (e.g. Varicella) don't really deal with colors properly.
// They rely on the foreground color being brighter than the foreground
// color. Unfortunately, Varicella also assumes that the default foreground
// color is not black.
object BackgroundColors {
val colorTable = new Array[Color](13)
// current is never accessed and default is set by screen model
colorTable(Colors.Black) = Color.BLACK
colorTable(Colors.Red) = new Color(200, 0, 0)
colorTable(Colors.Green) = new Color(0, 200, 0)
colorTable(Colors.Yellow) = new Color(200, 200, 0)
colorTable(Colors.Blue) = new Color(0, 0, 200)
colorTable(Colors.Magenta) = new Color(200, 0, 200)
colorTable(Colors.Cyan) = new Color(0, 200, 200)
colorTable(Colors.White) = new Color(255, 255, 255)
def apply(colornum: Int): Color = {
colorTable(colornum)
}
def setDefault(colornum: Int) {
colorTable(Colors.Default) = colorTable(colornum)
}
}
object ForegroundColors {
val Black = new Color(60, 60, 60)
val colorTable = new Array[Color](13)
// current is never accessed and default is set by screen model
colorTable(Colors.Black) = Black
colorTable(Colors.Red) = new Color(255, 0, 0)
colorTable(Colors.Green) = new Color(0, 255, 0)
colorTable(Colors.Yellow) = new Color(255, 255, 0)
colorTable(Colors.Blue) = new Color(0, 0, 255)
colorTable(Colors.Magenta) = new Color(255, 0, 255)
colorTable(Colors.Cyan) = new Color(0, 255, 255)
colorTable(Colors.White) = new Color(200, 200, 200)
def apply(colornum: Int): Color = {
colorTable(colornum)
}
def setDefault(colornum: Int) {
colorTable(Colors.Default) = colorTable(colornum)
}
}
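// Illustration of the split tables: for the same color number the foreground
// entry is the brighter shade, e.g. ForegroundColors(Colors.Red) is
// (255, 0, 0) while BackgroundColors(Colors.Red) is the darker (200, 0, 0).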
// A class to implement the top window. In Swing, the top window sits
// in the glass pane. This is done to implement the tricky behaviour
// of the Z-machine screen model, where the top window overlays the
// bottom window, possibly overlapping it.
// The standard layout containers either don't overlap or don't
// show the stacked components below. This is also one of the reasons
// why Glk can't fully implement the Z-machine screen model; V6
// is another story.
class TextGrid extends JTextPane with ScreenModelWindow {
private var numLines = 0
private var _cursorPos = (1, 1)
// Even though the text grid is a non-buffered window type, we still buffer
// it up to prepare for resizing of the main window
var buffer: TextGridBuffer = null
var totalLines = 0
var charsPerLine = 0
  // we need access to the screen model to retrieve the current style attributes
  var screenModel: SwingScreenModelStd = null
setOpaque(false)
def windowSize = numLines
def windowSize_=(numLines: Int) {
this.numLines = numLines
}
def cursorPosition = _cursorPos
def cursorPosition_=(pos: (Int, Int)) = {
// TODO: check boundaries, if outside set to column 1 of current line
//printf("@set_cursor -> (%d, %d)\\n", pos._1, pos._2)
_cursorPos = pos
}
def putChar(c: Char) {
//printf("TopWindow.putChar at pos: (%d, %d): '%c'\\n",
// _cursorPos._1, _cursorPos._2, c)
if (c == '\\n') moveCursorToNextLine
else {
val col = _cursorPos._2
val line = _cursorPos._1
// new style
buffer(line - 1, col - 1) = screenModel.styleCharacter(c)
if (col < charsPerLine) _cursorPos = (line, col + 1)
}
}
  private def moveCursorToNextLine {
    val nextLine = math.min(numLines, _cursorPos._1 + 1)
    // cursor positions are 1-based, so the new column is 1, not 0
    _cursorPos = (nextLine, 1)
  }
def clear {
//println("TOPWINDOW.CLEAR()")
buffer.fillGridWith(TextStyles.DefaultFixedBlank, 0)
}
def reset {
val currentSize: Dimension = getSize
val fontMetrics = getGraphics.getFontMetrics(getFont)
charsPerLine = currentSize.width / fontMetrics.charWidth('0')
totalLines = currentSize.height / fontMetrics.getHeight
buffer = new TextGridBuffer(totalLines, charsPerLine)
//printf("SCREEN SIZE: %d LINES %d COLS\\n", totalLines, charsPerLine)
clear
}
def flush: Boolean = {
val doc = getDocument
doc.remove(0, doc.getLength)
var row = 0
var col = 0
val attrs = getInputAttributes
while (row < totalLines) {
col = 0
while (col < charsPerLine) {
val styledChar = buffer(row, col)
if (styledChar == TextStyles.DefaultFixedBlank) {
screenModel.setTransparentAttributeSet(attrs)
}
else screenModel.setAttributeSet(attrs, styledChar)
doc.insertString(
doc.getLength,
((styledChar >>> 16) & 0xffff).asInstanceOf[Char].toString,
attrs)
col += 1
}
      doc.insertString(doc.getLength, "\n", null)
row += 1
}
true
}
}
object TextBuffer {
val MarginLeft = 20
val MarginRight = 20
val MarginTop = 10
val MarginBottom = 10
}
object TextInputMode extends Enumeration {
val InputNone = Value("None")
val ReadLine = Value("ReadLine")
val ReadChar = Value("ReadChar")
}
class TextBuffer(screenModel: SwingScreenModelStd)
extends JTextPane with ScreenModelWindow with KeyListener {
setMargin(new java.awt.Insets(TextBuffer.MarginTop,
TextBuffer.MarginLeft,
TextBuffer.MarginBottom,
TextBuffer.MarginRight))
addKeyListener(this)
val runBuffer = new TextRunBuffer
var inputMode = TextInputMode.InputNone
var useBufferMode = true
def isCharInputMode = inputMode == TextInputMode.ReadChar
def isLineInputMode = inputMode == TextInputMode.ReadLine
private var inputStart = 0
private var maxInputChars = 0
def reset {
inputMode = TextInputMode.InputNone
runBuffer.reset
inputStart = 0
maxInputChars = 0
clear
}
def setStyle(style: Int) = runBuffer.setStyle(style)
def setFont(fontnum: Int) = runBuffer.setFont(fontnum)
def setColor(foreground: Int, background: Int) = runBuffer.setColor(foreground, background)
private def attributeSetFor(style: Int) = {
val attrs = getInputAttributes
screenModel.attributeSetFor(attrs, style)
attrs
}
private def numRows = {
val g2d = getGraphics.asInstanceOf[Graphics2D]
val lineMetrics = screenModel.stdFont.getLineMetrics("0", g2d.getFontRenderContext)
(getHeight / lineMetrics.getHeight).toInt + 1
}
def clear {
val clearScreenBuilder = new StringBuilder()
    // preserve the current style when doing a clear!
    runBuffer.clear
    //println("Bottom Window has " + numRows + " rows.")
    (1 to numRows).foreach(_ => clearScreenBuilder.append('\n'))
setText(clearScreenBuilder.toString)
setBackground(screenModel.backgroundColor)
setCaretToEnd(0)
}
def setCaretToEnd(numLeftOverChars: Int) {
inputStart = getDocument.getLength
setCaretPosition(inputStart)
inputStart -= numLeftOverChars
}
def putChar(c: Char) {
runBuffer.append(c)
}
def flush: Boolean = {
val doc = getDocument
val styledRuns = runBuffer.grabRuns
for (styledRun <- styledRuns) {
doc.insertString(doc.getLength, styledRun.text, attributeSetFor(styledRun.style))
}
true
}
// ****** KeyListener ******
def keyPressed(event: KeyEvent) {
import KeyEvent._
if (isCharInputMode) {
val keyChar = event.getKeyChar
if (keyChar == VK_ENTER) {
screenModel.resumeWithCharInput(13)
} else if (keyChar != CHAR_UNDEFINED) {
screenModel.resumeWithCharInput(keyChar)
} else {
val keyCode = event.getKeyCode
keyCode match {
case VK_ENTER => screenModel.resumeWithCharInput(13)
case VK_UP => screenModel.resumeWithCharInput(129)
case VK_DOWN => screenModel.resumeWithCharInput(130)
case VK_LEFT => screenModel.resumeWithCharInput(131)
case VK_RIGHT => screenModel.resumeWithCharInput(132)
case VK_F1 => screenModel.resumeWithCharInput(133)
case VK_F2 => screenModel.resumeWithCharInput(134)
case VK_F3 => screenModel.resumeWithCharInput(135)
case VK_F4 => screenModel.resumeWithCharInput(136)
case VK_F5 => screenModel.resumeWithCharInput(137)
case VK_F6 => screenModel.resumeWithCharInput(138)
case VK_F7 => screenModel.resumeWithCharInput(139)
case VK_F8 => screenModel.resumeWithCharInput(140)
case VK_F9 => screenModel.resumeWithCharInput(141)
case VK_F10 => screenModel.resumeWithCharInput(142)
case VK_F11 => screenModel.resumeWithCharInput(143)
case VK_F12 => screenModel.resumeWithCharInput(144)
case VK_NUMPAD0 => screenModel.resumeWithCharInput(145)
case VK_NUMPAD1 => screenModel.resumeWithCharInput(146)
case VK_NUMPAD2 => screenModel.resumeWithCharInput(147)
case VK_NUMPAD3 => screenModel.resumeWithCharInput(148)
case VK_NUMPAD4 => screenModel.resumeWithCharInput(149)
case VK_NUMPAD5 => screenModel.resumeWithCharInput(150)
case VK_NUMPAD6 => screenModel.resumeWithCharInput(151)
case VK_NUMPAD7 => screenModel.resumeWithCharInput(152)
case VK_NUMPAD8 => screenModel.resumeWithCharInput(153)
case VK_NUMPAD9 => screenModel.resumeWithCharInput(154)
}
}
} else if (isLineInputMode) {
val doc = getDocument
val caretPos = getCaret.getDot
if (caretPos < inputStart) getCaret.setDot(doc.getLength)
if (event.getKeyCode == KeyEvent.VK_ENTER) {
event.consume
val input = doc.getText(inputStart, doc.getLength - inputStart)
        doc.insertString(doc.getLength, "\n", null)
        println("Input was: " + input)
        screenModel.resumeWithLineInput(input + "\n")
inputMode = TextInputMode.InputNone
} else if (event.getKeyCode == KeyEvent.VK_BACK_SPACE ||
event.getKeyCode == KeyEvent.VK_LEFT) {
if (getCaret.getDot <= inputStart) event.consume
} else if (event.getKeyCode == KeyEvent.VK_UP) {
event.consume
} else if (doc.getLength - inputStart >= maxInputChars) {
// eat the non-visible characters that go over the text buffer
event.consume
}
} else {
// no input mode, eat the key event
event.consume
}
}
def keyTyped(event: KeyEvent) {
if (isCharInputMode) {
event.consume
} else if (!isLineInputMode) {
// not in input mode, eat all key events
event.consume
} else if (getDocument.getLength - inputStart >= maxInputChars) {
// we need to consume twice in order to eat the visible characters
// in line input mode
event.consume
}
}
def keyReleased(event: KeyEvent) {}
  // input
  // must be called on the UI event dispatch thread
def requestLineInput(maxChars: Int, numLeftOverChars: Int) {
//println("requestLineInput")
requestFocusInWindow
getCaret.setVisible(true)
setCaretToEnd(numLeftOverChars)
maxInputChars = maxChars
inputMode = TextInputMode.ReadLine
}
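  // Hedged usage sketch: a caller that is not on the event dispatch thread
  // would schedule the request, e.g.
  //   SwingUtilities.invokeLater(new Runnable {
  //     def run { requestLineInput(maxChars = 80, numLeftOverChars = 0) }
  //   })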
def requestCharInput {
requestFocusInWindow
getCaret.setVisible(true)
inputMode = TextInputMode.ReadChar
}
def cursorPosition = {
throw new UnsupportedOperationException("@get_cursor not supported for bottom window")
}
def cursorPosition_=(pos: (Int, Int)) = {
throw new UnsupportedOperationException("@set_cursor not supported for bottom window")
}
}
class InterruptTask(vm: Machine, screenModel: ScreenModel,
time: Int, routine: Int) {
private[this] val scheduler = Executors.newScheduledThreadPool(1)
val runner = new Runnable {
def run {
val oldfp = vm.state.fp
vm.callInterrupt(routine)
while (vm.state.fp != oldfp) {
vm.doInstruction(false)
}
screenModel.flushInterruptOutput
if (vm.state.thrownAwayValue == 1) {
scheduler.shutdown
screenModel.cancelInput
}
}
}
def shutdown = scheduler.shutdown
def start {
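    // `time` is specified in tenths of a second (Z-machine convention),
    // so time * 100 converts the period to milliseconds.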
    scheduler.scheduleAtFixedRate(runner, 0, time * 100,
                                  TimeUnit.MILLISECONDS)
}
def await {
scheduler.awaitTermination(50, TimeUnit.SECONDS)
}
}
/*
* Standard screen model for all versions except 6.
*/
class SwingScreenModelStd(topWindow: TextGrid,
var DefaultBackground: Int = Colors.White,
var DefaultForeground: Int = Colors.Black)
extends JPanel(new BorderLayout)
with OutputStream with InputStream with SwingScreenModel with FocusListener {
import ScrollPaneConstants._
import ScreenModel._
var vm: Machine = null
var activeWindowId = BottomWindow // 0 is the bottom window, 1 is the top window
var currentBackground = DefaultBackground
var currentForeground = DefaultForeground
var style = TextStyles.Roman
var currentFont = Fonts.Normal
val fixedFont = new Font("Courier New", Font.PLAIN, 14)
val stdFont = new Font("American Typewriter", Font.PLAIN, 14)
val statusBar = new StatusBar
val mainPane = new JPanel(new BorderLayout)
val bottomWindow = new TextBuffer(this)
val scrollPane = new JScrollPane(bottomWindow, VERTICAL_SCROLLBAR_NEVER,
HORIZONTAL_SCROLLBAR_NEVER)
// for timed input
private[this] var interruptTask: InterruptTask = null
ForegroundColors.setDefault(DefaultForeground)
BackgroundColors.setDefault(DefaultBackground)
scrollPane.setPreferredSize(new Dimension(640, 480))
mainPane.add(scrollPane, BorderLayout.CENTER)
add(mainPane, BorderLayout.CENTER)
topWindow.addFocusListener(this)
topWindow.screenModel = this
def activeWindow: ScreenModelWindow = activeWindowId match {
case BottomWindow => bottomWindow
case _ => topWindow
}
def connect(aVm: Machine) {
vm = aVm
remove(statusBar)
if (vm.version <= 3) {
add(statusBar, BorderLayout.NORTH)
}
if (vm.state.header.isBeyondZork) {
// Beyond Zork only works well with White on Black !!!
DefaultBackground = Colors.Black
DefaultForeground = Colors.White
ForegroundColors.setDefault(DefaultForeground)
BackgroundColors.setDefault(DefaultBackground)
}
}
def capabilities = List(SupportsColors, SupportsBoldFont, SupportsItalicFont,
SupportsFixedFont, SupportsTimedInput, SupportsSound,
SupportsScreenSplit, SupportsMouse)
import TextStyles._
private[this] var selected = true
def isSelected = selected
def select(flag: Boolean) = selected = flag
def putChar(c: Char) {
if (SwingUtilities.isEventDispatchThread) _putChar(c)
else {
SwingUtilities.invokeAndWait(new Runnable {
def run = _putChar(c)
})
}
}
def _putChar(c: Char) = activeWindow.putChar(vm.state.encoding.zsciiToUnicode(c))
def _flush = {
topWindow.flush
bottomWindow.flush
}
def flush {
if (SwingUtilities.isEventDispatchThread) _flush
else {
SwingUtilities.invokeAndWait(new Runnable {
def run = _flush
})
}
}
def flushInterruptOutput {
SwingUtilities.invokeAndWait(new Runnable {
def run = {
_flush
if (bottomWindow.isLineInputMode) {
bottomWindow.setCaretToEnd(vm.readLineInfo.numLeftOverChars)
}
}
})
}
def updateStatusLine {
val objectName = vm.statusLineObjectName
val scoreOrTime = vm.statusLineScoreOrTime
statusBar.set(objectName, scoreOrTime)
}
// input
def readLine: Int = {
flush
val maxChars = vm.readLineInfo.maxInputChars
//println("MAX_CHARS FOR READLINE: " + maxChars)
if (vm.version <= 3) updateStatusLine
scrollPane.getViewport.scrollRectToVisible(bottomRectangle)
bottomWindow.requestLineInput(maxChars,
vm.readLineInfo.numLeftOverChars)
startInterruptTaskIfNeeded(vm.readLineInfo.routine,
vm.readLineInfo.time)
0
}
private def bottomRectangle: Rectangle = {
val right = bottomWindow.getWidth
val bottom = bottomWindow.getHeight
new Rectangle(0, bottom - 10, right, 10)
}
def readChar {
if (vm.version <= 3) updateStatusLine
flush
bottomWindow.requestCharInput
startInterruptTaskIfNeeded(vm.readCharInfo.routine, vm.readCharInfo.time)
}
private def startInterruptTaskIfNeeded(routine: Int, time: Int) {
if (vm.version >= 4 && routine > 0 && time > 0) {
interruptTask = new InterruptTask(vm, this, time, routine)
interruptTask.start
}
}
def cancelInput {
// when this is called, the scheduler is guaranteed to not
// execute any more interrupts
if (interruptTask != null) {
interruptTask = null
}
if (vm.state.runState == ZMachineRunStates.ReadChar) {
vm.resumeWithCharInput(0)
} else if (vm.state.runState == ZMachineRunStates.ReadLine) {
vm.resumeWithLineInput("")
}
ExecutionControl.executeTurn(vm, this)
}
def resumeWithLineInput(input: String) {
if (interruptTask != null) {
interruptTask.shutdown
interruptTask.await
interruptTask = null
}
vm.resumeWithLineInput(input)
ExecutionControl.executeTurn(vm, this)
}
def resumeWithCharInput(keyCode: Int) {
//println("RESUME WITH " + keyCode)
if (interruptTask != null) {
interruptTask.shutdown
interruptTask.await
interruptTask = null
}
vm.resumeWithCharInput(keyCode)
ExecutionControl.executeTurn(vm, this)
}
def screenOutputStream = this
def keyboardStream = this
def screenModel = this
def splitWindow(lines: Int) {
//println("@split_window, lines = " + lines)
topWindow.windowSize = lines
if (vm.version == 3) topWindow.clear
}
def setWindow(windowId: Int) {
//println("@set_window, window id = " + windowId)
if (windowId == BottomWindow || windowId == TopWindow) {
activeWindowId = windowId
} else {
throw new IllegalArgumentException(
"@set_window illegal window: %d".format(windowId))
}
}
def cursorPosition: (Int, Int) = activeWindow.cursorPosition
def setCursorPosition(line: Int, column: Int) {
//printf("@set_cursor, line = %d, col = %d, active window: %d\\n", line, column, activeWindowId)
activeWindow.cursorPosition = (line, column)
}
def bufferMode(flag: Int) {
//println("@buffer_mode, flag = " + flag)
bottomWindow.useBufferMode = (flag != 0)
}
def eraseWindow(windowId: Int) {
// TODO: polymorphism might make this prettier and shorter
//println("@erase_window, win = " + windowId)
if (windowId == -1) {
topWindow.windowSize = 0
topWindow.clear
bottomWindow.clear
} else if (windowId == -2) {
topWindow.clear
bottomWindow.clear
} else if (windowId == TopWindow || windowId == 3 && activeWindowId == TopWindow) {
topWindow.clear
} else if (windowId == BottomWindow || windowId == 3 && activeWindowId == BottomWindow) {
bottomWindow.clear
}
}
def eraseLine(value: Int) {
printf("@erase_line %d not implemented yet (TODO)\\n", value)
}
def setTextStyle(aStyle: Int) {
//printf("@set_style: %d\\n", aStyle)
bottomWindow.setStyle(aStyle)
style = aStyle
}
// Note: window parameter is only relevant for V6
// The method is called "setColour" only to express that it
// implements the Z-instruction
def setColour(foreground: Int, background: Int, window: Int) {
//printf("setColour(), foreground = %d, background = %d\\n",
// foreground, background)
bottomWindow.setColor(foreground, background)
// Make sure that we don't end up in an infinite loop
// This could happen when we set currentForeground/currentBackground to CURRENT
if (foreground != Colors.Current) currentForeground = foreground
if (background != Colors.Current) currentBackground = background
// we need to change the caret color of the bottom window, too
//println("setting caret color")
if (isReverseVideo(this.style)) {
//println("reverse")
bottomWindow.setCaretColor(getColor(background, false))
} else {
//println("normal video")
val color = getColor(foreground, true)
//println("color will be: " + color)
bottomWindow.setCaretColor(color)
}
//println("exiting setColour")
}
private def getColor(colorId: Int, isForeground: Boolean): Color = {
colorId match {
case Colors.Current =>
if (isForeground) getColor(currentForeground, true) else getColor(currentBackground, false)
case _ =>
if (isForeground) ForegroundColors(colorId) else BackgroundColors(colorId)
}
}
def backgroundColor = getColor(currentBackground, false)
def textColor = getColor(currentForeground, true)
def setFont(font: Int): Int = {
if (isFontSupported(font)) {
val previousFont = currentFont
currentFont = font
bottomWindow.setFont(font)
previousFont
} else 0
}
private def isFontSupported(font: Int): Boolean = {
font == Fonts.Normal || font == Fonts.Fixed
}
def styleCharacter(c: Char) = {
TextStyles.styleChar(c,
TextStyles.makeStyle(style,
Fonts.Fixed,
currentForeground,
currentBackground))
}
val Transparent = new Color(0, 0, 0, 0)
def setTransparentAttributeSet(attrs: MutableAttributeSet) = {
StyleConstants.setBackground(attrs, Transparent)
}
def setAttributeSet(attrs: MutableAttributeSet, styledChar: Int) = {
StyleConstants.setBold(attrs,
TextStyles.isBold(styledChar))
StyleConstants.setItalic(attrs,
TextStyles.isItalic(styledChar))
if (TextStyles.isReverseVideo(styledChar)) {
StyleConstants.setBackground(
attrs,
getColor(TextStyles.foregroundColor(styledChar), true))
StyleConstants.setForeground(
attrs,
getColor(TextStyles.backgroundColor(styledChar), false))
} else {
StyleConstants.setForeground(
attrs,
getColor(TextStyles.foregroundColor(styledChar), true))
StyleConstants.setBackground(
attrs,
getColor(TextStyles.backgroundColor(styledChar), false))
}
}
def attributeSetFor(attrs: MutableAttributeSet, style: Int) = {
StyleConstants.setBold(attrs, TextStyles.isBold(style))
StyleConstants.setItalic(attrs, TextStyles.isItalic(style))
if (TextStyles.isReverseVideo(style)) {
//println("BOTTOM RUN - REVERSE VIDEO")
StyleConstants.setBackground(attrs,
getColor(TextStyles.foregroundColor(style),
true))
StyleConstants.setForeground(attrs,
getColor(TextStyles.backgroundColor(style),
false))
} else {
      /*
      printf("BOTTOM RUN - REGULAR VIDEO FG = %d, BG = %d\n",
             TextStyles.foregroundColor(style),
             TextStyles.backgroundColor(style))*/
StyleConstants.setForeground(attrs,
getColor(TextStyles.foregroundColor(style),
true))
StyleConstants.setBackground(attrs,
getColor(TextStyles.backgroundColor(style),
false))
}
if (currentFont == Fonts.Normal) {
// TODO
} else if (currentFont == Fonts.Fixed) {
// TODO
} else if (currentFont == Fonts.Picture) {
throw new UnsupportedOperationException("Picture font not supported")
} else if (currentFont == Fonts.CharacterGfx) {
// TODO
throw new UnsupportedOperationException("Character GFX font not supported")
}
attrs
}
def initUI {
topWindow.setFont(fixedFont)
bottomWindow.setFont(stdFont)
topWindow.reset
bottomWindow.reset
// now the top window "knows" how large the screen is, so we can set
// the dimensions and font sizes to the VM
vm.setFontSizeInUnits(1, 1)
vm.setScreenSizeInUnits(topWindow.charsPerLine, topWindow.totalLines)
}
  // For now, this is just an approximation: we simply prevent the cursor from
  // getting caught in the glass pane, since in the normal case input focus
  // belongs in the bottom window.
  // E.g. "Deadline" will not look as good with this approach; a better one
  // would take into account how large the top window is and allow focus
  // when the cursor is set in the visible area of the top window.
def focusGained(e: FocusEvent) = bottomWindow.requestFocusInWindow
def focusLost(e: FocusEvent) { }
def requestSaveFile {
val fileChooser = new JFileChooser
fileChooser.setDialogTitle("Save Game As...")
val outputStream = if (fileChooser.showSaveDialog(this) == JFileChooser.APPROVE_OPTION) {
new FileOutputStream(fileChooser.getSelectedFile)
} else null
vm.resumeWithSaveStream(outputStream)
ExecutionControl.executeTurn(vm, this)
}
def requestRestoreFile {
val fileChooser = new JFileChooser
fileChooser.setDialogTitle("Restore Game From...")
val inputStream = if (fileChooser.showOpenDialog(this) == JFileChooser.APPROVE_OPTION) {
new FileInputStream(fileChooser.getSelectedFile)
} else null
vm.resumeWithRestoreStream(inputStream)
ExecutionControl.executeTurn(vm, this)
}
}
| weiju/zmpp2 | zmpp-zcode/src/main/scala/org/zmpp/zcode/SwingScreenModelStd.scala | Scala | bsd-3-clause | 28,574 |
package amora.backend.indexer
import org.junit.Test
import amora.converter.protocol.Artifact
import amora.converter.protocol.Project
class ScalaDeclTest extends RestApiTest {
@Test
def classes() = {
indexRegionData("""
prefix c:<http://amora.center/kb/amora/Schema/Class/>
select * where {
[a c:] c:name ?name ; c:posStart ?start ; c:posEnd ?end .
}
""",
Artifact(Project("p"), "o", "n", "v1"),
"x.scala" → """
package a.b.c
class [[A]]
class [[B_?]]
class [[??]]
class [[`hello world`]]
object O
abstract class AC
""")
}
@Test
def abstract_classes() = {
indexRegionData("""
prefix c:<http://amora.center/kb/amora/Schema/AbstractClass/>
select * where {
[a c:] c:name ?name ; c:posStart ?start ; c:posEnd ?end .
}
""",
Artifact(Project("p"), "o", "n", "v1"),
"x.scala" → """
package a.b.c
class A
object O
abstract class [[AC]] {}
""")
}
@Test
def classes_with_body() = {
indexRegionData("""
prefix c:<http://amora.center/kb/amora/Schema/Class/>
select * where {
[a c:] c:name ?name ; c:posStart ?start ; c:posEnd ?end .
}
""",
Artifact(Project("p"), "o", "n", "v1"),
"x.scala" → """
package a.b.c
class [[A]] {}
class [[B_?]] {
def f = 0
}
class [[??]] { /* comment*/ }
class [[`hello world`]] {
def g = 0
}
""")
}
@Test
def objects() = {
indexRegionData("""
prefix c:<http://amora.center/kb/amora/Schema/Object/>
select * where {
[a c:] c:name ?name ; c:posStart ?start ; c:posEnd ?end .
}
""",
Artifact(Project("p"), "o", "n", "v1"),
"x.scala" → """
package a.b.c
object [[`x y`]] {
def g = 0
}
""")
}
@Test
def traits() = {
indexRegionData("""
prefix c:<http://amora.center/kb/amora/Schema/Trait/>
select * where {
[a c:] c:name ?name ; c:posStart ?start ; c:posEnd ?end .
}
""",
Artifact(Project("p"), "o", "n", "v1"),
"x.scala" → """
package a.b.c
trait [[B]] {}
""")
}
@Test
def packages() = {
indexRegionData("""
prefix c:<http://amora.center/kb/amora/Schema/Package/>
select * where {
[a c:] c:name ?name ; c:posStart ?start ; c:posEnd ?end .
}
""",
Artifact(Project("p"), "o", "n", "v1"),
"x.scala" → """
package [[a]].[[b]].[[c]]
class A
""")
}
@Test
def defs() = {
indexRegionData("""
prefix c:<http://amora.center/kb/amora/Schema/Def/>
select * where {
[a c:] c:name ?name ; c:posStart ?start ; c:posEnd ?end .
}
""",
Artifact(Project("p"), "o", "n", "v1"),
"x.scala" → """
class [[!this]]A {
def [[meth]] = 0
def [[meth2]] = {
def [[meth3]] = {
def [[meth4]] = 0
meth4
}
meth3
}
}
""")
}
@Test
def vals() = {
indexRegionData("""
prefix c:<http://amora.center/kb/amora/Schema/Val/>
select * where {
[a c:] c:name ?name ; c:posStart ?start ; c:posEnd ?end .
}
""",
Artifact(Project("p"), "o", "n", "v1"),
"x.scala" → """
class A {
val [[v1]] = 0
val [[v2]] = {
val [[v3]] = {
val [[v4]] = 0
v4
}
v3
}
}
""")
}
@Test
def vars() = {
indexRegionData("""
prefix c:<http://amora.center/kb/amora/Schema/Var/>
select * where {
[a c:] c:name ?name ; c:posStart ?start ; c:posEnd ?end .
}
""",
Artifact(Project("p"), "o", "n", "v1"),
"x.scala" → """
class A {
var [[v1]] = 0
var [[v2]] = {
var [[v3]] = {
var [[v4]] = 0
v4
}
v3
}
}
""")
}
@Test
def lazy_vals() = {
indexRegionData("""
prefix c:<http://amora.center/kb/amora/Schema/LazyVal/>
select * where {
[a c:] c:name ?name ; c:posStart ?start ; c:posEnd ?end .
}
""",
Artifact(Project("p"), "o", "n", "v1"),
"x.scala" → """
class A {
lazy val [[v1]] = 0
lazy val [[v2]] = {
lazy val [[v3]] = {
lazy val [[v4]] = 0
v4
}
v3
}
}
""")
}
@Test
def private_class_parameters() = {
indexRegionData("""
prefix param:<http://amora.center/kb/amora/Flag/param>
prefix c:<http://amora.center/kb/amora/Schema/Val/>
select distinct ?name ?start ?end where {
[c:flag param:] c:name ?name ; c:posStart ?start ; c:posEnd ?end .
}
""",
Artifact(Project("p"), "o", "n", "v1"),
"x.scala" → """
class A([[value1]]: Int, [[`second val`]]: String)
""")
}
@Test
def method_parameters() = {
indexRegionData("""
prefix param:<http://amora.center/kb/amora/Flag/param>
prefix c:<http://amora.center/kb/amora/Schema/Val/>
select * where {
[c:flag param:] c:name ?name ; c:posStart ?start ; c:posEnd ?end .
}
""",
Artifact(Project("p"), "o", "n", "v1"),
"x.scala" → """
class X {
def f([[param1]]: Int)([[`p a r a m`]]: String)([[p]]: Int) = 0
}
""")
}
@Test
def multiple_lambda_decls() = {
indexRegionData("""
prefix c:<http://amora.center/kb/amora/Schema/Val/>
select * where {
[a c:] c:name ?name ; c:posStart ?start ; c:posEnd ?end .
}
""",
Artifact(Project("p"), "o", "n", "v1"),
"x.scala" → """
class X {
def f([[i]]: Int ⇒ Int) = i
f([[v]] ⇒ v)
f([[v]] ⇒ v)
}
""")
}
@Test
def default_constructor() = {
indexRegionData("""
prefix c:<http://amora.center/kb/amora/Schema/Decl/>
select * where {
[a c:] c:name ?name ; c:posStart ?start ; c:posEnd ?end .
}
""",
Artifact(Project("p"), "o", "n", "v1"),
"x.scala" → """
class [[!this]][[X]]
""")
}
@Test
def constructor_with_parameter() = {
indexRegionData("""
prefix c:<http://amora.center/kb/amora/Schema/Decl/>
select distinct * where {
[a c:] c:name ?name ; c:posStart ?start ; c:posEnd ?end .
}
""",
Artifact(Project("p"), "o", "n", "v1"),
"x.scala" → """
class [[!this]][[X]]([[value]]: Int)
""")
}
@Test
def auxiliary_constructor() = {
indexRegionData("""
prefix c:<http://amora.center/kb/amora/Schema/Decl/>
select * where {
[a c:] c:name ?name ; c:posStart ?start ; c:posEnd ?end .
}
""",
Artifact(Project("p"), "o", "n", "v1"),
"x.scala" → """
class [[!this]][[X]] {
def [[this]]([[value]]: Int) {
this()
}
}
""")
}
@Test
def type_alias_with_type_parameter() = {
indexRegionData("""
prefix c:<http://amora.center/kb/amora/Schema/Decl/>
select * where {
[a c:] c:name ?name ; c:posStart ?start ; c:posEnd ?end .
}
""",
Artifact(Project("p"), "o", "n", "v1"),
"x.scala" → """
class [[!this]][[X]] {
type [[Type]] [ [[A]] , [[B]] ] = Map[A, B]
}
""")
}
@Test
def self_ref() = {
indexRegionData("""
prefix c:<http://amora.center/kb/amora/Schema/Decl/>
select * where {
[a c:] c:name ?name ; c:posStart ?start ; c:posEnd ?end .
}
""",
Artifact(Project("p"), "o", "n", "v1"),
"x.scala" → """
class [[!this]][[X]] { [[selfRef]] ⇒
}
""")
}
@Test
def self_ref_with_parent() = {
indexRegionData("""
prefix c:<http://amora.center/kb/amora/Schema/Decl/>
select * where {
[a c:] c:name ?name ; c:posStart ?start ; c:posEnd ?end .
}
""",
Artifact(Project("p"), "o", "n", "v1"),
"x.scala" → """
class [[!this]][[X]] {
[[selfRef]]: scala.collection.mutable.AbstractSet[Int] ⇒
}
""")
}
@Test
def class_and_object() = {
indexRegionData("""
prefix c:<http://amora.center/kb/amora/Schema/Decl/>
prefix o:<http://amora.center/kb/amora/Schema/Object/>
select ?name ?start ?end where {
[c:name "value"] c:owner ?owner .
?owner a o: ; c:name ?name ; c:posStart ?start ; c:posEnd ?end .
}
""",
Artifact(Project("p"), "o", "n", "v1"),
"x.scala" → """
class X {
val value = 0
}
object [[X]] {
val value = 0
}
""")
}
@Test
def decl_in_scope_has_an_owner() = {
indexRegionData("""
prefix Decl:<http://amora.center/kb/amora/Schema/Decl/>
select * where {
?d a Decl: ; Decl:name ?name ; Decl:posStart ?start ; Decl:posEnd ?end .
filter not exists {
?d Decl:owner ?o .
}
}
""",
Artifact(Project("p"), "o", "n", "v1"),
"x.scala" → """
class X {
def f(i: Int) = i match {
case i ⇒ i
}
}
""")
}
@Test
def decl_in_extractor_scope_has_an_owner() = {
indexRegionData("""
prefix Decl:<http://amora.center/kb/amora/Schema/Decl/>
select * where {
?d a Decl: ; Decl:name ?name ; Decl:posStart ?start ; Decl:posEnd ?end .
filter not exists {
?d Decl:owner ?o .
}
}
""",
Artifact(Project("p"), "o", "n", "v1"),
"x.scala" → """
class X {
def f(o: Option[Int]) = o match {
case Some(i) ⇒ i
}
}
""")
}
}
| sschaef/tooling-research | backend/src/test/scala/amora/backend/indexer/ScalaDeclTest.scala | Scala | mit | 10,252 |
package io.circe.test
import io.circe.Json
import org.scalacheck.{ Arbitrary, Gen }
trait ArbitraryInstances {
private[this] def maxDepth: Int = 5
private[this] def maxSize: Int = 20
private[this] def genNull: Gen[Json] = Gen.const(Json.empty)
private[this] def genBool: Gen[Json] = Arbitrary.arbBool.arbitrary.map(Json.bool)
private[this] def genNumber: Gen[Json] = Gen.oneOf(
Arbitrary.arbLong.arbitrary.map(Json.long),
Arbitrary.arbDouble.arbitrary.map(Json.numberOrNull)
)
private[this] def genString: Gen[Json] = Arbitrary.arbString.arbitrary.map(Json.string)
private[this] def genArray(depth: Int): Gen[Json] = Gen.choose(0, maxSize).flatMap { size =>
Gen.listOfN(
size,
arbitraryJsonAtDepth(depth + 1).arbitrary
).map(Json.array)
}
private[this] def genObject(depth: Int): Gen[Json] = Gen.choose(0, maxSize).flatMap { size =>
Gen.listOfN(
size,
for {
k <- Arbitrary.arbString.arbitrary
v <- arbitraryJsonAtDepth(depth + 1).arbitrary
} yield k -> v
).map(Json.obj)
}
private[this] def arbitraryJsonAtDepth(depth: Int): Arbitrary[Json] = {
    val genJsons = List(genNumber, genString) ++ (
if (depth < maxDepth) List(genArray(depth), genObject(depth)) else Nil
)
Arbitrary(Gen.oneOf(genNull, genBool, genJsons: _*))
}
implicit def arbitraryJson: Arbitrary[Json] = arbitraryJsonAtDepth(0)
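  // Hedged usage sketch: mixing this trait into a ScalaCheck suite provides
  // an implicit Arbitrary[Json], e.g. Prop.forAll { (j: Json) => j == j },
  // or, ad hoc, arbitraryJson.arbitrary.sample.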
}
| groz/circe | core/shared/src/test/scala/io/circe/test/ArbitraryInstances.scala | Scala | apache-2.0 | 1,418 |
package org.template.classification
import io.prediction.controller.P2LAlgorithm
import io.prediction.controller.Params
import org.apache.spark.SparkContext
import org.apache.spark.mllib.classification.NaiveBayes
import org.apache.spark.mllib.classification.NaiveBayesModel
import org.apache.spark.mllib.linalg.Vectors
case class NaiveBayesAlgorithmParams(
lambda: Double
) extends Params
// extends P2LAlgorithm because MLlib's NaiveBayesModel doesn't contain an RDD.
class NaiveBayesAlgorithm(val ap: NaiveBayesAlgorithmParams)
extends P2LAlgorithm[PreparedData, NaiveBayesModel, Query, PredictedResult] {
def train(sc: SparkContext, data: PreparedData): NaiveBayesModel = {
NaiveBayes.train(data.labeledPoints, ap.lambda)
}
def predict(model: NaiveBayesModel, query: Query): PredictedResult = {
val label = model.predict(Vectors.dense(query.features))
new PredictedResult(label)
}
}
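// Hedged usage sketch (parameters normally come from engine.json; the lambda
// and feature values here are assumptions):
//   val algo = new NaiveBayesAlgorithm(NaiveBayesAlgorithmParams(lambda = 1.0))
//   val result = algo.predict(model, Query(Array(2.0, 0.0, 1.0)))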
| beni55/PredictionIO | examples/scala-parallel-classification/add-algorithm/src/main/scala/NaiveBayesAlgorithm.scala | Scala | apache-2.0 | 918 |
package test
import org.opencv.core.MatOfKeyPoint
import org.opencv.core.Mat
import org.opencv.features2d.FeatureDetector
import org.opencv.features2d.DescriptorExtractor
import org.opencv.highgui.Highgui
import org.opencv.features2d.DescriptorMatcher
import org.opencv.core.MatOfDMatch
import scala.collection.JavaConverters._
object BetterMatcher {
def run(trainImgs: Array[String], pathToImgQuery: String) = {
def detectAndExtract(mat: Mat) = {
val keyPoints = new MatOfKeyPoint
val detector = FeatureDetector.create(FeatureDetector.ORB)
detector.detect(mat, keyPoints)
//println(s"There were ${keyPoints.toArray.size} KeyPoints detected")
val bestKeyPoints: MatOfKeyPoint = new MatOfKeyPoint(keyPoints.toArray: _*)
val extractor = DescriptorExtractor.create(DescriptorExtractor.ORB)
val descriptors = new Mat
extractor.compute(mat, bestKeyPoints, descriptors)
//println(s"${descriptors.rows} descriptors were extracted, each with dimension ${descriptors.cols}")
(bestKeyPoints, descriptors)
}
val queryImage = Highgui.imread(pathToImgQuery)
val (leftKeyPoints, leftDescriptors) = detectAndExtract(queryImage)
val trainList: java.util.List[Mat] = new java.util.ArrayList()
trainImgs.foreach { img =>
val imgMat = Highgui.imread(img)
val (imgKeyPoints, imgDescriptors) = detectAndExtract(imgMat)
trainList.add(imgDescriptors)
}
val matcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE)
val dmatches = new MatOfDMatch
matcher.add(trainList)
matcher.train()
matcher.`match`(leftDescriptors, dmatches)
dmatches.toList().asScala.sortWith((e1, e2) => e1.distance < e2.distance).take(200).foreach(println)
val distances = dmatches.toArray().map(x => x.distance)
val count = distances.length
val mean = distances.sum / count
println(mean)
}
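  // Hedged usage sketch (file paths are placeholders):
  //   BetterMatcher.run(Array("train/logo1.png", "train/logo2.png"), "query.png")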
} | flaviusanton/logo-detection | src/main/scala/test/BetterMatcher.scala | Scala | mit | 1,943 |
package pl.pholda.malpompaaligxilo.examples.i18n
import pl.pholda.malpompaaligxilo.Context
import pl.pholda.malpompaaligxilo.form.field._
import pl.pholda.malpompaaligxilo.form.field.calculateField.cost.CostDef.SingleCostDef
import pl.pholda.malpompaaligxilo.form.field.calculateField.cost.CostValue.{ComplexCostValue, MultipleCostValue, SingleCostValue}
import pl.pholda.malpompaaligxilo.form.field.calculateField.cost.{CostValue, CostsField}
import pl.pholda.malpompaaligxilo.form.{FormExpr, FormInstance, Field, FormSpecification}
import pl.pholda.malpompaaligxilo.i18n.I18nString
import pl.pholda.malpompaaligxilo.templates.html.costValue
class I18nFormSpec(implicit context: Context) extends FormSpecification {
import context.translationProvider
override def fields: List[Field[_]] = name :: surname :: hasMiddleName :: middleName :: birthDate :: age ::
cost :: Nil
override def id: String = "i18nForm"
val name = Field(
name = "name",
caption = translationProvider.t("Name"),
`type` = StringField(),
required = true
)
val surname = Field(
name = "surname",
caption = translationProvider.t("Surname"),
`type` = StringField(),
required = true
)
val hasMiddleName = Field(
name = "hasMiddleName",
caption = translationProvider.t("Has middle name"),
`type` = CheckboxField(default = false)
)
val middleName = Field(
name = "middleName",
caption = translationProvider.t("Middle name"),
`type` = StringField(),
visible = FormExpr{implicit form =>
hasMiddleName.value.contains(true)
}
)
val birthDate = Field(
name = "birthDate",
caption = translationProvider.t("Date of birth"),
`type` = DateField(),
required = true
)
val age = Field(
name = "age",
caption = translationProvider.t("Age"),
`type` = CustomComputeField[Age](FormExpr{implicit form =>
birthDate.value match {
case Some(bd) =>
Some(Age(form.dates.now.getYear - bd.getYear))
case _ => None
}
}),
visible = FormExpr{ implicit form =>
birthDate.value.nonEmpty
}
)
val cost = Field(
name = "Cost",
caption = "Cost",
`type` = CostsField(
definition = SingleCostDef("age", translationProvider.t("discount for young"), -10, FormExpr{implicit f=>age.value.exists(_.age<10)}),
currencyFormat = "%.2f :-)"
)(
"total = "+_.total
)
)
def costPrinter(costValue: CostValue): I18nString = costValue match {
case SingleCostValue(_, desc, value) =>
desc + ": " + value.toString
case multiple@MultipleCostValue(_, desc, itemCost, items) =>
desc + ": " + (multiple.total + " ("+itemCost+" × "+items+")")
case ComplexCostValue(values) =>
""
}
} | pholda/MalpompaAligxilo | examples/i18nForm/src/main/scala/pl.pholda.malpompaaligxilo.examples.i18n/I18nFormSpec.scala | Scala | gpl-3.0 | 2,776 |
package com.ibm.gpuenabler
import com.ibm.gpuenabler.CUDARDDImplicits._
import com.ibm.gpuenabler.CUDADSImplicits._
import org.apache.spark.SparkEnv
import org.apache.spark.sql.SparkSession
import org.apache.spark.SparkConf
object perfDebug {
  def timeit(msg: String, code: => Any): Any = {
val now1 = System.nanoTime
code
val ms1 = (System.nanoTime - now1) / 1000000
println("%s Elapsed time: %d ms".format(msg, ms1))
}
def main(args : Array[String]): Unit = {
val masterURL = if (args.length > 0) args(0) else "local[*]"
val n: Long = if (args.length > 1) args(1).toLong else 1000000L
val part = if (args.length > 2) args(2).toInt else 16
val conf = new SparkConf(false).set("spark.executor.memory", "20g")
val spark = SparkSession.builder().master(masterURL).appName("test").config(conf).getOrCreate()
import spark.implicits._
val sc = spark.sparkContext
val ptxURL = this.getClass.getResource("/GpuEnablerExamples.ptx")
val ptxURL1 = "/GpuEnablerExamples.ptx"
val mapFunction = new CUDAFunction(
"multiplyBy2o",
Array("this"),
Array("this"),
ptxURL)
val dimensions = (size: Long, stage: Int) => stage match {
case 0 => (64, 256)
case 1 => (1, 1)
}
val reduceFunction = new CUDAFunction(
"sumlo",
Array("this"),
Array("this"),
ptxURL,
Seq(),
Some((size: Long) => 2),
Some(dimensions))
val loadFunction = new CUDAFunction(
"load",
Array("this"), Seq(),
ptxURL)
val dataRDD = sc.parallelize(1 to n.toInt, part).map(_.toLong).cache().cacheGpu()
dataRDD.count()
// Load the data to GPU
dataRDD.reduceExtFunc((x1, x2) => x2 , loadFunction)
timeit("RDD: All cached", {
val mapRDD = dataRDD.mapExtFunc((x: Long) => 2 * x, mapFunction).cacheGpu()
val output: Long = mapRDD.reduceExtFunc((x: Long, y: Long) => x + y, reduceFunction)
mapRDD.unCacheGpu()
println("RDD Output is " + output)
})
val dsmapFunction = DSCUDAFunction(
"multiplyBy2",
Array("value"),
Array("value"),
ptxURL1)
val dimensions2 = (size: Long, stage: Int) => stage match {
case 0 => (64, 256, 1, 1, 1, 1)
case 1 => (1, 1, 1, 1, 1, 1)
}
val gpuParams = gpuParameters(dimensions2)
val dsreduceFunction = DSCUDAFunction(
"suml",
Array("value"),
Array("value"),
ptxURL1,
Some((size: Long) => 2),
Some(gpuParams), outputSize=Some(1))
val rd = spark.range(1, n+1, 1, part).cache()
rd.count()
val data = rd.cacheGpu(true)
// Load the data to GPU
timeit("Data load in GPU", { data.loadGpu()})
timeit("DS: All cached", {
val mapDS = data.mapExtFunc(2 * _, dsmapFunction).cacheGpu()
val mapDS1 = mapDS.mapExtFunc(2 * _, dsmapFunction).cacheGpu()
val mapDS2 = mapDS1.mapExtFunc(2 * _, dsmapFunction).cacheGpu()
val output = mapDS2.reduceExtFunc(_ + _, dsreduceFunction)
mapDS.unCacheGpu()
mapDS1.unCacheGpu()
mapDS2.unCacheGpu()
println("Output is " + output)
})
data.unCacheGpu()
val data111 = rd.cacheGpu(true)
// Load the data to GPU
timeit("Data load in GPU", {data111.loadGpu() })
timeit("DS: All cached GPUONLY", {
val mapDS123 = data111.mapExtFunc(2 * _, dsmapFunction).cacheGpu(true)
val output = mapDS123.reduceExtFunc(_ + _, dsreduceFunction)
mapDS123.unCacheGpu()
println(s"Output is $output")
})
data111.unCacheGpu()
val data1 = rd
// Load the data to GPU
timeit("Data load in GPU", {data1.loadGpu() })
timeit("DS: No Cache", {
val mapDS1 = data1.mapExtFunc(2 * _, dsmapFunction)
val output = mapDS1.reduceExtFunc(_ + _, dsreduceFunction)
println("Output is " + output)
})
data1.unCacheGpu()
timeit("DS: CPU", {
val output = data.map(2 * _).reduce(_ + _)
println("Output is " + output)
})
}
}
| IBMSparkGPU/GPUEnabler | examples/src/main/scala/com/ibm/gpuenabler/perfDebug.scala | Scala | apache-2.0 | 3,991 |
// We need to be quite careful not to skew the generator.
// Since `Int.Minvalue` is 1 smaller than `-(Int.MaxValue)`,
// it suffices to increment the negative numbers by 1 and make them positive.
// This maps Int.MinValue to Int.MaxValue and -1 to 0.
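// Worked example (illustration): an RNG yielding Int.MinValue (-2147483648)
// produces Int.MaxValue (2147483647), and one yielding -1 produces 0. Every
// non-negative Int is hit by exactly two inputs, keeping the distribution uniform.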
def nonNegativeInt(rng: RNG): (Int, RNG) = {
val (i, r) = rng.nextInt
(if (i < 0) -(i + 1) else i, r)
} | ud3sh/coursework | functional-programming-in-scala-textbook/answerkey/state/01.answer.scala | Scala | unlicense | 359 |
package forms.data
/**
* Created by Matthias Braun on 11/18/2015.
*/
//case class FarmerPersonalData(name: String, postAdress: String, telefonNumber: String, emailAdress: String)
//
//case class FarmerCompanyData(companyNr: String, vatRegistrationNr: String, inspectionBody: String, memberInBioUnion:Boolean)
//
//case class BankData(iban: String)
//
//case class HarvestGuessFromForm(contactData: FarmerPersonalData, companyData: FarmerCompanyData, bankData: BankData)
case class FarmerPersonalData(name: String, postalAddress: String, phoneNumber: String, emailAddress: String)
case class HarvestGuessFromForm(contactData: FarmerPersonalData)
| mb720/cvs | app/forms/data/HarvestGuessFromForm.scala | Scala | bsd-2-clause | 651 |
package models.db.systemmanage
import slick.lifted._
/**
* Created by hooxin on 15-1-26.
*/
object SystemManage {
val departments = TableQuery[DepartmentTable]
val users = TableQuery[UserTable]
val dicts = TableQuery[DictTable]
val dictItems = TableQuery[DictItemTable]
val functions = TableQuery[FunctionTable]
val roles = TableQuery[RoleTable]
val roleFuncs = TableQuery[RoleFuncTable]
val menus = TableQuery[MenuTable]
val roleMenus = TableQuery[RoleMenuTable]
val globalParams = TableQuery[GlobalParamTable]
val userRoles = TableQuery[UserRoleTable]
}
| firefoxmmx2/techsupport-extjs4-slick3 | app/models/db/systemmanage/SystemManage.scala | Scala | mit | 583 |
package tight
import java.util.concurrent.ConcurrentHashMap
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration._
import scala.concurrent.{Await, Future}
object Cooking extends App {
class KVStore[K, V] {
private val s = new ConcurrentHashMap[K, V]()
def create(k: K, v: V): Future[Boolean] = Future.successful(s.putIfAbsent(k, v) == null)
def read(k: K): Future[Option[V]] = Future.successful(Option(s.get(k)))
def update(k: K, v: V): Future[Unit] = Future.successful(s.put(k, v))
def delete(k: K): Future[Boolean] = Future.successful(s.remove(k) != null)
}
//
type FoodName = String
type Quantity = Int
type FoodKV = KVStore[String, Int]
type TestFoodKV = KVStore[String, Int]
def addFood(n: FoodName, q: Quantity): Future[Unit] = for {
current <- fc.read(n)
updated = current.map(c => c + q).getOrElse(q)
_ <- fc.update(n, updated)
} yield ()
def takeFood(n: FoodName, q: Quantity): Future[Quantity] = for {
current <- fc.read(n)
inStock = current.getOrElse(0)
taken = Math.min(inStock, q)
left = inStock - taken
_ <- if (left > 0) fc.update(n, left) else fc.delete(n)
} yield taken
//
def cookSauce(q: Quantity): Future[Quantity]=
for {
tomatoQ <- takeFood("tomato", q)
vegQ <- takeFood("non-tomato veggies", q)
_ <- takeFood("garlic", q*2)
sauceQ = tomatoQ/2 + vegQ*3/4
_ <- addFood("sauce", sauceQ)
} yield sauceQ
def cookPasta(q: Quantity): Future[Quantity]=
for {
pastaQ <- takeFood("pasta", q)
_ <- takeFood("salt", 10)
_ <- addFood("cooked pasta", pastaQ)
} yield pastaQ
//
val test = true
val fc = if (test) new TestFoodKV else new FoodKV
//
val shopping = for {
_ <- addFood("tomato", 10)
_ <- addFood("non-tomato veggies", 15)
_ <- addFood("garlic", 42)
_ <- addFood("salt", 1)
_ <- addFood("pasta", 5)
} yield ()
val cooking = for {
_ <- shopping
sq <- cookSauce(5)
pq <- cookPasta(10)
} yield s"Cooked $sq sauce and $pq pasta"
val eating = Await.result(cooking, 1.minute)
println(eating)
}
| enpassant/miniatures | src/main/scala/tight/TightCoupling.scala | Scala | apache-2.0 | 2,159 |
package TAPLcomp.recon
import scala.util.parsing.combinator.{ImplicitConversions, PackratParsers}
import scala.util.parsing.combinator.syntactical.StandardTokenParsers
sealed trait Ty
case class TyVar(id: String) extends Ty
case class TyArr(t1: Ty, t2: Ty) extends Ty
case object TyBool extends Ty
case object TyNat extends Ty
sealed trait Term
case class TmVar(i: String) extends Term
case object TmTrue extends Term
case object TmFalse extends Term
case class TmIf(cond: Term, t1: Term, t2: Term) extends Term
case object TmZero extends Term
case class TmSucc(t: Term) extends Term
case class TmPred(t: Term) extends Term
case class TmIsZero(t: Term) extends Term
case class TmAbs(v: String, ty: Option[Ty], t: Term) extends Term
case class TmApp(t1: Term, t2: Term) extends Term
object ReconParsers extends StandardTokenParsers with PackratParsers with ImplicitConversions {
lexical.reserved += ("Bool", "true", "false", "if", "then", "else",
"Nat", "String", "Unit", "Float", "unit", "case", "let", "in", "succ", "pred",
"as", "of", "fix", "iszero")
  lexical.delimiters += ("\\", "(", ")", ";", "/", ".", ":", "->", "=", "<", ">", "{", "}", "=>", "==>", ",", "|")
// lower-case identifier
lazy val lcid: PackratParser[String] = ident ^? { case id if id.charAt(0).isLower => id }
// upper-case identifier
lazy val ucid: PackratParser[String] = ident ^? { case id if id.charAt(0).isUpper => id }
// TYPES
lazy val `type`: PackratParser[Ty] = arrowType
lazy val aType: PackratParser[Ty] =
"(" ~> `type` <~ ")" |
ucid ^^ { tn => TyVar(tn) } |
"Bool" ^^ { _ => TyBool } |
"Nat" ^^ { _ => TyNat }
lazy val fieldTypes: PackratParser[List[(String, Ty)]] =
repsep(fieldType, ",")
lazy val fieldType: PackratParser[(String, Ty)] =
lcid ~ (":" ~> `type`) ^^ { case id ~ ty => (id, ty) }
lazy val arrowType: PackratParser[Ty] =
(aType <~ "->") ~ arrowType ^^ { case t1 ~ t2 => TyArr(t1, t2) } |
aType
lazy val term: PackratParser[Term] =
appTerm |
("if" ~> term) ~ ("then" ~> term) ~ ("else" ~> term) ^^ { case t1 ~ t2 ~ t3 => TmIf(t1, t2, t3) } |
("\\\\" ~> lcid) ~ (":" ~> `type`) ~ ("." ~> term) ^^ { case v ~ ty ~ t => TmAbs(v, Some(ty), t) }
lazy val appTerm: PackratParser[Term] =
appTerm ~ aTerm ^^ { case t1 ~ t2 => TmApp(t1, t2) } |
"succ" ~> aTerm ^^ { t => TmSucc(t) } |
"pred" ~> aTerm ^^ { t => TmPred(t) } |
"iszero" ~> aTerm ^^ { t => TmIsZero(t) } |
aTerm
lazy val aTerm: PackratParser[Term] =
"(" ~> term <~ ")" |
"true" ^^ { _ => TmTrue } |
"false" ^^ { _ => TmFalse } |
lcid ^^ { i => TmVar(i) } |
numericLit ^^ { x => num(x.toInt) }
private def num(x: Int): Term = x match {
case 0 => TmZero
case _ => TmSucc(num(x - 1))
}
def input(s: String) = phrase(term)(new lexical.Scanner(s)) match {
case t if t.successful => t.get
case t => sys.error(t.toString)
}
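  // Hedged usage example:
  //   input("if iszero 0 then true else false")
  //   // ==> TmIf(TmIsZero(TmZero), TmTrue, TmFalse)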
} | hy-zhang/parser | Scala/Parser/src/TAPLcomp/recon/parser.scala | Scala | bsd-3-clause | 2,970 |
package pl.newicom.dddd.view.sql
import akka.Done
import pl.newicom.dddd.messaging.event.OfficeEventMessage
import pl.newicom.dddd.view.ViewHandler
import slick.dbio.DBIOAction.sequence
import slick.jdbc.JdbcProfile
import scala.concurrent.{ExecutionContext, Future}
class SqlViewHandler(val viewStore: SqlViewStore, override val vuConfig: SqlViewUpdateConfig)
(implicit val profile: JdbcProfile, ex: ExecutionContext)
extends ViewHandler(vuConfig) with FutureHelpers {
import profile.api._
private lazy val viewMetadataDao = new ViewMetadataDao
def viewMetadataId = ViewMetadataId(viewName, vuConfig.office.id)
def handle(eventMessage: OfficeEventMessage, eventNumber: Long): Future[Done] =
viewStore.run {
(sequence(vuConfig.projections.map(_.consume(eventMessage))) >>
viewMetadataDao.insertOrUpdate(viewMetadataId, eventNumber)).transactionally
}.mapToDone
def lastEventNumber: Future[Option[Long]] =
viewStore.run {
viewMetadataDao.lastEventNr(viewMetadataId)
}
}
| AndreyLadniy/akka-ddd | view-update-sql/src/main/scala/pl/newicom/dddd/view/sql/SqlViewHandler.scala | Scala | mit | 1,046 |
package controllers
import javax.inject._
import play.api._
import play.api.mvc._
import play.api.mvc.Results._
import play.api.http.HttpErrorHandler
import scala.concurrent._
class ErrorHandler extends HttpErrorHandler {
def onClientError(request: RequestHeader, statusCode: Int, message: String) = {
Future.successful(
Status(statusCode)("CUSTON - A client error occurred: " + message)
)
}
def onServerError(request: RequestHeader, exception: Throwable) = {
Future.successful(
InternalServerError("CUSTON - A server error occurred: " + exception.getMessage)
)
}
}
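// Hedged note: Play 2.5 picks up a custom handler either from a root-package
// class named ErrorHandler or via application.conf, e.g.
//   play.http.errorHandler = "controllers.ErrorHandler"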
@Singleton
class ErrorHandlerController @Inject() extends Controller {
def index = Action {
    throw new RuntimeException("It's all wrong!")
    Ok("It's okay? NO.")
}
} | diegopacheco/scala-playground | play-2.5-scala-app-sandbox/app/controllers/ErrorHandlerController.scala | Scala | unlicense | 804 |
import akka.actor.{ActorSystem, actorRef2Scala}
import akka.testkit.{ImplicitSender, TestActors, TestKit}
import hu.bme.mit.ire.TransactionFactory
import hu.bme.mit.ire.messages.ChangeSet
import hu.bme.mit.ire.util.TestUtil._
import org.scalatest.{BeforeAndAfterAll, Matchers, WordSpecLike}
class TransactionFactoryTest(_system: ActorSystem) extends TestKit(_system) with ImplicitSender
with WordSpecLike with Matchers with BeforeAndAfterAll {
def this() = this(ActorSystem("MySpec"))
override def afterAll {
TestKit.shutdownActorSystem(system)
}
"TransactionFactory" must {
"send incoming data after subscription" in {
val input = new TransactionFactory(messageSize = 4)
val echoActor = system.actorOf(TestActors.echoActorProps)
input.subscribe(Map("test" -> (echoActor ! _)))
val tran = input.newBatchTransaction()
tran.add("test", tuple(6, 1L))
tran.add("test", tuple(6, 2L))
tran.close()
expectMsg(ChangeSet(positive = Vector(tuple(6, 2), tuple(6, 1))))
}
"do no splitting in batch" in {
val input = new TransactionFactory(messageSize = 2)
val echoActor = system.actorOf(TestActors.echoActorProps)
input.subscribe(Map("test" -> (echoActor ! _)))
val tran = input.newBatchTransaction()
for (i <- 1 to 3) {
tran.add("test", tuple(6, i))
}
tran.close()
expectMsg(ChangeSet(positive = Vector(tuple(6, 3), tuple(6, 2), tuple(6, 1))))
}
"send messageSize sized messages when using continuous transactions" in {
val input = new TransactionFactory(messageSize = 2)
val echoActor = system.actorOf(TestActors.echoActorProps)
input.subscribe(Map("test" -> (echoActor ! _)))
val tran = input.newContinousTransaction()
for (i <- 1 to 3) {
tran.add("test", tuple(6, i))
}
tran.close()
expectMsg(ChangeSet(positive = Vector(tuple(6, 2), tuple(6, 1))))
expectMsg(ChangeSet(positive = Vector(tuple(6, 3))))
}
}
}
| FTSRG/ire | src/test/scala/TransactionFactoryTest.scala | Scala | epl-1.0 | 2,011 |
/*
* Copyright (C) 2016 Typesafe Inc. <http://www.typesafe.com>
*/
package akka.persistence.cassandra.query
import java.lang.{ Long => JLong }
import java.nio.ByteBuffer
import akka.actor.{ NoSerializationVerificationNeeded, Props }
import akka.persistence.PersistentRepr
import akka.persistence.cassandra._
import akka.persistence.cassandra.journal.CassandraJournal
import akka.persistence.cassandra.query.EventsByPersistenceIdPublisher._
import akka.persistence.cassandra.query.QueryActorPublisher._
import akka.serialization.{ Serialization, SerializationExtension }
import com.datastax.driver.core._
import com.datastax.driver.core.utils.Bytes
import scala.concurrent.duration.FiniteDuration
import scala.concurrent.{ ExecutionContext, Future }
import com.datastax.driver.core.policies.RetryPolicy
private[query] object EventsByPersistenceIdPublisher {
private[query] final case class EventsByPersistenceIdSession(
selectEventsByPersistenceIdQuery: PreparedStatement,
selectInUseQuery: PreparedStatement,
selectDeletedToQuery: PreparedStatement,
session: Session,
customConsistencyLevel: Option[ConsistencyLevel],
customRetryPolicy: Option[RetryPolicy]
) extends NoSerializationVerificationNeeded {
def selectEventsByPersistenceId(
persistenceId: String,
partitionNr: Long,
progress: Long,
toSeqNr: Long,
fetchSize: Int
)(implicit ec: ExecutionContext): Future[ResultSet] = {
val boundStatement = selectEventsByPersistenceIdQuery.bind(persistenceId, partitionNr: JLong, progress: JLong, toSeqNr: JLong)
boundStatement.setFetchSize(fetchSize)
executeStatement(boundStatement)
}
def selectInUse(persistenceId: String, currentPnr: Long)(implicit ec: ExecutionContext): Future[ResultSet] =
executeStatement(selectInUseQuery.bind(persistenceId, currentPnr: JLong))
def selectDeletedTo(partitionKey: String)(implicit ec: ExecutionContext): Future[ResultSet] =
executeStatement(selectDeletedToQuery.bind(partitionKey))
private def executeStatement(statement: Statement)(implicit ec: ExecutionContext): Future[ResultSet] =
listenableFutureToFuture(
session.executeAsync(withCustom(statement))
)
private def withCustom(statement: Statement): Statement = {
customConsistencyLevel.foreach(statement.setConsistencyLevel)
customRetryPolicy.foreach(statement.setRetryPolicy)
statement
}
}
private[query] final case class EventsByPersistenceIdState(
progress: Long,
count: Long,
partitionNr: Long
)
def props(
persistenceId: String, fromSeqNr: Long, toSeqNr: Long, max: Long, fetchSize: Int,
refreshInterval: Option[FiniteDuration], session: EventsByPersistenceIdSession,
config: CassandraReadJournalConfig
): Props =
Props(
classOf[EventsByPersistenceIdPublisher], persistenceId, fromSeqNr, toSeqNr, max, fetchSize,
refreshInterval, session, config
)
}
private[query] class EventsByPersistenceIdPublisher(
persistenceId: String, fromSeqNr: Long, toSeqNr: Long, max: Long, fetchSize: Int,
refreshInterval: Option[FiniteDuration], session: EventsByPersistenceIdSession,
config: CassandraReadJournalConfig
)
extends QueryActorPublisher[PersistentRepr, EventsByPersistenceIdState](refreshInterval, config) {
import CassandraJournal.deserializeEvent
import context.dispatcher
private[this] val serialization = SerializationExtension(context.system)
override protected def initialQuery(initialState: EventsByPersistenceIdState): Future[Action] =
query(initialState.copy(partitionNr = initialState.partitionNr - 1))
override protected def completionCondition(state: EventsByPersistenceIdState): Boolean =
state.progress > toSeqNr || state.count >= max
override protected def initialState: Future[EventsByPersistenceIdState] =
highestDeletedSequenceNumber(persistenceId).map { del =>
val initialFromSequenceNr = math.max(del + 1, fromSeqNr)
val currentPnr = partitionNr(initialFromSequenceNr, config.targetPartitionSize) + 1
EventsByPersistenceIdState(initialFromSequenceNr, 0, currentPnr)
}
override protected def updateState(
state: EventsByPersistenceIdState,
row: Row
): (Option[PersistentRepr], EventsByPersistenceIdState) = {
val event = extractEvent(row)
val partitionNr = row.getLong("partition_nr") + 1
(Some(event), EventsByPersistenceIdState(event.sequenceNr + 1, state.count + 1, partitionNr))
}
private[this] def extractEvent(row: Row): PersistentRepr =
row.getBytes("message") match {
case null =>
PersistentRepr(
payload = deserializeEvent(serialization, row),
sequenceNr = row.getLong("sequence_nr"),
persistenceId = row.getString("persistence_id"),
manifest = row.getString("event_manifest"),
deleted = false,
sender = null,
writerUuid = row.getString("writer_uuid")
)
case b =>
// for backwards compatibility
persistentFromByteBuffer(serialization, b)
}
private[this] def persistentFromByteBuffer(
serialization: Serialization,
b: ByteBuffer
): PersistentRepr =
serialization.deserialize(Bytes.getArray(b), classOf[PersistentRepr]).get
override protected def requestNext(
state: EventsByPersistenceIdState,
resultSet: ResultSet
): Future[Action] = {
inUse(persistenceId, state.partitionNr).flatMap { i =>
if (i) query(state)
else Future.successful(Finished(resultSet))
}
}
protected override def requestNextFinished(
state: EventsByPersistenceIdState,
resultSet: ResultSet
): Future[Action] = {
query(state.copy(partitionNr = state.partitionNr - 1))
.flatMap { rs =>
if (rs.rs.isExhausted) requestNext(state, resultSet)
else Future.successful(rs)
}
}
private[this] def inUse(persistenceId: String, currentPnr: Long): Future[Boolean] =
session.selectInUse(persistenceId, currentPnr)
.map(rs => if (rs.isExhausted) false else rs.one().getBool("used"))
private[this] def highestDeletedSequenceNumber(partitionKey: String): Future[Long] =
session.selectDeletedTo(partitionKey)
.map(r => Option(r.one()).map(_.getLong("deleted_to")).getOrElse(0))
private[this] def partitionNr(sequenceNr: Long, targetPartitionSize: Int): Long =
(sequenceNr - 1L) / targetPartitionSize
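  // e.g. with a target partition size of 500000 (illustrative): sequence
  // numbers 1..500000 map to partition 0, and 500001 starts partition 1.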
private[this] def query(state: EventsByPersistenceIdState): Future[NewResultSet] =
session.selectEventsByPersistenceId(persistenceId, state.partitionNr, state.progress, toSeqNr, fetchSize).map(NewResultSet)
}
| kpbochenek/akka-persistence-cassandra | src/main/scala/akka/persistence/cassandra/query/EventsByPersistenceIdPublisher.scala | Scala | apache-2.0 | 6,779 |
package com.expedia.gps.geo.reactive101.server
import javax.servlet.ServletContext
import _root_.akka.actor.ActorSystem
import com.expedia.gps.geo.reactive101.server.controller.{FoodController, BasicController}
import com.typesafe.scalalogging.StrictLogging
import org.scalatra._
import scala.language.postfixOps
/**
*
* @author [email protected]
* @since 2015-09-21
*/
class ScalatraBootstrap extends LifeCycle with StrictLogging {
val system = ActorSystem()
override def init(context: ServletContext) {
context.mount(new BasicController(system), "/")
context.mount(new FoodController(system), "/food")
}
} | olmartinATexpedia/reactive101 | src/main/scala/com/expedia/gps/geo/reactive101/server/ScalatraBootstrap.scala | Scala | apache-2.0 | 636 |
package rspactors.vocab
import rdftools.rdf.vocab.VocabGen
object LdnVocabGen {
  def main(args: Array[String]): Unit = {
VocabGen.generateVocabs
}
} | jpcik/ldn-streams | src/main/scala/rspactors/vocab/VocabGen.scala | Scala | mit | 148 |
package slamdata.engine.fs
import scalaz._
import Scalaz._
import org.specs2.mutable._
import slamdata.engine.{DisjunctionMatchers}
class PathSpecs extends Specification with DisjunctionMatchers {
"Path.apply" should {
"Parse empty string as root" in {
Path("") must_== Path.Root
}
"Parse root string as root" in {
Path("/") must_== Path.Root
}
"Parse current as current" in {
Path(".") must_== Path.Current
}
"Parse multiple slashes as root" in {
Path("/////////////////////////////") must_== Path.Root
}
"Parse trailing slash as pure directory" in {
Path("/foo/bar/baz/") must_== Path.dir("foo" :: "bar" :: "baz" :: Nil)
}
"Parse lack of trailing slash as file" in {
Path("/foo/bar/baz") must_== Path.file("foo" :: "bar" :: Nil, "baz")
}
"Correctly parse root file" in {
Path("/foo") must_== Path.file(Nil, "foo")
}
"Parse raw file as relative file" in {
Path("foo") must_== Path.file("." :: Nil, "foo")
}
"Parse raw relative file as relative file" in {
Path("./foo") must_== Path.file("." :: Nil, "foo")
}
"Parse raw directory as relative directory" in {
Path("foo/") must_== Path.dir("." :: "foo" :: Nil)
}
"Parse raw relative directory as relative directory" in {
Path("./foo/") must_== Path.dir("." :: "foo" :: Nil)
}
"Parse hidden file as hidden file" in {
Path(".foo") must_== Path.file("." :: Nil, ".foo")
}
"Parse hidden directory as hidden directory" in {
Path(".foo/") must_== Path.dir("." :: ".foo" :: Nil)
}
}
"Path.++" should {
"concatentate abs dir with rel file" in {
Path("/sd/") ++ Path("./tmp/5") must_== Path("/sd/tmp/5")
}
"concatentate rel dir with rel dir" in {
Path("./foo/") ++ Path("./bar/") must_== Path("./foo/bar/")
}
}
"Path.head" should {
"preserve pure file" in {
val p = Path("foo")
p.head must_== p
}
"return root for root" in {
Path("/").head must_== Path("/")
}
"return only dir for abs" in {
Path("/foo/").head must_== Path("/foo/")
}
"return parent dir for nested abs" in {
Path("/foo/bar").head must_== Path("/foo/")
}
"return only dir for relative" in {
Path("foo/").head must_== Path("foo/")
}
"return parent dir for relative" in {
Path("foo/bar/").head must_== Path("foo/")
}
"return file for relative fiel" in {
Path("foo").head must_== Path("foo")
}
"return parent dir for relative file" in {
Path("foo/bar").head must_== Path("foo/")
}
}
"Path.pathname" should {
"render root correctly" in {
Path.Root.pathname must_== "/"
}
"render current correctly" in {
Path.Current.pathname must_== "./"
}
"render absolute pure dir correctly" in {
Path("/foo/bar/baz/").pathname must_== "/foo/bar/baz/"
}
"render absolute file correctly" in {
Path("/foo/bar/baz").pathname must_== "/foo/bar/baz"
}
"render relative pure dir correctly" in {
Path("./foo/bar/baz/").pathname must_== "./foo/bar/baz/"
}
"render relative file correctly" in {
Path("./foo/bar/baz").pathname must_== "./foo/bar/baz"
}
}
"Path.relative" should {
"be false for absolute path" in {
Path("/foo").relative must beFalse
}
"be true for relative path" in {
Path("./foo").relative must beTrue
}
}
"Path.contains" should {
"return true when parent contains child dir" in {
Path("/foo/bar/").contains(Path("/foo/bar/baz/")) must beTrue
}
"return true when parent contains child file" in {
Path("/foo/bar/").contains(Path("/foo/bar/baz")) must beTrue
}
"return true for abs path that contains itself" in {
Path("/foo/bar/").contains(Path("/foo/bar/")) must beTrue
}
"return true for rel path when parent contains child dir" in {
Path("./foo/bar/").contains(Path("./foo/bar/baz/")) must beTrue
}
"return true for rel path when parent contains child file" in {
Path("./foo/bar/").contains(Path("./foo/bar/baz")) must beTrue
}
"return true for rel path that contains itself" in {
Path("./foo/bar/").contains(Path("./foo/bar/")) must beTrue
}
}
"Path.asAbsolute" should {
"not modify /" in {
Path("/").asAbsolute must_== Path("/")
}
"not modify unnested dir" in {
Path("/foo/").asAbsolute must_== Path("/foo/")
}
"not modify nested dir" in {
Path("/foo/bar/").asAbsolute must_== Path("/foo/bar/")
}
"prefix unnested relative dir" in {
Path("foo/").asAbsolute must_== Path("/foo/")
}
"prefix nested relative dir" in {
Path("foo/bar/").asAbsolute must_== Path("/foo/bar/")
}
"not modify simple file" in {
Path("/foo").asAbsolute must_== Path("/foo")
}
"not modify nested file" in {
Path("/foo/bar").asAbsolute must_== Path("/foo/bar")
}
"prefix simple relative file" in {
Path("foo").asAbsolute must_== Path("/foo")
}
"prefix nested relative file" in {
Path("foo/bar").asAbsolute must_== Path("/foo/bar")
}
}
"Path.asDir" should {
"not modify /" in {
Path("/").asDir must_== Path("/")
}
"not modify unnested dir" in {
Path("/foo/").asDir must_== Path("/foo/")
}
"not modify nested dir" in {
Path("/foo/bar/").asDir must_== Path("/foo/bar/")
}
"not modify unnested relative dir" in {
Path("foo/").asDir must_== Path("foo/")
}
"not modify nested relative dir" in {
Path("foo/bar/").asDir must_== Path("foo/bar/")
}
"convert simple file" in {
Path("/foo").asDir must_== Path("/foo/")
}
"convert nested file" in {
Path("/foo/bar").asDir must_== Path("/foo/bar/")
}
"convert simple relative file" in {
Path("foo").asDir must_== Path("foo/")
}
"convert nested relative file" in {
Path("foo/bar").asDir must_== Path("foo/bar/")
}
}
"Path.parent" should {
"be root for root" in {
Path("/").parent must_== Path("/")
}
"be root for simple file" in {
Path("/foo").parent must_== Path("/")
}
"be root for dir" in {
Path("/foo/").parent must_== Path("/")
}
"be parent for nested file" in {
Path("/foo/bar/baz").parent must_== Path("/foo/bar/")
}
"be parent for nested dir" in {
Path("/foo/bar/baz/").parent must_== Path("/foo/bar/")
}
}
"Path.ancestors" should {
"contain root" in {
Path("/").ancestors must contain(Path("/"))
}
"contain root and not file" in {
Path("/foo").ancestors must contain(Path("/"))
}
"contain root and dir" in {
Path("/foo/").ancestors must contain(Path("/"), Path("/foo/"))
}
"return root, parent, and not file" in {
Path("/foo/bar").ancestors must contain(Path("/"), Path("/foo/"))
}
"return root, parent, and dir" in {
Path("/foo/bar/").ancestors must contain(Path("/"), Path("/foo/"), Path("/foo/bar/"))
}
}
"Path.rebase" should {
"match root to root" in {
Path("/").rebase(Path("/")) must beRightDisj(Path("./"))
}
"match dir to same dir" in {
Path("/foo/").rebase(Path("/foo/")) must beRightDisj(Path("./"))
}
"match file to its dir" in {
Path("/foo/bar").rebase(Path("/foo/")) must beRightDisj(Path("./bar"))
}
"match file to parent's dir" in {
Path("/foo/bar/baz").rebase(Path("/foo/")) must beRightDisj(Path("./bar/baz"))
}
"fail with file" in {
Path("/foo/bar").rebase(Path("/foo")) must beAnyLeftDisj
}
}
"Path.interpret" should {
"leave relative path intact with matching ref and working dirs" in {
Path("foo").interpret(Path("/"), Path("/")) must beRightDisj(Path("foo"))
}
"make simple file relative to ref dir" in {
Path("bar").interpret(Path("/"), Path("/foo/")) must beRightDisj(Path("foo/bar"))
}
"make absolute path relative to ref dir" in {
Path("/foo/bar").interpret(Path("/foo/"), Path("/anything/")) must beRightDisj(Path("bar"))
}
"fail with path outside ref dir" in {
Path("/other").interpret(Path("/foo/"), Path("/anything/")) must beAnyLeftDisj
}
"fail with relative ref dir" in {
Path("foo").interpret(Path("rel/"), Path("/anything/")) must beAnyLeftDisj
}
"fail with ref path not a dir" in {
Path("foo").interpret(Path("/file"), Path("/anything/")) must beAnyLeftDisj
}
"fail with relative working dir" in {
Path("foo").interpret(Path("/anything/"), Path("rel/")) must beAnyLeftDisj
}
"fail with working path not a dir" in {
Path("foo").interpret(Path("/anything/"), Path("/file")) must beAnyLeftDisj
}
}
"FSTable.lookup" should {
"find root" in {
FSTable(Map(Path("/") -> "foo")).lookup(Path("/")) must beSome(("foo", Path("/"), Path(".")))
}
"find file in root" in {
FSTable(Map(Path("/") -> "foo")).lookup(Path("/bar")) must beSome(("foo", Path("/"), Path("./bar")))
}
"handle no mounts" in {
FSTable(Map()).lookup(Path("/")) must beNone
}
"handle unmounted path" in {
FSTable(Map(Path("foo") -> "foo")).lookup(Path("/bar")) must beNone
}
"find file with two mounts" in {
FSTable(Map(Path("foo") -> "foo", Path("bar") -> "bar")).lookup(Path("/foo/buz")) must beSome(("foo", Path("/foo/"), Path("./buz")))
}
"find nested file with two mounts" in {
FSTable(Map(Path("foo") -> "foo", Path("bar") -> "bar")).lookup(Path("/bar/buz/quux")) must beSome(("bar", Path("/bar/"), Path("./buz/quux")))
}
}
"FSTable.children" should {
"find two mounts" in {
FSTable(Map(Path("foo") -> "foo", Path("bar/buz") -> "buz")).children(Path("/")) must contain(Path("foo/"), Path("bar/"))
}
"find one of two mounts" in {
FSTable(Map(Path("foo") -> "foo", Path("bar/buz") -> "buz")).children(Path("/bar/")) must contain(Path("buz/"))
}
}
} | mossprescott/quasar | src/test/scala/slamdata/engine/fs/path.scala | Scala | agpl-3.0 | 10,156 |
package org.jetbrains.plugins.scala
package finder
import com.intellij.ide.highlighter.{JavaClassFileType, JavaFileType}
import com.intellij.ide.scratch.ScratchUtil
import com.intellij.openapi.fileTypes.{FileType, FileTypeRegistry, LanguageFileType}
import com.intellij.openapi.module.Module
import com.intellij.openapi.project.Project
import com.intellij.openapi.roots.ProjectRootManager
import com.intellij.openapi.vfs.VirtualFile
import com.intellij.psi.search.searches.{MethodReferencesSearch, ReferencesSearch}
import com.intellij.psi.search.{GlobalSearchScope, LocalSearchScope, SearchScope}
import org.jetbrains.jps.model.java.JavaModuleSourceRootTypes
import org.jetbrains.plugins.scala.extensions.ObjectExt
import org.jetbrains.plugins.scala.lang.psi.compiled.SigFileType
import org.jetbrains.plugins.scala.tasty.TastyFileType
import org.jetbrains.plugins.scala.util.HashBuilder._
sealed abstract class FilterScope(val delegate: GlobalSearchScope)
(implicit project: Project)
extends GlobalSearchScope(project) {
private val fileIndex =
ProjectRootManager.getInstance(project).getFileIndex
protected final def isInSourceContent(file: VirtualFile): Boolean =
fileIndex.isUnderSourceRootOfType(file, JavaModuleSourceRootTypes.SOURCES)
protected final def isInLibraryClasses(file: VirtualFile): Boolean =
fileIndex.isInLibraryClasses(file)
override final def contains(file: VirtualFile): Boolean =
(delegate == null || delegate.contains(file)) && mayContain(file)
protected def mayContain(file: VirtualFile): Boolean
override def compare(file1: VirtualFile, file2: VirtualFile): Int =
if (delegate != null) delegate.compare(file1, file2) else 0
override def isSearchInModuleContent(aModule: Module): Boolean =
delegate == null || delegate.isSearchInModuleContent(aModule)
override def isSearchInLibraries: Boolean =
delegate == null || delegate.isSearchInLibraries
override def calcHashCode(): Int =
this.getClass.hashCode() #+ delegate.hashCode()
override def equals(other: Any): Boolean = other match {
case that: FilterScope if this.getClass == that.getClass =>
delegate == that.delegate
case _ => false
}
}
final class ScalaFilterScope private(delegate: GlobalSearchScope)
(implicit project: Project)
extends FilterScope(delegate) {
override protected def mayContain(file: VirtualFile): Boolean =
FileTypeRegistry.getInstance.getFileTypeByFile(file) match {
case _: JavaClassFileType | SigFileType | TastyFileType =>
isInLibraryClasses(file)
case fileType: LanguageFileType =>
val hasScala = fileType.getLanguage.isKindOf(ScalaLanguage.INSTANCE) || ScalaLanguageDerivative.existsFor(fileType)
if (hasScala)
isInSourceContent(file) || ScratchUtil.isScratch(file)
else
false
case _ =>
false
}
}
object ScalaFilterScope {
def apply(parameters: ReferencesSearch.SearchParameters): SearchScope =
apply(parameters.getEffectiveSearchScope)(parameters.getProject)
def apply(parameters: MethodReferencesSearch.SearchParameters): SearchScope =
apply(parameters.getEffectiveSearchScope)(parameters.getProject)
def apply(delegate: GlobalSearchScope)
(implicit project: Project): ScalaFilterScope = new ScalaFilterScope(delegate)(project)
def apply(scope: SearchScope)
(implicit project: Project): SearchScope = scope match {
case global: GlobalSearchScope => apply(global)
case local: LocalSearchScope => new LocalSearchScope(
local.getScope.filter(_.getLanguage.isKindOf(ScalaLanguage.INSTANCE)),
local.getDisplayName + " in scala",
local.isIgnoreInjectedPsi
)
case _ => scope
}
}
final class SourceFilterScope private(delegate: GlobalSearchScope, fileTypes: Seq[FileType])
(implicit project: Project)
extends FilterScope(GlobalSearchScope.getScopeRestrictedByFileTypes(delegate, fileTypes: _*)) {
override protected def mayContain(file: VirtualFile): Boolean = isInSourceContent(file)
}
object SourceFilterScope {
import GlobalSearchScope.projectScope
def apply(delegate: GlobalSearchScope, fileTypes: Seq[FileType])
(implicit project: Project): SourceFilterScope =
new SourceFilterScope(delegate, fileTypes)
def apply(fileTypes: Seq[FileType])
(implicit project: Project): SourceFilterScope =
new SourceFilterScope(projectScope(project), fileTypes)
def apply(scope: GlobalSearchScope)
(implicit project: Project): SourceFilterScope =
new SourceFilterScope(scope, Seq(ScalaFileType.INSTANCE, JavaFileType.INSTANCE))
}
abstract class ResolveFilterScopeBase(delegate: GlobalSearchScope)
(implicit project: Project)
extends FilterScope(delegate) {
override protected def mayContain(file: VirtualFile): Boolean =
isInLibraryClasses(file) || isInSourceContent(file) || ScratchUtil.isScratch(file)
}
final class ResolveFilterScope(delegate: GlobalSearchScope)
(implicit project: Project)
extends ResolveFilterScopeBase(delegate) {
override def mayContain(file: VirtualFile): Boolean =
super.mayContain(file) && !file.getFileType.is[FileTypeWithIsolatedDeclarations]
}
object ResolveFilterScope {
def apply(delegate: GlobalSearchScope)
(implicit project: Project): ResolveFilterScope =
new ResolveFilterScope(delegate)
}
final class WorksheetResolveFilterScope(delegate: GlobalSearchScope,
val worksheetFile: VirtualFile)
(implicit project: Project)
extends ResolveFilterScopeBase(delegate){
override def equals(other: Any): Boolean =
super.equals(other) && (other match {
case wsScope: WorksheetResolveFilterScope => this.worksheetFile == wsScope.worksheetFile
case _ => false
})
override def calcHashCode(): Int =
super.calcHashCode() #+ worksheetFile.hashCode()
override def mayContain(file: VirtualFile): Boolean =
super.mayContain(file) && {
if (file.getFileType.is[FileTypeWithIsolatedDeclarations])
file == worksheetFile // worksheet elements shouldn't be available outside the worksheet
else
true
}
}
object WorksheetResolveFilterScope {
def apply(delegate: GlobalSearchScope, worksheetFile: VirtualFile)
(implicit project: Project): WorksheetResolveFilterScope =
new WorksheetResolveFilterScope(delegate, worksheetFile)(project)
} | JetBrains/intellij-scala | scala/scala-impl/src/org/jetbrains/plugins/scala/finder/ScalaFilterScope.scala | Scala | apache-2.0 | 6,665 |
package oauthorize.model
import oauth2.spec.StatusCodes
import oauth2.spec.model.ErrorResponse
import types._
/**
 * Validity is expressed in seconds.
 * Created is an epoch timestamp in milliseconds.
 */
sealed trait Expirable {
def validity: Long
def created: Long
def isExpired = created + validity * 1000 < System.currentTimeMillis
def validityRemaining = created / 1000 + validity - System.currentTimeMillis / 1000
}
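// Worked example (illustrative): with created = 1000000 (epoch millis) and validity = 60
// (seconds), isExpired becomes true once System.currentTimeMillis exceeds
// 1000000 + 60 * 1000 = 1060000, and validityRemaining counts down the seconds until then.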
case class AuthzRequest(
code: Option[String],
clientId: String,
responseType: ResponseType,
redirectUri: String,
authScope: Seq[String],
approved: Boolean,
validity: Long,
created: Long,
state: Option[State] = None,
user: Option[Oauth2User] = None) extends Expirable with AuthzRequestValidation
case class AccessTokenRequest(
grantType: GrantType,
authzCode: String,
redirectUri: String,
clientId: Option[String]) extends AccessTokenRequestValidation
case class RefreshTokenRequest(
grantType: GrantType,
refreshToken: String) extends RefreshTokenRequestValidation
case class ClientCredentialsRequest(
grantType: GrantType,
client: Oauth2Client,
authScope: Option[String]) extends ClientCredentialsRequestValidation
case class ResourceOwnerCredentialsRequest(
grantType: GrantType,
username: String,
password: String,
authScope: Seq[String]) extends ResourceOwnerCredentialsRequestValidation
abstract class Token {
def value: String
def clientId: String
def tokenScope: Seq[String]
def validity: Long
def created: Long
def userId: Option[UserId]
}
case class AccessToken(
value: String,
clientId: String,
tokenScope: Seq[String],
validity: Long,
created: Long,
userId: Option[UserId]) extends Token with Expirable
case class RefreshToken(
value: String,
clientId: String,
tokenScope: Seq[String],
validity: Long,
created: Long,
userId: Option[UserId]) extends Token with Expirable
case class AccessAndRefreshTokens(
accessToken: AccessToken,
refreshToken: Option[RefreshToken] = None)
trait OauthRequest {
def path: String
def param(key: String): Option[String]
def header(key: String): Option[String]
def method: String
def params: Map[String, String]
override def toString = s"$method '$path' $params"
}
trait OauthResponse
case class OauthRedirect(
uri: String,
params: Map[String, String],
paramsAsUrlFragment: Boolean = false) extends OauthResponse
case class InitiateAuthzApproval(
authzRequest: AuthzRequest,
client: Oauth2Client,
csrfToken: Option[String] = None) extends OauthResponse
case class Err(
error: String,
error_description: Option[String] = None,
error_uri: Option[String] = None,
@transient redirect_uri: Option[String] = None,
@transient status_code: Int = StatusCodes.BadRequest) extends ErrorResponse(error, error_description, error_uri) with OauthResponse
case class Oauth2Client(
clientId: String,
secretInfo: SecretInfo,
scope: Seq[String] = Seq(),
authorizedGrantTypes: Seq[String] = Seq(),
redirectUri: String,
authorities: Seq[String] = Seq(),
accessTokenValidity: Long = 3600,
refreshTokenValidity: Long = 604800,
additionalInfo: Option[String],
autoapprove: Boolean = false) {
def invalidScopes(sc: Option[String]): Boolean = sc.exists(v => invalidScopes(v.split(" ")))
def invalidScopes(sc: Seq[String]): Boolean = sc.foldLeft(false) { (acc, curr) => acc || !scope.contains(curr) }
}
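// Example (illustrative): for a client registered with scope = Seq("read", "write"),
// invalidScopes(Some("read")) == false, while invalidScopes(Some("read admin")) == true,
// because "admin" is not among the client's registered scopes.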
case class UserId(value: String, provider: Option[String])
case class SecretInfo(secret: String, salt: Option[String] = None)
case class Oauth2User(id: UserId, pwd: Option[SecretInfo] = None)
case class ClientAuthentication(clientId: String, clientSecret: String)
object types {
type GrantType = String
type ResponseType = String
type State = String
} | adaptorel/oauthorize | oauthorize-core/src/main/scala/models/model.scala | Scala | apache-2.0 | 3,758 |
package controllers
import com.google.inject.Inject
import models.AcquireCacheKeyPrefix.CookiePrefix
import models.CompleteAndConfirmFormModel.AllowGoingToCompleteAndConfirmPageCacheKey
import models.VehicleTaxOrSornFormModel.Form.{SelectId, SornFormError, SornId, SornVehicleId}
import models.{ErrorType, NoSelection, NoSorn, VehicleTaxOrSornFormModel, VehicleTaxOrSornViewModel}
import play.api.data.{Form, FormError}
import play.api.mvc.{Action, Controller, Request}
import uk.gov.dvla.vehicles.presentation.common.LogFormats.DVLALogger
import uk.gov.dvla.vehicles.presentation.common.clientsidesession.ClientSideSessionFactory
import uk.gov.dvla.vehicles.presentation.common.clientsidesession.CookieImplicits.{RichCookies, RichForm, RichResult}
import uk.gov.dvla.vehicles.presentation.common.model.{NewKeeperDetailsViewModel, NewKeeperEnterAddressManuallyFormModel, VehicleAndKeeperDetailsModel}
import uk.gov.dvla.vehicles.presentation.common.views.helpers.FormExtensions.formBinding
import utils.helpers.Config
import views.html.acquire.vehicle_tax_or_sorn
class VehicleTaxOrSorn @Inject()()(implicit clientSideSessionFactory: ClientSideSessionFactory,
config: Config) extends Controller with DVLALogger {
private[controllers] val form = Form(
VehicleTaxOrSornFormModel.Form.Mapping
)
  private final val NoCookiesFoundMessage = "Failed to find new keeper details and/or vehicle details in cache. " +
"Now redirecting to vehicle lookup"
def present = Action { implicit request =>
val newKeeperDetailsOpt = request.cookies.getModel[NewKeeperDetailsViewModel]
val vehicleAndKeeperDetailsOpt = request.cookies.getModel[VehicleAndKeeperDetailsModel]
(newKeeperDetailsOpt, vehicleAndKeeperDetailsOpt) match {
case (Some(newKeeperDetails), Some(vehicleAndKeeperDetails)) =>
logMessage(request.cookies.trackingId(), Info, "Presenting vehicle tax or sorn view")
Ok(vehicle_tax_or_sorn(VehicleTaxOrSornViewModel(form.fill(), vehicleAndKeeperDetails, newKeeperDetails)))
case _ =>
redirectToVehicleLookup(NoCookiesFoundMessage)
}
}
private def redirectToVehicleLookup(message: String)
(implicit request: Request[_]) = {
logMessage(request.cookies.trackingId(), Warn, message)
Redirect(routes.VehicleLookup.present())
}
def back = Action { implicit request =>
request.cookies.getModel[NewKeeperEnterAddressManuallyFormModel] match {
case Some(manualAddress) =>
logMessage(request.cookies.trackingId(), Debug,
s"Redirecting to ${routes.NewKeeperEnterAddressManually.present()}")
Redirect(routes.NewKeeperEnterAddressManually.present())
case None =>
logMessage(request.cookies.trackingId(), Debug,
s"Redirecting to ${routes.NewKeeperChooseYourAddress.present()}")
Redirect(routes.NewKeeperChooseYourAddress.present())
}
}
def submit = Action { implicit request =>
form.bindFromRequest.fold(
      invalidForm => { // Note: this code should never be executed, as only an optional checkbox is posted
val newKeeperDetailsOpt = request.cookies.getModel[NewKeeperDetailsViewModel]
val vehicleAndKeeperDetailsOpt = request.cookies.getModel[VehicleAndKeeperDetailsModel]
(newKeeperDetailsOpt, vehicleAndKeeperDetailsOpt) match {
case (Some(newKeeperDetails), Some(vehicleAndKeeperDetails)) =>
val (errorForm, error) = formWithReplacedErrors(invalidForm)
BadRequest(vehicle_tax_or_sorn(VehicleTaxOrSornViewModel(errorForm,
vehicleAndKeeperDetails, newKeeperDetails, error)))
case _ => redirectToVehicleLookup(NoCookiesFoundMessage)
}
},
validForm => {
logMessage(request.cookies.trackingId(), Debug, s"Redirecting to ${routes.CompleteAndConfirm.present()}")
Redirect(routes.CompleteAndConfirm.present())
.withCookie(validForm)
.withCookie(AllowGoingToCompleteAndConfirmPageCacheKey, "true")
}
)
}
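  // Rewrites the generic binding errors with SORN-specific messages; the two branches
  // below differ only in the message bound to SornFormError, depending on whether the
  // SORN option was selected in the posted form.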
private def formWithReplacedErrors(form: Form[VehicleTaxOrSornFormModel]): (Form[VehicleTaxOrSornFormModel], ErrorType) = {
(
if ( form.data.get("select").exists(_ == SornId) && form.globalError.isDefined) {
form.replaceError(
"", FormError(key = SornFormError, message = "error.sornformerror", args = Seq.empty)
).replaceError(
        SornVehicleId, FormError(key = SornVehicleId, message = "error.sornVehicleid", args = Seq.empty)
).replaceError(
SelectId, FormError(key = SelectId, message = "error.sornselectid", args = Seq.empty)
).distinctErrors
} else {
form.replaceError(
"", FormError(key = SornFormError, message = "error.nosornformerror", args = Seq.empty)
).replaceError(
        SornVehicleId, FormError(key = SornVehicleId, message = "error.sornVehicleid", args = Seq.empty)
).replaceError(
SelectId, FormError(key = SelectId, message = "error.sornselectid", args = Seq.empty)
).distinctErrors
}
, if (form.globalError.isDefined) NoSorn else NoSelection)
}
} | dvla/vehicles-acquire-online | app/controllers/VehicleTaxOrSorn.scala | Scala | mit | 5,210 |
import sbt._
object Settings {
object Versions {
val akka = "2.5.22"
val akkaActor = "2.5.22"
val scalaTest = "3.0.0"
val akkaHttp = "10.1.8"
val kOAuth = "2.0.0"
val scalaCache = "0.9.3"
val scalaLogging = "3.5.0"
val scalaTags = "0.6.8"
val logback = "1.1.9"
val twitterText = "1.6.1"
}
object Dependencies {
val akkaActor = "com.typesafe.akka" %% "akka-actor" % Versions.akkaActor
val akkaStream = "com.typesafe.akka" %% "akka-stream" % Versions.akka
val akkaStreamTestkit = "com.typesafe.akka" %% "akka-stream-testkit" % Versions.akka
val akkaHttp = "com.typesafe.akka" %% "akka-http-core" % Versions.akkaHttp
val akkaHttpSprayJson = "com.typesafe.akka" %% "akka-http-spray-json" % Versions.akkaHttp
val scalaTest = "org.scalatest" %% "scalatest" % Versions.scalaTest % "test"
val kOAuth = "com.hunorkovacs" %% "koauth" % Versions.kOAuth excludeAll(
ExclusionRule(organization = "org.specs2"))
val scalaCacheCaffeine = "com.github.cb372" %% "scalacache-caffeine" % Versions.scalaCache
val scalaLogging = "com.typesafe.scala-logging" %% "scala-logging" % Versions.scalaLogging
val scalaTags = "com.lihaoyi" %% "scalatags" % Versions.scalaTags
val logback = "ch.qos.logback" % "logback-classic" % Versions.logback
val twitterText = "com.twitter" % "twitter-text" % Versions.twitterText
}
val dependencies: Seq[ModuleID] = {
import Dependencies._
Seq(
akkaActor,
akkaStream,
akkaStreamTestkit,
akkaHttp,
akkaHttpSprayJson,
scalaTest,
kOAuth,
scalaCacheCaffeine,
scalaLogging,
scalaTags,
logback,
twitterText
)
}
}
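// Usage sketch (assumed, not part of this file): in build.sbt one would typically wire
// this in with `libraryDependencies ++= Settings.dependencies`, so the module list above
// becomes the project's dependency set.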
| becompany/akka-social-stream | project/Settings.scala | Scala | apache-2.0 | 1,710 |
/*
* Copyright 2015 Webtrends (http://www.webtrends.com)
*
* See the LICENCE.txt file distributed with this work for additional
* information regarding copyright ownership.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.webtrends.harness
import java.util.concurrent.TimeUnit
import akka.actor.{Actor, ActorSystem, PoisonPill, Props}
import akka.pattern.ask
import akka.testkit.TestKit
import akka.util.Timeout
import com.webtrends.harness.utils.ActorWaitHelper
import org.specs2.mutable.SpecificationLike
import scala.concurrent.Await
import scala.concurrent.duration.Duration
class WaitedOnActor extends Actor with ActorWaitHelper {
def receive: Receive = {
case "message" => sender ! "waitedResponse"
}
}
class WaitActor extends Actor with ActorWaitHelper {
implicit val timeout = Timeout(5000, TimeUnit.MILLISECONDS)
val waited = awaitActor(Props[WaitedOnActor])
def receive: Receive = {
case "message" => sender ! "response"
case "waited" => sender ! Await.result((waited ? "message").mapTo[String], Duration(5, "seconds"))
}
}
class ActorWaitSpec extends TestKit(ActorSystem("wait-spec")) with SpecificationLike {
implicit val timeout = Timeout(5000, TimeUnit.MILLISECONDS)
val waitActor = ActorWaitHelper.awaitActor(Props[WaitActor], system)
sequential
"ActorWaitSpec" should {
"await the WaitActor successfully " in {
Await.result((waitActor ? "message").mapTo[String], Duration(5, "seconds")) must beEqualTo("response")
}
"the WaitActor's awaited actor must have come up " in {
Await.result((waitActor ? "waited").mapTo[String], Duration(5, "seconds")) must beEqualTo("waitedResponse")
}
}
step {
waitActor ! PoisonPill
}
}
| pcross616/wookiee | wookiee-test/src/test/scala/com/webtrends/harness/ActorWaitSpec.scala | Scala | apache-2.0 | 2,245 |
/*
* Copyright 2017 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.computations
import uk.gov.hmrc.ct.box.{CtBoxIdentifier, CtOptionalInteger, Linked}
case class CP246(value: Option[Int]) extends CtBoxIdentifier(name = "Allowances") with CtOptionalInteger
object CP246 extends Linked[CP93, CP246] {
override def apply(source: CP93): CP246 = CP246(source.value)
}
| liquidarmour/ct-calculations | src/main/scala/uk/gov/hmrc/ct/computations/CP246.scala | Scala | apache-2.0 | 930 |
package com.sksamuel.elastic4s.requests.security.roles
import com.sksamuel.elastic4s.testkit.DockerTests
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec
import scala.util.Try
class UpdateRoleTest extends AnyWordSpec with Matchers with DockerTests {
Try {
client.execute {
deleteRole("role1")
}.await
}
client.execute {
createRole("role1")
}.await
"UpdateRole Http Request" should {
"update the role" in {
val indexPrivileges = Seq(IndexPrivileges(
Seq("index1", "index2"),
Seq("read"),
        allow_restricted_indices = Some(false)
))
val resp = client.execute {
updateRole(
"role1",
indices=indexPrivileges
)
}.await
val roles = client.execute {
getRole("role1")
}.await
resp.result.role.created shouldBe false
roles.result.contains("role1") shouldBe true
roles.result("role1").indices shouldBe indexPrivileges
}
}
}
| stringbean/elastic4s | elastic4s-tests/src/test/scala/com/sksamuel/elastic4s/requests/security/roles/UpdateRoleTest.scala | Scala | apache-2.0 | 937 |
/**
* Copyright 2012-2013 StackMob
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.stackmob.scaliak
import com.basho.riak.client.raw.pbc.PBClientAdapter
import com.basho.riak.pbc.{RiakClient => PBRiakClient, MapReduceResponseSource, RequestMeta}
import com.basho.riak.client.raw.query.MapReduceSpec
import com.basho.riak.pbc.mapreduce.MapReduceResponse
import com.basho.riak.client.http.util.Constants
import scalaz.IterV
import scalaz.IterV._
import scalaz.effect.IO
import scalaz.syntax.monad._
import com.fasterxml.jackson.databind.ObjectMapper
import annotation.tailrec
import scala.collection.JavaConverters._
class PBStreamingClient(host: String, port: Int) extends PBClientAdapter(host, port) with RawClientWithStreaming {
val pbClient = new PBRiakClient(host, port)
val mapper = new ObjectMapper()
override def mapReduce[T, U, A](spec: MapReduceSpec, elementClass: Class[T], converter: T => U, iter: IterV[U, A]): IO[IterV[U, A]] = {
val meta = new RequestMeta()
meta.contentType(Constants.CTYPE_JSON)
val source = pbClient.mapReduce(spec.getJSON, meta)
def deserialize(resp: MapReduceResponse): T = {
mapper.readValue[java.util.Collection[T]](
resp.getJSON.toString,
mapper.getTypeFactory.constructCollectionType(classOf[java.util.Collection[_]], elementClass)
).asScala.head
}
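    // Drives the iteratee: feed one deserialized element per step until the source is
    // closed, the iteratee is Done, or a null JSON chunk signals the end of the results.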
@tailrec
def feedFromSource(source: MapReduceResponseSource, iter: IterV[U, A]): IO[IterV[U, A]] = iter match {
case _ if source.isClosed => iter.pure[IO]
case Done(_, _) => iter.pure[IO]
case Cont(k) if !source.hasNext => feedFromSource(source, k(Empty[U]))
case Cont(k) => {
val next = source.next()
if(Option(next.getJSON).isDefined) {
feedFromSource(source, k(El(converter(deserialize(next)))))
} else {
iter.pure[IO]
}
}
}
feedFromSource(source, iter)
}
}
| stackmob/scaliak | src/main/scala/com/stackmob/scaliak/PBStreamingClient.scala | Scala | apache-2.0 | 2,436 |
package nn.conv.versions
/**
* Created by nm on 09/01/17.
* Input channels are parallelised across workgroups
*/
import ir.TupleType
import ir.ast._
import lift.arithmetic.SizeVar
import nn._
import nn.conv.{Conv, ConvCompanion, ConvDatasets, SlidingWindowConfig}
import opencl.executor.{Execute, Executor}
import opencl.ir._
import opencl.ir.pattern._
object Conv1 extends ConvCompanion {
val kernel_xdim_SV = SizeVar("kernel_xdim")
val kernel_ydim_SV = SizeVar("kernel_ydim")
val input_xdim_SV = SizeVar("input_xdim")
val input_ydim_SV = SizeVar("input_ydim")
val layer_idim_SV = SizeVar("layer_idim")
val layer_odim_SV = SizeVar("layer_odim")
val in_channels_SV = SizeVar("in_channels")
val out_channels_SV = SizeVar("out_channels")
val n_inputs_SV = SizeVar("n_inputs")
val n_batches_SV = SizeVar("n_batches")
/* Sequential layer */
def Seq(activation_f: UserFun, n_inputs: Int, input_xdim: Int, input_ydim: Int, in_channels: Int,
kernel_xdim: Int, kernel_ydim: Int, out_channels: Int): Array[FunDecl] = Array(λ(
// Lift K: y, x, i, o
// Caffe K: o, i, y, x
// AT(AT(AT(AT(Float, out_channels_SV), in_channels_SV), kernel_xdim_SV), kernel_ydim_SV),
AT(AT(AT(AT(Float, kernel_xdim), kernel_ydim), in_channels), out_channels),
AT(Float, out_channels),
// Lift X: n, y, x, c
// Caffe X: n, c, y, x
// AT(AT(AT(AT(Float, in_channels_SV), input_xdim_SV), input_ydim_SV), n_inputs_SV),
AT(AT(AT(AT(Float, input_xdim), input_ydim), in_channels), n_inputs),
(K, B, X) => {
// n, y, x, c -> n, c, y, x
Map(TransposeW() o Map(TransposeW())) o
MapSeq(λ((single_input) => {
MapSeq(λ((pass_strip) => {
MapSeq(λ((pass_window) => {
Join() o
ReduceSeq(fun((acc, element) => {
val input_element = Get(element, 0)
val kernel_element = Get(element, 1)
Join() o
MapSeq(fun((out_channel) => {
val acc_out_channel = Get(out_channel, 0)
val kernel_out_channel = Get(out_channel, 1)
ReduceSeq(add, acc_out_channel) o
MapSeq(fun((x, w) =>
mult(x, w)
)) $ Zip(input_element, kernel_out_channel)
})) $ Zip(acc, kernel_element)
}), MapSeq(id) $ B
) $ Zip(Join() $ pass_window,
// o, i, y, x ->
// o, y, i, x ->
// o, y, x, i ->
// y, o, x, i ->
// y, x, o, i ->
// y * x, o, i
Join() o Map(/*Map(Transpose()) o */Transpose()) o Transpose() o Map(Map(Transpose()) o Transpose()) $ K)
})) $ pass_strip
})) o Slide2D(kernel_ydim, 1, kernel_xdim, 1) $ single_input
})) o
// n, c, y, x ->
// n, y, c, x ->
// n, y, x, c
Map(Map(Transpose()) o Transpose()) $ X
}
))
def main(args: Array[String]): Unit = {
Executor.loadLibrary()
Executor.init(1, 0)
// println(Compile(Seq(Linear)))
val input_xdim = 227
val input_ydim = 227
val in_channels = 3
val n_inputs = 5
val kernel_xdim = 11
val kernel_ydim = 11
val out_channels = 96
val K = Array.fill[Array3D[Float]](out_channels)(
Array.fill[Array2D[Float]](in_channels)(
Array.fill[Array[Float]](kernel_ydim)(
Array.fill[Float](kernel_xdim)(1.0f))))
val B = Array.fill[Float](out_channels)(1.0f)
val X = Array.fill[Array3D[Float]](n_inputs)(
Array.fill[Array2D[Float]](in_channels)(
Array.fill[Array[Float]](input_ydim)(
Array.fill[Float](input_xdim)(1.0f))))
val (outputsFlat: Array[Float], runtime) = Execute(1, 1)[Array[Float]](
Seq(Linear, n_inputs, input_xdim, input_ydim, in_channels,
kernel_xdim, kernel_ydim, out_channels)(0), K, B, X)
    println(outputsFlat.take(10).mkString(", ")) // print a small sample; the full output is large
}
val locA: Int = 0
val locB: Int = 1
val locC: Int = 2
/* Parallel layer */
def Par(activationF: UserFun, inputShape: Shape, tiling: SlidingWindowConfig, nKernels: Int,
sliding: SlidingWindowConfig,
kernelsPerGroup: Int, seqElsPerThread: Int, vectorLen: Int,
coalesce: Boolean, unrollReduce: Boolean): Array[FunDecl] = {
val n_tiles_per_input: Int = tiling.n
val n_kwindows_per_tile: Int = sliding.n
val n_kwindows_per_input: Int =
((inputShape.sizePadded - (sliding.size - sliding.stride)).toFloat /
sliding.stride).toInt
/* TODO: enforce size checks */
def Layer: FunDecl = λ(
AT(AT(AT(AT(Float, sliding.size), sliding.size), inputShape.nChannels), nKernels),
AT(Float, nKernels),
AT(AT(AT(AT(AT(Float, inputShape.sizePadded), inputShape.sizePadded), inputShape.nChannels),
inputShape.nInputs), inputShape.nBatches),
(K, B, X) => {
MapWrg(locC)(λ(AT(
TupleType(AT(AT(AT(AT(Float, sliding.size), sliding.size), n_kwindows_per_tile),
n_kwindows_per_tile),
AT(AT(AT(Float, sliding.size), sliding.size), nKernels)),
inputShape.nChannels * n_tiles_per_input * n_tiles_per_input * inputShape.nInputs),
(inputs_batch_and_K) => {
MapWrg(locB)(λ(AT(AT(AT(AT(AT(AT(AT(Float, inputShape.nChannels), kernelsPerGroup),
n_kwindows_per_input), n_kwindows_per_input), n_tiles_per_input), n_tiles_per_input),
inputShape.nInputs),
(kGroup_inputs_tiles_tiles_kwins_kwins_ks_inChs) => {
MapWrg(locA)(λ(AT(AT(AT(AT(Float, inputShape.nChannels), kernelsPerGroup),
n_kwindows_per_input), n_kwindows_per_input),
(tile_kwins_kwins_ks_inChs) => {
MapLcl(locC)(λ(AT(AT(Float, inputShape.nChannels), kernelsPerGroup),
(kwin_ks_inChs_toWrap) => {
MapLcl(locB)(λ(AT(AT(Float, inputShape.nChannels), kernelsPerGroup),
(kwin_ks_inChs) => { Join() o
MapLcl(locA)(λ(AT(Float, inputShape.nChannels),
(k_inChs) => {
ReduceSeq(add, 0.0f) $ k_inChs
})) $ kwin_ks_inChs
})) o Split(kernelsPerGroup) $ kwin_ks_inChs_toWrap
})) o Join() o Join() $ tile_kwins_kwins_ks_inChs
})) o Join() o Join() $ kGroup_inputs_tiles_tiles_kwins_kwins_ks_inChs
})) o
// TODO: Transpose((0, 1, 3, 2))
/* DEPRECATED (n_kernel_groups, nInputs, n_tiles_per_input, n_k_passes, n_tiles_per_input,
* n_k_windows, kernels_per_group, nChannels) ->
*
* (n_kernel_groups, nInputs, n_tiles_per_input * n_k_passes, n_tiles_per_input * n_k_windows,
* kernels_per_group, nChannels) ->
*
* (n_kernel_groups, nInputs, n_kwindows_per_input, n_kwindows_per_input,
* kernels_per_group, nChannels) */
// Map(Map(Join() o Map(Map(Join())))) o
/* DEPRECATED (n_kernel_groups, nInputs, n_tiles_per_input, n_tiles_per_input, n_k_passes,
* n_k_windows, kernels_per_group, nChannels) ->
*
* (n_kernel_groups, nInputs, n_tiles_per_input, n_k_passes, n_tiles_per_input,
* n_k_windows, kernels_per_group, nChannels) */
// Map(Map(Map(Transpose()))) o
/* (n_kernel_groups, nInputs, n_tiles_per_input, n_tiles_per_input, kernels_per_group,
* n_k_passes, n_k_windows, nChannels) ->
* (n_kernel_groups, nInputs, n_tiles_per_input, n_tiles_per_input, n_k_passes,
* n_k_windows, kernels_per_group, nChannels) */
Map(Map(Map(Map(Map(Transpose()) o Transpose())))) o
/* (n_kernel_groups, nInputs, nChannels, n_tiles_per_input, n_tiles_per_input,
* kernels_per_group, n_k_passes, n_k_windows) ->
* (n_kernel_groups, nInputs, n_tiles_per_input, n_tiles_per_input, kernels_per_group,
* n_k_passes, n_k_windows, nChannels) */
Map(Map(Map(Map(Map(Map(Transpose()) o Transpose()) o Transpose()) o Transpose()) o Transpose())) o
/* (n_kernel_groups, nInputs * nChannels * n_tiles_per_input * n_tiles_per_input,
* kernels_per_group, n_k_passes, n_k_windows) ->
* (n_kernel_groups, nInputs, nChannels, n_tiles_per_input, n_tiles_per_input,
* kernels_per_group, n_k_passes, n_k_windows) */
Map(Split(inputShape.nChannels) o Split(n_tiles_per_input) o Split(n_tiles_per_input)) o
//
MapWrg(locA)(λ(TupleType(AT(AT(AT(AT(Float, sliding.size), sliding.size),
n_kwindows_per_tile), n_kwindows_per_tile),
AT(AT(AT(Float, sliding.size), sliding.size), nKernels)),
(input_tile_and_K) => {
//
MapWrg(locB)(λ(TupleType(
AT(AT(AT(Float, sliding.size), sliding.size),
kernelsPerGroup), AT(Float, kernelsPerGroup)),
(kernels_group) => {
/* (kernels_per_group, n_k_passes * n_k_windows) ->
* (kernels_per_group, n_k_passes, n_k_windows) */
Map(Split(n_kwindows_per_tile)) o
/* (n_passes * n_windows, kernels_per_group) ->
* (kernels_per_group, n_k_passes * n_k_windows) */
TransposeW() o
MapLcl(locC)(λ((pass_window) => {
λ(AT(AT(Float, sliding.size), kernelsPerGroup), (partially_reduced_window) =>
// TODO: add if conditional to remove final reduce in case els_per_thread == kernel_size
ReduceWindowAndAddBias()(partially_reduced_window, /* biases */Get(kernels_group, 1))) o
/* (kernel_sliding.size, kernels_per_group) -> (kernels_per_group, kernel_sliding.size) */
TransposeW() o
MapLcl(locB)(λ((window_row, kernels_row) => {
ReduceRow() o
// (kernels_per_group, kernel_sliding.size / els_per_thread)
Map(Join(/*tiles of elements*/)/* o
MapSeq(/* Dissolve one-element output of Reduce */Join())*/) o
Split(sliding.size / seqElsPerThread) o
MapLcl(locA)(WeighElementSequence()) o
/* (kernels_per_group, kernel_sliding.size / els_per_thread, els_per_thread, tuple of input_shape.nChannels) ->
* (kernels_per_group * kernel_sliding.size / els_per_thread, els_per_thread, tuple of input_shape.nChannels)*/
Join() o
/* (kernels_per_group, kernel_sliding.size, input_shape.nChannels) ->
* (kernels_per_group, kernel_sliding.size / els_per_thread, els_per_thread, tuple of input_shape.nChannels) */
Map(/* for each kernel in the tile */
λ((kernel_row) => Split(seqElsPerThread) $
Zip(
/*ReorderStride(els_per_thread) $ */window_row,
/*ReorderStride(input_shape.nChannels) $ */ debug.PrintTypeInConsole("kernel_row") $ kernel_row))) o
/* (kernel_sliding.size, input_shape.nChannels, kernels_per_group) ->
* (kernels_per_group, kernel_sliding.size, input_shape.nChannels) */
Transpose() o Map(Transpose()) $ kernels_row
})) $ Zip(pass_window, RestoreKernelShape() $ /* weights */ Get(kernels_group, 0))
})) o toLocal(MapLcl(locC)(λ((pass_window) =>
MapLcl(locA)(λ((window_row) => {
MapLcl(locB)(id) $ window_row
})) $ pass_window))) o
/* (n_passes, n_windows, n_rows) -> (n_passes*n_windows, n_rows) */
Join() $ Get(input_tile_and_K, 0)
})) $ ReshapeAndGroupKernels()(Get(input_tile_and_K, 1), B)
})) $ inputs_batch_and_K
})) $ MatchXandK()(SlideX() $ X, K)
}
)
/* Produces a tiled slided version of X.
* Returns:
* AT(AT(AT(AT(AT(AT(AT(Float, kernel_sliding.size), kernel_sliding.size), n_kwindows_per_tile),
* n_kwindows_per_tile), input_shape.nChannels),
* n_tiles_per_input * n_tiles_per_input * input_shape.nInputs), input_shape.nBatches) */
def SlideX(): FunDecl =
λ(AT(AT(AT(AT(AT(Float, inputShape.sizePadded), inputShape.sizePadded), inputShape.nChannels),
inputShape.nInputs), inputShape.nBatches), (X) =>
debug.PrintTypeInConsole("After SLIDE") o Map(Join() o Map(Transpose() o Map(Join() o
TiledSlidedND(2)(sliding.size, sliding.stride, tiling.stride)))) $ X)
/* Matches X and K based on input channels
* Returns:
* AT(AT(TupleType(
* AT(AT(AT(AT(Float, kernel_sliding.size), kernel_sliding.size), n_kwindows_per_tile),
* n_kwindows_per_tile),
* AT(AT(AT(Float, kernel_sliding.size), kernel_sliding.size), n_kernels)),
* input_shape.nChannels * n_tiles_per_input * n_tiles_per_input * input_shape.nInputs), input_shape.nBatches) */
def MatchXandK(): FunDecl =
λ(AT(AT(AT(AT(AT(AT(AT(Float, sliding.size), sliding.size), n_kwindows_per_tile),
n_kwindows_per_tile), inputShape.nChannels),
n_tiles_per_input * n_tiles_per_input * inputShape.nInputs), inputShape.nBatches),
AT(AT(AT(AT(Float, sliding.size), sliding.size), inputShape.nChannels), nKernels),
(X, K) =>
Map(Join() o Map(λ((tile_inChs_kwins_kwins_rows_els) => {
Zip(tile_inChs_kwins_kwins_rows_els, Transpose() $ K)
}))) $ X)
/* Reshapes kernels -- makes the output channels th e outermost dimension -- and groups them.
* Returns:
* AT(TupleType(
* /* weights */ AT(AT(AT(Float, kernel_sliding.size), kernel_sliding.size), kernels_per_group),
* /* biases */ AT(Float, kernels_per_group)),
* n_kernel_groups) */
def ReshapeAndGroupKernels(): FunDecl =
λ(AT(AT(AT(Float, sliding.size), sliding.size), nKernels),
//AT(AT(AT(AT(Float, n_kernels), input_shape.nChannels), kernel_sliding.size), kernel_sliding.size),
AT(Float, nKernels), (kernels, biases) =>
Zip(Split(kernelsPerGroup) $ kernels, Split(kernelsPerGroup) $ biases))
/* Reshapes the kernel back to the original shape, where output channels are the lowest dimension
* of the tensor.
* TODO: get rid of this - there is no advantage of having this
* Returns:
* AT(AT(AT(AT(Float, kernels_per_group), input_shape.nChannels), kernel_sliding.size), kernel_sliding.size) */
def RestoreKernelShape(): FunDecl =
λ(AT(AT(AT(AT(Float, inputShape.nChannels), sliding.size), sliding.size), kernelsPerGroup),
(kernels_tile) =>
Map(Map(Transpose()) o Transpose()) o Transpose() $ kernels_tile)
/* Computes a weighted sum of a batch of elements for one output channel.
* Returns:
* AT(Float, 1) */
def WeighElementSequence(): FunDecl =
λ(AT(TupleType(Float, Float), seqElsPerThread),
(tile_of_els) => {
/* Compute a sum of the whole batch */
MapSeq(toLocal(id)) o ReduceSeq(add, toPrivate(id) $ 0.0f) o
/* Compute sums of each element separately */
MapSeq(λ(TupleType(Float, Float),
(single_element) =>
// TODO: make sure mult doesn't save to global memory first
toPrivate(id) $ mult(Get(single_element, 0), Get(single_element, 1))
)) o debug.PrintTypeInConsole("tile_of_els") $ tile_of_els
})
/* Reduces weighted pass window rows for each channel.
* NB: Rows are already partially reduced by the factor of els_per_thread in WeighElementSequence()
* Returns:
* AT(Float, kernels_per_group) */
def ReduceRow(): FunDecl =
λ(AT(AT(Float, sliding.size / seqElsPerThread), kernelsPerGroup),
(weighted_row) => {
Join() o MapLcl(locA)(λ((weighted_row_per_out_ch) => {
MapSeq(toLocal(id)) o ReduceSeq(add, 0.0f) $ weighted_row_per_out_ch
})) $ weighted_row
})
/* Reduces weighted pass windows for each channel.
* Returns:
* AT(Float, kernels_per_group) */
def ReduceWindowAndAddBias(): FunDecl =
λ(AT(AT(Float, sliding.size), kernelsPerGroup),
AT(Float, kernelsPerGroup),
(partially_reduced_windows, biases) => {
Join() o Join() o
MapLcl(locB)(λ((reduced_rows_to_wrap, bias) => {
MapLcl(locA)(λ((reduced_rows) => {
// Reduce weighted pass window separately for each output channel
MapSeq(toGlobal(activationF)) o ReduceSeq(add, toPrivate(id) $ bias) $ reduced_rows
})) o /* Wrap into an array of 1 element. This is to avoid race condition in MapLcl(a) by using 1 thread. */
Split(sliding.size) $ reduced_rows_to_wrap
})) $ Zip(partially_reduced_windows, biases)
})
Array(Layer)
}
def apply(iP: InitParameters): Conv = {
/**
* Class factory: verifies that an object can be created,
* initializes variables, computes workgroup sizes.
*/
val exceptionMsgPrefix: String = "In the Conv layer with the following configuration:\\n" +
conv.configToString(iP.inputShape.size, -1, iP.optParams.elsPerThread, iP.dim.nKernels,
iP.optParams.kernelsPerGroup, iP.optParams.vectorLen,
iP.optParams.coalesce, iP.optParams.unrollReduce,
iP.dim.kernelSize, iP.dim.kernelStride, iP.optParams.inputTileSize)
/* Tiles */
val kernelSliding: SlidingWindowConfig = SlidingWindowConfig(
size = iP.dim.kernelSize,
stride = iP.dim.kernelStride,
n = {
val n: Float = (iP.optParams.inputTileSize - (iP.dim.kernelSize - iP.dim.kernelStride)).toFloat /
iP.dim.kernelStride
if (n % 1 != 0) throw new java.lang.IllegalArgumentException(exceptionMsgPrefix +
f"input tiles (${iP.optParams.inputTileSize}%d) are not divisible by the chosen " +
f"kernelSize (${iP.dim.kernelSize}%d) and kernelStride (${iP.dim.kernelStride}%d)")
n.toInt
},
nChannels = iP.dim.nKernels
)
val inputTiling: SlidingWindowConfig = {
val stride = iP.optParams.inputTileSize - (kernelSliding.size - kernelSliding.stride)
SlidingWindowConfig(
size = iP.optParams.inputTileSize,
stride = stride,
// TODO: change to n = ceil((iP.inputShape.size - iP.inputTileSize + stride) / stride)
// It's the same, but makes more sense
n = 1 + Math.ceil((iP.inputShape.size - iP.optParams.inputTileSize).toFloat / stride).toInt)
}
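    // Worked example (illustrative, assumed sizes): with inputShape.size = 227,
    // inputTileSize = 11 and kernel size == stride == 11, the tiling stride is 11 and
    // n = 1 + ceil((227 - 11) / 11) = 21, so the padded size below is 11 + 11 * 20 = 231.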
/* Check parameters */
if (iP.dim.nKernels % iP.optParams.kernelsPerGroup != 0)
throw new java.lang.IllegalArgumentException(exceptionMsgPrefix +
f"the number of kernels (${iP.dim.nKernels}%d) must be divisible by " +
f"kernelsPerGroup (${iP.optParams.kernelsPerGroup}%d)")
if (kernelSliding.size % iP.optParams.elsPerThread != 0)
throw new java.lang.IllegalArgumentException(exceptionMsgPrefix +
f"kernel size in all dimensions (=${kernelSliding.size}%d) must be divisible by elsPerThread " +
f"(${iP.optParams.elsPerThread}%d)")
/* Padding */
// Calculate how much padding is required
// TODO: change to sizePadded = inputTiling.stride * inputTiling.n + (inputTiling.size - inputTiling.stride)
// It's the same, but makes more sense
iP.inputShape.sizePadded = inputTiling.size + inputTiling.stride * (inputTiling.n - 1)
println(f"inputTiling.n=${inputTiling.n}%d")
println(f"iP.inputShape.sizePadded=${iP.inputShape.sizePadded}%d")
val outputShape: Shape = { Shape(
nBatches = iP.inputShape.nBatches,
nInputs = iP.inputShape.nInputs,
size = ((iP.inputShape.size - (kernelSliding.size - kernelSliding.stride)).toFloat / kernelSliding.stride).toInt,
sizePadded = {
val sizePadded: Float = (iP.inputShape.sizePadded - (kernelSliding.size - kernelSliding.stride)).toFloat /
kernelSliding.stride
if (sizePadded % 1 != 0) throw new java.lang.IllegalArgumentException(exceptionMsgPrefix +
"padded inputs are not divisible by the chosen kernelShape and kernelStride")
sizePadded.toInt
},
nChannels = iP.dim.nKernels)
}
/* Parallelization parameters */
val localSize: Array[Int] = Array.fill[Int](3)(0)
localSize(locA) = (iP.optParams.kernelsPerGroup *
Math.ceil(kernelSliding.size.toFloat / iP.optParams.elsPerThread)).toInt
localSize(locB) = kernelSliding.size
// TODO: make sure it is smaller than 64 (clinfo)
localSize(locC) = scala.math.pow(kernelSliding.n, 2).toInt
{
val groupSize: Int = localSize(0) * localSize(1) * localSize(2)
if (groupSize > nn.maxWorkGroupSize)
throw new java.lang.IllegalArgumentException(exceptionMsgPrefix +
f"group size (==$groupSize%d) must be less or equal to maxWorkGroupSize (${nn.maxWorkGroupSize}%d).\\n" +
f"Decrease nKernelsPerGroup or inputTileSize or increase elsPerThread (${iP.optParams.elsPerThread}%d)")
}
val globalSize: Array[Int] = Array.fill[Int](3)(0)
globalSize(locA) = localSize(locA) * iP.inputShape.nInputs * inputTiling.n * inputTiling.n * iP.inputShape.nChannels
globalSize(locB) = localSize(locB) * Math.ceil(iP.dim.nKernels.toFloat / iP.optParams.kernelsPerGroup).toInt
globalSize(locC) = localSize(locC) * iP.inputShape.nBatches
/* Now that all parameters are calculated and verified, build the layer */
new Conv1(
iP.liftFPropFactory(iP.activationFun, iP.inputShape, inputTiling,
iP.dim.nKernels,kernelSliding, iP.optParams.kernelsPerGroup, iP.optParams.elsPerThread, iP.optParams.vectorLen,
iP.optParams.coalesce, iP.optParams.unrollReduce),
iP.inputShape, outputShape,
inputTiling, kernelSliding,
iP.optParams.elsPerThread, iP.optParams.kernelsPerGroup, iP.optParams.vectorLen,
iP.optParams.coalesce, iP.optParams.unrollReduce,
localSize, globalSize)
}
/* Padding */
def pad(inputs: PaddedArray[Array5D[Float]], inputShape: Shape): Unit = {
inputs.padded =
Array.fill[Array4D[Float]](inputShape.nBatches)(
Array.fill[Array3D[Float]](inputShape.nInputs)(
Array.fill[Array2D[Float]](inputShape.nChannels)(
Array.fill[Array[Float]](inputShape.sizePadded)(
Array.fill[Float](inputShape.sizePadded)(0)))))
// Add empty elements to lines
for {b <- 0 until inputShape.nBatches; i <- 0 until inputShape.nInputs; c <- 0 until inputShape.nChannels
h <- 0 until inputShape.size}
inputs.padded(b)(i)(c)(h) = inputs.nonPadded(b)(i)(c)(h).padTo(inputShape.sizePadded, 0.0f)
// Add empty lines
for {b <- 0 until inputShape.nBatches; i <- 0 until inputShape.nInputs; c <- 0 until inputShape.nChannels}
inputs.padded(b)(i)(c) = inputs.padded(b)(i)(c).padTo(
inputShape.sizePadded,
Array.fill[Float](inputShape.sizePadded)(0))
// // Add empty lines
// for {b <- 0 until inputShape.nBatches; i <- 0 until inputShape.nInputs}
// inputs.padded(b)(i) = inputs.nonPadded(b)(i).padTo(
// inputShape.sizePadded,
// Array.fill[Array[Float]](inputShape.sizePadded)(
// Array.fill[Float](inputShape.nChannels)(0)))
// // Add empty elements to lines
// for {b <- 0 until inputShape.nBatches; i <- 0 until inputShape.nInputs; h <- 0 until inputShape.size}
// inputs.padded(b)(i)(h) = inputs.nonPadded(b)(i)(h).padTo(
// inputShape.sizePadded,
// Array.fill[Float](inputShape.nChannels)(0))
}
}
/**
* Case class for storing the layer configuration.
* Configuration is to be preprocessed and verified by the companion object below.
* @param liftFProp
* @param inputShape
 * @param outputShape
 * @param inputTiling
* @param kernelSliding
* @param elsPerThread
* @param kernelsPerGroup
* @param localSize
* @param globalSize
*/
case class Conv1(override val liftFProp: Array[FunDecl],
override val inputShape: Shape, override val outputShape: Shape,
override val inputTiling: SlidingWindowConfig, override val kernelSliding: SlidingWindowConfig,
override val elsPerThread: Int, override val kernelsPerGroup: Int,
override val vectorLen: Int, override val coalesce: Boolean, override val unrollReduce: Boolean,
override val localSize: Array[Int], override val globalSize: Array[Int])
extends Conv(liftFProp, inputShape, outputShape, inputTiling, kernelSliding,
elsPerThread, kernelsPerGroup, vectorLen, coalesce, unrollReduce, localSize, globalSize) {
override def toString: String =
nn.conv.configToString(inputShape.size, outputShape.sizePadded, elsPerThread, outputShape.nChannels,
kernelsPerGroup, vectorLen, coalesce, unrollReduce, kernelSliding.size, kernelSliding.stride, inputTiling.size)
var runtime: Double = 0
def groupAndUnpad(outputsFlat: Array[Float], datasets: NetDatasets): Unit = {
datasets.asInstanceOf[ConvDatasets].outputs.nonPadded =
nn.group(outputsFlat, (outputShape.nBatches, outputShape.nInputs,
outputShape.nChannels, outputShape.sizePadded, outputShape.sizePadded)).map(
batch => batch.map(
input => input.map(
channel => channel.map(
row => row.slice(0, outputShape.size)
).slice(0, outputShape.size))))
}
} | lift-project/lift | src/test/nn/conv/versions/Conv1.scala | Scala | mit | 26,173 |
package org.refptr.iscala
import sun.misc.{Signal,SignalHandler}
import org.zeromq.ZMQ
import scalax.io.JavaConverters._
import scalax.file.Path
import json.JsonUtil._
import msg._
object IScala extends App {
private val options = new Options(args)
def config: Options#Config = options.config
val thread = new Thread {
override def run() {
val iscala = new IScala(options.config)
iscala.heartBeat.join()
}
}
thread.setName("IScala")
thread.setDaemon(true)
thread.start()
thread.join()
}
class IScala(config: Options#Config) extends Parent {
val connection = config.connection_file match {
case Some(path) => path.string.as[Connection]
case None =>
val file = Path(s"kernel-${Util.getpid()}.json")
logger.info(s"connect ipython with --existing ${file.toAbsolute.path}")
val connection = Connection.default
file.write(toJSON(connection))
connection
}
val classpath = {
val (baseClasspath, baseModules) = config.javacp match {
case false => ("", Modules.Compiler :: Nil)
case true => (sys.props("java.class.path"), Nil)
}
val modules = baseModules ++ config.modules
val resolvers = config.resolvers
val resolved = Sbt.resolve(modules, resolvers).map(_.classpath) getOrElse {
sys.error("Failed to resolve dependencies")
}
ClassPath.join(baseClasspath, config.classpath, resolved)
}
val interpreter = new Interpreter(classpath, config.args)
val zmq = new Sockets(connection)
val ipy = new Communication(zmq, connection)
def welcome() {
import scala.util.Properties._
println(s"Welcome to Scala $versionNumberString ($javaVmName, Java $javaVersion)")
}
Runtime.getRuntime().addShutdownHook(new Thread() {
override def run() {
logger.debug("Terminating IScala")
interpreter.finish()
}
})
Signal.handle(new Signal("INT"), new SignalHandler {
private var previously = System.currentTimeMillis
def handle(signal: Signal) {
if (!config.parent) {
val now = System.currentTimeMillis
if (now - previously < 500) sys.exit() else previously = now
}
interpreter.cancel()
}
})
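  // Heartbeat echo: proxying the heartbeat socket onto itself bounces every ping
  // straight back, which is all the IPython frontend needs to see the kernel is alive.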
class HeartBeat extends Thread {
override def run() {
ZMQ.proxy(zmq.heartbeat, zmq.heartbeat, null)
}
}
(config.connection_file, config.parent) match {
case (Some(file), true) =>
// This setup means that this kernel was started by IPython. Currently
// IPython is unable to terminate IScala without explicitly killing it
// or sending shutdown_request. To fix that, IScala watches the profile
// file whether it exists or not. When the file is removed, IScala is
// terminated.
class FileWatcher(file: Path, interval: Int) extends Thread {
override def run() {
while (true) {
if (file.exists) Thread.sleep(interval)
else sys.exit()
}
}
}
val fileWatcher = new FileWatcher(file, 1000)
fileWatcher.setName(s"FileWatcher(${file.path})")
fileWatcher.start()
case _ =>
}
val ExecuteHandler = new ExecuteHandler(this)
val CompleteHandler = new CompleteHandler(this)
val KernelInfoHandler = new KernelInfoHandler(this)
val ObjectInfoHandler = new ObjectInfoHandler(this)
val ConnectHandler = new ConnectHandler(this)
val ShutdownHandler = new ShutdownHandler(this)
val HistoryHandler = new HistoryHandler(this)
val CommOpenHandler = new CommOpenHandler(this)
val CommMsgHandler = new CommMsgHandler(this)
val CommCloseHandler = new CommCloseHandler(this)
class Conn(msg: Msg[_]) extends display.Conn {
def display_data(data: Data) {
ipy.send_display_data(msg, data)
}
}
class EventLoop(socket: ZMQ.Socket) extends Thread {
def dispatch[T <: FromIPython](msg: Msg[T]) {
display.IScala.withConn(new Conn(msg)) {
msg.header.msg_type match {
case MsgType.execute_request => ExecuteHandler(socket, msg.asInstanceOf[Msg[execute_request]])
case MsgType.complete_request => CompleteHandler(socket, msg.asInstanceOf[Msg[complete_request]])
case MsgType.kernel_info_request => KernelInfoHandler(socket, msg.asInstanceOf[Msg[kernel_info_request]])
case MsgType.object_info_request => ObjectInfoHandler(socket, msg.asInstanceOf[Msg[object_info_request]])
case MsgType.connect_request => ConnectHandler(socket, msg.asInstanceOf[Msg[connect_request]])
case MsgType.shutdown_request => ShutdownHandler(socket, msg.asInstanceOf[Msg[shutdown_request]])
case MsgType.history_request => HistoryHandler(socket, msg.asInstanceOf[Msg[history_request]])
case MsgType.comm_open => CommOpenHandler(socket, msg.asInstanceOf[Msg[comm_open]])
case MsgType.comm_msg => CommMsgHandler(socket, msg.asInstanceOf[Msg[comm_msg]])
case MsgType.comm_close => CommCloseHandler(socket, msg.asInstanceOf[Msg[comm_close]])
case _ =>
}
}
}
override def run() {
try {
while (true) {
ipy.recv(socket).foreach(dispatch)
}
} catch {
case exc: Exception =>
zmq.terminate() // this will gracefully terminate heartbeat
throw exc
}
}
}
val heartBeat = new HeartBeat
heartBeat.setName("HeartBeat")
heartBeat.start()
logger.debug("Starting kernel event loop")
ipy.send_status(ExecutionState.starting)
val requestsLoop = new EventLoop(zmq.requests)
requestsLoop.setName("RequestsEventLoop")
requestsLoop.start()
welcome()
}
| nkhuyu/IScala | src/main/scala/IScala.scala | Scala | mit | 6,388 |
package twitter.github.io.finatra.quickstart
import com.google.inject.testing.fieldbinder.Bind
import com.twitter.finagle.http.Status._
import com.twitter.finatra.http.test.EmbeddedHttpServer
import com.twitter.inject.Mockito
import com.twitter.inject.server.FeatureTest
import com.twitter.util.Future
import finatra.quickstart.TwitterCloneServer
import finatra.quickstart.domain.{Status, StatusId}
import finatra.quickstart.firebase.FirebaseClient
import finatra.quickstart.services.IdService
class TwitterCloneFeatureTest extends FeatureTest with Mockito {
override val server = new EmbeddedHttpServer(
twitterServer = new TwitterCloneServer {
override val overrideModules = Seq(integrationTestModule)
})
@Bind val firebaseClient = smartMock[FirebaseClient]
@Bind val idService = smartMock[IdService]
"tweet creation" in {
idService.getId returns Future(StatusId("123"))
val mockStatus = Status(
id = StatusId("123"),
text = "Hello #SFScala",
lat = Some(37.7821120598956),
long = Some(-122.400612831116),
sensitive = false)
firebaseClient.put("/statuses/123.json", mockStatus) returns Future.Unit
firebaseClient.get("/statuses/123.json")(manifest[Status]) returns Future(Option(mockStatus))
val result = server.httpPost(
path = "/tweet",
postBody = """
{
"message": "Hello #SFScala",
"location": {
"lat": "37.7821120598956",
"long": "-122.400612831116"
},
"sensitive": false
}""",
andExpect = Created,
withJsonBody = """
{
"id": "123",
"message": "Hello #SFScala",
"location": {
"lat": "37.7821120598956",
"long": "-122.400612831116"
},
"sensitive": false
}""")
server.httpGet(
path = result.location.get,
andExpect = Ok,
withJsonBody = result.contentString)
}
"Post bad tweet" in {
server.httpPost(
path = "/tweet",
postBody = """
{
"message": "",
"location": {
"lat": "9999"
},
"sensitive": "abc"
}""",
andExpect = BadRequest,
withJsonBody = """
{
"errors" : [
"message: size [0] is not between 1 and 140",
"location.lat: [9999.0] is not between -85 and 85",
"location.long: field is required",
"sensitive: 'abc' is not a valid boolean"
]
}
""")
}
}
| tom-chan/finatra | examples/finatra-twitter-clone/src/test/scala/twitter/github/io/finatra/quickstart/TwitterCloneFeatureTest.scala | Scala | apache-2.0 | 2,547 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.kafka010
import java.{util => ju}
import scala.collection.JavaConverters._
import scala.collection.mutable.ArrayBuffer
import scala.util.control.NonFatal
import org.apache.kafka.clients.consumer.{Consumer, ConsumerConfig, OffsetAndTimestamp}
import org.apache.kafka.common.TopicPartition
import org.apache.spark.SparkEnv
import org.apache.spark.internal.Logging
import org.apache.spark.scheduler.ExecutorCacheTaskLocation
import org.apache.spark.sql.catalyst.util.CaseInsensitiveMap
import org.apache.spark.util.{UninterruptibleThread, UninterruptibleThreadRunner}
/**
* This class uses Kafka's own [[org.apache.kafka.clients.consumer.KafkaConsumer]] API to
* read data offsets from Kafka.
* The [[ConsumerStrategy]] class defines which Kafka topics and partitions should be read
 * by this source. These strategies directly correspond to the different consumption options
 * in Kafka. This class is designed to return a configured
* [[org.apache.kafka.clients.consumer.KafkaConsumer]] that is used by the
* [[KafkaSource]] to query for the offsets. See the docs on
* [[org.apache.spark.sql.kafka010.ConsumerStrategy]]
* for more details.
*
* Note: This class is not ThreadSafe
*/
private[kafka010] class KafkaOffsetReaderConsumer(
consumerStrategy: ConsumerStrategy,
override val driverKafkaParams: ju.Map[String, Object],
readerOptions: CaseInsensitiveMap[String],
driverGroupIdPrefix: String) extends KafkaOffsetReader with Logging {
/**
* [[UninterruptibleThreadRunner]] ensures that all
   * [[org.apache.kafka.clients.consumer.KafkaConsumer]] communication runs in an
* [[UninterruptibleThread]]. In the case of streaming queries, we are already running in an
* [[UninterruptibleThread]], however for batch mode this is not the case.
*/
val uninterruptibleThreadRunner = new UninterruptibleThreadRunner("Kafka Offset Reader")
/**
* Place [[groupId]] and [[nextId]] here so that they are initialized before any consumer is
* created -- see SPARK-19564.
*/
private var groupId: String = null
private var nextId = 0
/**
* A KafkaConsumer used in the driver to query the latest Kafka offsets. This only queries the
* offsets and never commits them.
*/
@volatile protected var _consumer: Consumer[Array[Byte], Array[Byte]] = null
protected def consumer: Consumer[Array[Byte], Array[Byte]] = synchronized {
assert(Thread.currentThread().isInstanceOf[UninterruptibleThread])
if (_consumer == null) {
val newKafkaParams = new ju.HashMap[String, Object](driverKafkaParams)
if (driverKafkaParams.get(ConsumerConfig.GROUP_ID_CONFIG) == null) {
newKafkaParams.put(ConsumerConfig.GROUP_ID_CONFIG, nextGroupId())
}
_consumer = consumerStrategy.createConsumer(newKafkaParams)
}
_consumer
}
private[kafka010] val maxOffsetFetchAttempts =
readerOptions.getOrElse(KafkaSourceProvider.FETCH_OFFSET_NUM_RETRY, "3").toInt
/**
* Number of partitions to read from Kafka. If this value is greater than the number of Kafka
* topicPartitions, we will split up the read tasks of the skewed partitions to multiple Spark
   * tasks. The number of Spark tasks will be *approximately* `minPartitions`. It can be less or
* more depending on rounding errors or Kafka partitions that didn't receive any new data.
*/
private val minPartitions =
readerOptions.get(KafkaSourceProvider.MIN_PARTITIONS_OPTION_KEY).map(_.toInt)
private val rangeCalculator = new KafkaOffsetRangeCalculator(minPartitions)
private[kafka010] val offsetFetchAttemptIntervalMs =
readerOptions.getOrElse(KafkaSourceProvider.FETCH_OFFSET_RETRY_INTERVAL_MS, "1000").toLong
/**
* Whether we should divide Kafka TopicPartitions with a lot of data into smaller Spark tasks.
*/
private def shouldDivvyUpLargePartitions(numTopicPartitions: Int): Boolean = {
minPartitions.map(_ > numTopicPartitions).getOrElse(false)
}
private def nextGroupId(): String = {
groupId = driverGroupIdPrefix + "-" + nextId
nextId += 1
groupId
}
override def toString(): String = consumerStrategy.toString
override def close(): Unit = {
if (_consumer != null) uninterruptibleThreadRunner.runUninterruptibly { stopConsumer() }
uninterruptibleThreadRunner.shutdown()
}
/**
* @return The Set of TopicPartitions for a given topic
*/
private def fetchTopicPartitions(): Set[TopicPartition] =
uninterruptibleThreadRunner.runUninterruptibly {
assert(Thread.currentThread().isInstanceOf[UninterruptibleThread])
// Poll to get the latest assigned partitions
consumer.poll(0)
val partitions = consumer.assignment()
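      // Pause the assigned partitions: this driver-side consumer only queries offsets
      // and must never actually fetch records.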
consumer.pause(partitions)
partitions.asScala.toSet
}
override def fetchPartitionOffsets(
offsetRangeLimit: KafkaOffsetRangeLimit,
isStartingOffsets: Boolean): Map[TopicPartition, Long] = {
def validateTopicPartitions(partitions: Set[TopicPartition],
partitionOffsets: Map[TopicPartition, Long]): Map[TopicPartition, Long] = {
      assert(partitions == partitionOffsets.keySet,
        "If startingOffsets contains specific offsets, you must specify all TopicPartitions.\n" +
          "Use -1 for latest, -2 for earliest.\n" +
s"Specified: ${partitionOffsets.keySet} Assigned: ${partitions}")
logDebug(s"Partitions assigned to consumer: $partitions. Seeking to $partitionOffsets")
partitionOffsets
}
val partitions = fetchTopicPartitions()
// Obtain TopicPartition offsets with late binding support
offsetRangeLimit match {
case EarliestOffsetRangeLimit => partitions.map {
case tp => tp -> KafkaOffsetRangeLimit.EARLIEST
}.toMap
case LatestOffsetRangeLimit => partitions.map {
case tp => tp -> KafkaOffsetRangeLimit.LATEST
}.toMap
case SpecificOffsetRangeLimit(partitionOffsets) =>
validateTopicPartitions(partitions, partitionOffsets)
case SpecificTimestampRangeLimit(partitionTimestamps) =>
fetchSpecificTimestampBasedOffsets(partitionTimestamps,
failsOnNoMatchingOffset = isStartingOffsets).partitionToOffsets
}
}
override def fetchSpecificOffsets(
partitionOffsets: Map[TopicPartition, Long],
reportDataLoss: String => Unit): KafkaSourceOffset = {
val fnAssertParametersWithPartitions: ju.Set[TopicPartition] => Unit = { partitions =>
      assert(partitions.asScala == partitionOffsets.keySet,
        "If startingOffsets contains specific offsets, you must specify all TopicPartitions.\n" +
          "Use -1 for latest, -2 for earliest, if you don't care.\n" +
s"Specified: ${partitionOffsets.keySet} Assigned: ${partitions.asScala}")
logDebug(s"Partitions assigned to consumer: $partitions. Seeking to $partitionOffsets")
}
val fnRetrievePartitionOffsets: ju.Set[TopicPartition] => Map[TopicPartition, Long] = { _ =>
partitionOffsets
}
val fnAssertFetchedOffsets: Map[TopicPartition, Long] => Unit = { fetched =>
partitionOffsets.foreach {
case (tp, off) if off != KafkaOffsetRangeLimit.LATEST &&
off != KafkaOffsetRangeLimit.EARLIEST =>
if (fetched(tp) != off) {
reportDataLoss(
s"startingOffsets for $tp was $off but consumer reset to ${fetched(tp)}")
}
case _ =>
// no real way to check that beginning or end is reasonable
}
}
fetchSpecificOffsets0(fnAssertParametersWithPartitions, fnRetrievePartitionOffsets,
fnAssertFetchedOffsets)
}
override def fetchSpecificTimestampBasedOffsets(
partitionTimestamps: Map[TopicPartition, Long],
failsOnNoMatchingOffset: Boolean): KafkaSourceOffset = {
val fnAssertParametersWithPartitions: ju.Set[TopicPartition] => Unit = { partitions =>
assert(partitions.asScala == partitionTimestamps.keySet,
"If starting/endingOffsetsByTimestamp contains specific offsets, you must specify all " +
s"topics. Specified: ${partitionTimestamps.keySet} Assigned: ${partitions.asScala}")
logDebug(s"Partitions assigned to consumer: $partitions. Seeking to $partitionTimestamps")
}
val fnRetrievePartitionOffsets: ju.Set[TopicPartition] => Map[TopicPartition, Long] = { _ => {
val converted = partitionTimestamps.map { case (tp, timestamp) =>
tp -> java.lang.Long.valueOf(timestamp)
}.asJava
val offsetForTime: ju.Map[TopicPartition, OffsetAndTimestamp] =
consumer.offsetsForTimes(converted)
offsetForTime.asScala.map { case (tp, offsetAndTimestamp) =>
if (failsOnNoMatchingOffset) {
assert(offsetAndTimestamp != null, "No offset matched from request of " +
s"topic-partition $tp and timestamp ${partitionTimestamps(tp)}.")
}
if (offsetAndTimestamp == null) {
tp -> KafkaOffsetRangeLimit.LATEST
} else {
tp -> offsetAndTimestamp.offset()
}
}.toMap
}
}
val fnAssertFetchedOffsets: Map[TopicPartition, Long] => Unit = { _ => }
fetchSpecificOffsets0(fnAssertParametersWithPartitions, fnRetrievePartitionOffsets,
fnAssertFetchedOffsets)
}
private def fetchSpecificOffsets0(
fnAssertParametersWithPartitions: ju.Set[TopicPartition] => Unit,
fnRetrievePartitionOffsets: ju.Set[TopicPartition] => Map[TopicPartition, Long],
fnAssertFetchedOffsets: Map[TopicPartition, Long] => Unit): KafkaSourceOffset = {
val fetched = partitionsAssignedToConsumer {
partitions => {
fnAssertParametersWithPartitions(partitions)
val partitionOffsets = fnRetrievePartitionOffsets(partitions)
partitionOffsets.foreach {
case (tp, KafkaOffsetRangeLimit.LATEST) =>
consumer.seekToEnd(ju.Arrays.asList(tp))
case (tp, KafkaOffsetRangeLimit.EARLIEST) =>
consumer.seekToBeginning(ju.Arrays.asList(tp))
case (tp, off) => consumer.seek(tp, off)
}
partitionOffsets.map {
case (tp, _) => tp -> consumer.position(tp)
}
}
}
fnAssertFetchedOffsets(fetched)
KafkaSourceOffset(fetched)
}
override def fetchEarliestOffsets(): Map[TopicPartition, Long] = partitionsAssignedToConsumer(
partitions => {
logDebug("Seeking to the beginning")
consumer.seekToBeginning(partitions)
val partitionOffsets = partitions.asScala.map(p => p -> consumer.position(p)).toMap
logDebug(s"Got earliest offsets for partition : $partitionOffsets")
partitionOffsets
}, fetchingEarliestOffset = true)
/**
* Specific to `KafkaOffsetReaderConsumer`:
* Kafka may return earliest offsets when we are requesting latest offsets if `poll` is called
* right before `seekToEnd` (KAFKA-7703). As a workaround, we will call `position` right after
* `poll` to wait until the potential offset request triggered by `poll(0)` is done.
*/
override def fetchLatestOffsets(
knownOffsets: Option[PartitionOffsetMap]): PartitionOffsetMap =
partitionsAssignedToConsumer { partitions => {
logDebug("Seeking to the end.")
if (knownOffsets.isEmpty) {
consumer.seekToEnd(partitions)
partitions.asScala.map(p => p -> consumer.position(p)).toMap
} else {
var partitionOffsets: PartitionOffsetMap = Map.empty
/**
* Compare `knownOffsets` and `partitionOffsets`. Returns all partitions that have incorrect
         * latest offset (offset in `knownOffsets` is greater than the one in `partitionOffsets`).
*/
def findIncorrectOffsets(): Seq[(TopicPartition, Long, Long)] = {
var incorrectOffsets = ArrayBuffer[(TopicPartition, Long, Long)]()
partitionOffsets.foreach { case (tp, offset) =>
knownOffsets.foreach(_.get(tp).foreach { knownOffset =>
if (knownOffset > offset) {
val incorrectOffset = (tp, knownOffset, offset)
incorrectOffsets += incorrectOffset
}
})
}
incorrectOffsets.toSeq
}
// Retry to fetch latest offsets when detecting incorrect offsets. We don't use
// `withRetriesWithoutInterrupt` to retry because:
//
// - `withRetriesWithoutInterrupt` will reset the consumer for each attempt but a fresh
// consumer has a much bigger chance to hit KAFKA-7703.
// - Avoid calling `consumer.poll(0)` which may cause KAFKA-7703.
var incorrectOffsets: Seq[(TopicPartition, Long, Long)] = Nil
var attempt = 0
do {
consumer.seekToEnd(partitions)
partitionOffsets = partitions.asScala.map(p => p -> consumer.position(p)).toMap
attempt += 1
incorrectOffsets = findIncorrectOffsets()
if (incorrectOffsets.nonEmpty) {
logWarning("Found incorrect offsets in some partitions " +
s"(partition, previous offset, fetched offset): $incorrectOffsets")
if (attempt < maxOffsetFetchAttempts) {
logWarning("Retrying to fetch latest offsets because of incorrect offsets")
Thread.sleep(offsetFetchAttemptIntervalMs)
}
}
} while (incorrectOffsets.nonEmpty && attempt < maxOffsetFetchAttempts)
logDebug(s"Got latest offsets for partition : $partitionOffsets")
partitionOffsets
}
}
}
override def fetchEarliestOffsets(
newPartitions: Seq[TopicPartition]): Map[TopicPartition, Long] = {
if (newPartitions.isEmpty) {
Map.empty[TopicPartition, Long]
} else {
partitionsAssignedToConsumer(partitions => {
// Get the earliest offset of each partition
consumer.seekToBeginning(partitions)
val partitionOffsets = newPartitions.filter { p =>
// When deleting topics happen at the same time, some partitions may not be in
// `partitions`. So we need to ignore them
partitions.contains(p)
}.map(p => p -> consumer.position(p)).toMap
logDebug(s"Got earliest offsets for new partitions: $partitionOffsets")
partitionOffsets
}, fetchingEarliestOffset = true)
}
}
override def getOffsetRangesFromUnresolvedOffsets(
startingOffsets: KafkaOffsetRangeLimit,
endingOffsets: KafkaOffsetRangeLimit): Seq[KafkaOffsetRange] = {
val fromPartitionOffsets = fetchPartitionOffsets(startingOffsets, isStartingOffsets = true)
val untilPartitionOffsets = fetchPartitionOffsets(endingOffsets, isStartingOffsets = false)
// Obtain topicPartitions in both from and until partition offset, ignoring
// topic partitions that were added and/or deleted between the two above calls.
if (fromPartitionOffsets.keySet != untilPartitionOffsets.keySet) {
implicit val topicOrdering: Ordering[TopicPartition] = Ordering.by(t => t.topic())
val fromTopics = fromPartitionOffsets.keySet.toList.sorted.mkString(",")
val untilTopics = untilPartitionOffsets.keySet.toList.sorted.mkString(",")
throw new IllegalStateException("different topic partitions " +
s"for starting offsets topics[${fromTopics}] and " +
s"ending offsets topics[${untilTopics}]")
}
// Calculate offset ranges
val offsetRangesBase = untilPartitionOffsets.keySet.map { tp =>
val fromOffset = fromPartitionOffsets.get(tp).getOrElse {
// This should not happen since topicPartitions contains all partitions not in
// fromPartitionOffsets
throw new IllegalStateException(s"$tp doesn't have a from offset")
}
val untilOffset = untilPartitionOffsets(tp)
KafkaOffsetRange(tp, fromOffset, untilOffset, None)
}.toSeq
if (shouldDivvyUpLargePartitions(offsetRangesBase.size)) {
val fromOffsetsMap =
offsetRangesBase.map(range => (range.topicPartition, range.fromOffset)).toMap
val untilOffsetsMap =
offsetRangesBase.map(range => (range.topicPartition, range.untilOffset)).toMap
// No need to report data loss here
val resolvedFromOffsets = fetchSpecificOffsets(fromOffsetsMap, _ => ()).partitionToOffsets
val resolvedUntilOffsets = fetchSpecificOffsets(untilOffsetsMap, _ => ()).partitionToOffsets
val ranges = offsetRangesBase.map(_.topicPartition).map { tp =>
KafkaOffsetRange(tp, resolvedFromOffsets(tp), resolvedUntilOffsets(tp), preferredLoc = None)
}
val divvied = rangeCalculator.getRanges(ranges).groupBy(_.topicPartition)
divvied.flatMap { case (tp, splitOffsetRanges) =>
if (splitOffsetRanges.length == 1) {
Seq(KafkaOffsetRange(tp, fromOffsetsMap(tp), untilOffsetsMap(tp), None))
} else {
// the list can't be empty
val first = splitOffsetRanges.head.copy(fromOffset = fromOffsetsMap(tp))
val end = splitOffsetRanges.last.copy(untilOffset = untilOffsetsMap(tp))
Seq(first) ++ splitOffsetRanges.drop(1).dropRight(1) :+ end
}
}.toArray.toSeq
} else {
offsetRangesBase
}
}
private def getSortedExecutorList(): Array[String] = {
def compare(a: ExecutorCacheTaskLocation, b: ExecutorCacheTaskLocation): Boolean = {
if (a.host == b.host) {
a.executorId > b.executorId
} else {
a.host > b.host
}
}
val bm = SparkEnv.get.blockManager
bm.master.getPeers(bm.blockManagerId).toArray
.map(x => ExecutorCacheTaskLocation(x.host, x.executorId))
.sortWith(compare)
.map(_.toString)
}
override def getOffsetRangesFromResolvedOffsets(
fromPartitionOffsets: PartitionOffsetMap,
untilPartitionOffsets: PartitionOffsetMap,
reportDataLoss: String => Unit): Seq[KafkaOffsetRange] = {
// Find the new partitions, and get their earliest offsets
val newPartitions = untilPartitionOffsets.keySet.diff(fromPartitionOffsets.keySet)
val newPartitionInitialOffsets = fetchEarliestOffsets(newPartitions.toSeq)
if (newPartitionInitialOffsets.keySet != newPartitions) {
// We cannot get from offsets for some partitions. It means they got deleted.
val deletedPartitions = newPartitions.diff(newPartitionInitialOffsets.keySet)
reportDataLoss(
s"Cannot find earliest offsets of ${deletedPartitions}. Some data may have been missed")
}
logInfo(s"Partitions added: $newPartitionInitialOffsets")
newPartitionInitialOffsets.filter(_._2 != 0).foreach { case (p, o) =>
reportDataLoss(
s"Added partition $p starts from $o instead of 0. Some data may have been missed")
}
val deletedPartitions = fromPartitionOffsets.keySet.diff(untilPartitionOffsets.keySet)
if (deletedPartitions.nonEmpty) {
val message = if (driverKafkaParams.containsKey(ConsumerConfig.GROUP_ID_CONFIG)) {
s"$deletedPartitions are gone. ${KafkaSourceProvider.CUSTOM_GROUP_ID_ERROR_MESSAGE}"
} else {
s"$deletedPartitions are gone. Some data may have been missed."
}
reportDataLoss(message)
}
// Use the until partitions to calculate offset ranges to ignore partitions that have
// been deleted
val topicPartitions = untilPartitionOffsets.keySet.filter { tp =>
// Ignore partitions that we don't know the from offsets.
newPartitionInitialOffsets.contains(tp) || fromPartitionOffsets.contains(tp)
}.toSeq
logDebug("TopicPartitions: " + topicPartitions.mkString(", "))
val fromOffsets = fromPartitionOffsets ++ newPartitionInitialOffsets
val untilOffsets = untilPartitionOffsets
val ranges = topicPartitions.map { tp =>
val fromOffset = fromOffsets(tp)
val untilOffset = untilOffsets(tp)
if (untilOffset < fromOffset) {
reportDataLoss(s"Partition $tp's offset was changed from " +
s"$fromOffset to $untilOffset, some data may have been missed")
}
KafkaOffsetRange(tp, fromOffset, untilOffset, preferredLoc = None)
}
rangeCalculator.getRanges(ranges, getSortedExecutorList)
}
private def partitionsAssignedToConsumer(
body: ju.Set[TopicPartition] => Map[TopicPartition, Long],
fetchingEarliestOffset: Boolean = false)
: Map[TopicPartition, Long] = uninterruptibleThreadRunner.runUninterruptibly {
withRetriesWithoutInterrupt {
// Poll to get the latest assigned partitions
consumer.poll(0)
val partitions = consumer.assignment()
if (!fetchingEarliestOffset) {
// Call `position` to wait until the potential offset request triggered by `poll(0)` is
        // done. This is a workaround for KAFKA-7703, in which an async `seekToBeginning` triggered by
// `poll(0)` may reset offsets that should have been set by another request.
partitions.asScala.map(p => p -> consumer.position(p)).foreach(_ => {})
}
consumer.pause(partitions)
logDebug(s"Partitions assigned to consumer: $partitions.")
body(partitions)
}
}
/**
* Helper function that does multiple retries on a body of code that returns offsets.
   * Retries are needed to handle transient failures. For example, race conditions between getting
   * the assignment and getting the position while topics/partitions are deleted can cause NPEs.
*
* This method also makes sure `body` won't be interrupted to workaround a potential issue in
* `KafkaConsumer.poll`. (KAFKA-1894)
*/
private def withRetriesWithoutInterrupt(
body: => Map[TopicPartition, Long]): Map[TopicPartition, Long] = {
// Make sure `KafkaConsumer.poll` won't be interrupted (KAFKA-1894)
assert(Thread.currentThread().isInstanceOf[UninterruptibleThread])
synchronized {
var result: Option[Map[TopicPartition, Long]] = None
var attempt = 1
var lastException: Throwable = null
while (result.isEmpty && attempt <= maxOffsetFetchAttempts
&& !Thread.currentThread().isInterrupted) {
Thread.currentThread match {
case ut: UninterruptibleThread =>
// "KafkaConsumer.poll" may hang forever if the thread is interrupted (E.g., the query
// is stopped)(KAFKA-1894). Hence, we just make sure we don't interrupt it.
//
// If the broker addresses are wrong, or Kafka cluster is down, "KafkaConsumer.poll" may
// hang forever as well. This cannot be resolved in KafkaSource until Kafka fixes the
// issue.
ut.runUninterruptibly {
try {
result = Some(body)
} catch {
case NonFatal(e) =>
lastException = e
logWarning(s"Error in attempt $attempt getting Kafka offsets: ", e)
attempt += 1
Thread.sleep(offsetFetchAttemptIntervalMs)
resetConsumer()
}
}
case _ =>
throw new IllegalStateException(
"Kafka APIs must be executed on a o.a.spark.util.UninterruptibleThread")
}
}
if (Thread.interrupted()) {
throw new InterruptedException()
}
if (result.isEmpty) {
assert(attempt > maxOffsetFetchAttempts)
assert(lastException != null)
throw lastException
}
result.get
}
}
private def stopConsumer(): Unit = synchronized {
assert(Thread.currentThread().isInstanceOf[UninterruptibleThread])
if (_consumer != null) _consumer.close()
}
private def resetConsumer(): Unit = synchronized {
stopConsumer()
_consumer = null // will automatically get reinitialized again
}
}
| witgo/spark | external/kafka-0-10-sql/src/main/scala/org/apache/spark/sql/kafka010/KafkaOffsetReaderConsumer.scala | Scala | apache-2.0 | 24,454 |
package org.flowpaint.raster.channel
import _root_.org.flowpaint.util.Rectangle
import collection._
import org.flowpaint.raster.blend.{OpaqueBlender, Blender}
/**
*
*/
// TODO: Undo history saving - do it via the command history: each command can store the blocks of the layers it changed
class Raster {
private var _channels: Map[Symbol, Channel] = Map()
// Map from row indexes to map from column indexes to blocks of pixel data.
// private val blocks: mutable.Map[Int, Map[Int, Block]] = new HashMap()
def channels: Map[Symbol, Channel] = _channels
// TODO: The default tiles for each channel for this raster (-> procedural tiles could also be used - converted to actual raster when modified)
// TODO: Should getBlocks instantiate missing blocks with default tiles in that case? do we even need it?
/*
def getBlocks(area: Rectangle): List[Block] = {
val xc1 = area.x1 / blockSize
val yc1 = area.y1 / blockSize
val xc2 = area.x2 / blockSize
val yc2 = area.y2 / blockSize
var result: List[Block] = Nil
var yc = yc1
while(yc <= yc2) {
val columns = blocks.getOrElse(yc, null)
if (columns != null) {
var xc = xc1
while(xc <= xc2) {
val block = columns.getOrElse(xc, null)
if (block != null) result = block :: result
xc += 1
}
}
yc += 1
}
result
}
*/
/**
* Renders the specified raster on top of this raster, for the specified area, with the specified blending function.
* The blending function to use is channel specific.
*/
def overlay(raster: Raster, area: Rectangle, channelBlenders: Map[Symbol, Blender] = Map()) {
val alpha = raster.channels.get('alpha)
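    // The overlaid raster's alpha channel (if present) weights the blend of every channel below.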
raster.channels.values foreach { c =>
val channelId = c.identifier
// Add missing channels
if (!_channels.contains(channelId)) _channels += channelId -> new ChannelImpl(channelId)
// Get blending func
val blender = channelBlenders.getOrElse(channelId, OpaqueBlender)
// Overlay
_channels(channelId).blend(c, area, alpha, blender)
}
}
}
| zzorn/flowpaint | src/main/scala/org/flowpaint/raster/channel/Raster.scala | Scala | gpl-2.0 | 2,120 |
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package views.businessdetails
import forms.{EmptyForm, Form2, InvalidForm, ValidForm}
import jto.validation.{Path, ValidationError}
import models.businessdetails.{PreviouslyRegistered, PreviouslyRegisteredYes}
import org.scalatest.MustMatchers
import play.api.i18n.Messages
import utils.AmlsViewSpec
import views.Fixture
import views.html.businessdetails.previously_registered
class previously_registeredSpec extends AmlsViewSpec with MustMatchers {
trait ViewFixture extends Fixture {
lazy val previously_registered = app.injector.instanceOf[previously_registered]
implicit val requestWithToken = addTokenForView()
}
"previously_registered view" must {
"have correct title" in new ViewFixture {
val form2: ValidForm[PreviouslyRegistered] = Form2(PreviouslyRegisteredYes(Some("prevMLRRegNo")))
def view = previously_registered(form2, true)
doc.title must startWith(Messages("businessdetails.registeredformlr.title") + " - " + Messages("summary.businessdetails"))
}
"have correct headings" in new ViewFixture {
val form2: ValidForm[PreviouslyRegistered] = Form2(PreviouslyRegisteredYes(Some("prevMLRRegNo")))
def view = previously_registered(form2, true)
heading.html must be(Messages("businessdetails.registeredformlr.title"))
subHeading.html must include(Messages("summary.businessdetails"))
}
"show errors in the correct locations" in new ViewFixture {
val form2: InvalidForm = InvalidForm(Map.empty,
Seq(
        (Path \ "previouslyRegistered") -> Seq(ValidationError("not a message Key"))
))
def view = previously_registered(form2, true)
errorSummary.html() must include("not a message Key")
doc.getElementById("previouslyRegistered")
.getElementsByClass("error-notification").first().html() must include("not a message Key")
}
"have a back link" in new ViewFixture {
val form2: Form2[_] = EmptyForm
def view = previously_registered(form2, true)
doc.getElementsByAttributeValue("class", "link-back") must not be empty
}
}
}
| hmrc/amls-frontend | test/views/businessdetails/previously_registeredSpec.scala | Scala | apache-2.0 | 2,714 |
package com.akkademy
import akka.actor.{Status, ActorSystem}
import akka.testkit.{TestProbe, TestActorRef}
import com.akkademy.messages.SetRequest
import com.typesafe.config.ConfigFactory
import org.scalatest.{FunSpecLike, Matchers}
class AkkademyDbSpec extends FunSpecLike with Matchers {
implicit val system = ActorSystem("system", ConfigFactory.empty) //ignore config for remoting
describe("akkademyDb") {
describe("given SetRequest"){
val testProbe = TestProbe()
it("should place key/value into map"){
val actorRef = TestActorRef(new AkkademyDb)
actorRef ! SetRequest("key", "value", testProbe.ref)
val akkademyDb = actorRef.underlyingActor
akkademyDb.map.get("key") should equal(Some("value"))
}
}
describe("given List[SetRequest]"){
it("should place key/values into map"){
val testProbe = TestProbe()
val actorRef = TestActorRef(new AkkademyDb)
actorRef ! List(
SetRequest("key", "value", testProbe.ref),
SetRequest("key2", "value2", testProbe.ref)
)
val akkademyDb = actorRef.underlyingActor
akkademyDb.map.get("key") should equal(Some("value"))
akkademyDb.map.get("key2") should equal(Some("value2"))
testProbe.expectMsg(Status.Success)
testProbe.expectMsg(Status.Success)
}
}
}
}
| jasongoodwin/learning-akka | ch4/akkademy-db-scala/src/test/scala/com/akkademy/AkkademyDbSpec.scala | Scala | apache-2.0 | 1,376 |
package dotty.tools.dotc
package transform
import ast.tpd
import core.Contexts.Context
import core.StdNames.nme
import core.Phases.Phase
import TreeTransforms.{MiniPhaseTransform, TransformerInfo}
/** Rewrite `getClass` calls as follows:
*
* For every instance of primitive class C whose boxed class is called B:
* instanceC.getClass -> B.TYPE
* For every instance of non-primitive class D:
* instanceD.getClass -> instanceD.getClass
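 *
 *  For example, `1.getClass` is rewritten to `java.lang.Integer.TYPE`, while a call on a
 *  non-primitive receiver such as `"a".getClass` is left unchanged.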
*/
class GetClass extends MiniPhaseTransform {
import tpd._
override def phaseName: String = "getClass"
override def runsAfter: Set[Class[_ <: Phase]] = Set(classOf[Erasure])
override def transformApply(tree: Apply)(implicit ctx: Context, info: TransformerInfo): Tree = {
import ast.Trees._
tree match {
case Apply(Select(qual, nme.getClass_), Nil) => tree.clsOf(qual.tpe.widen, tree)
case _ => tree
}
}
}
| densh/dotty | src/dotty/tools/dotc/transform/GetClass.scala | Scala | bsd-3-clause | 904 |
package eu.gruchala.typelevel.base
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
import scala.language.implicitConversions
object F_MagnetPattern {
class HttpResponse
sealed trait StatusCode
object StatusCode {
case object Ok extends StatusCode
case object Bad_Request extends StatusCode
}
trait RouteOps {
//The problem
def complete(status: StatusCode): Unit
def complete(response: HttpResponse): Int
    // def complete(future: Future[StatusCode]): Unit // does not compile: after erasure it
    // has the same signature as complete(Future[HttpResponse])
def complete(future: Future[HttpResponse]): Unit
def complete[T](obj: T): Int
def complete[T](statusCode: StatusCode, obj: T): Int
}
val ? = "What is Magnet pattern?"
sealed trait CompletionMagnet {
type Result
def apply(): Result
}
object CompletionMagnet {
implicit def fromStatusCode(statusCode: StatusCode): CompletionMagnet =
new CompletionMagnet {
override type Result = Int
override def apply(): Int = if (statusCode == StatusCode.Ok) 200 else 500
}
implicit def fromFutureStatusCode(future: Future[StatusCode]): CompletionMagnet =
new CompletionMagnet {
override type Result = Unit
override def apply(): Result = future onSuccess { case resp => s"log: Got $resp" }
}
//etc.
}
object MagnetRoute {
import CompletionMagnet._
    // The magnet pattern collapses all the overloads above into one method whose result
    // type is chosen by the implicitly-built magnet.
    def complete(magnet: CompletionMagnet): magnet.Result = magnet()
}
}
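// A hedged usage sketch (not part of the original file; the calls are illustrative): with
// the magnet's companion-object implicits in implicit scope, both calls compile against the
// single `complete` method, each returning the Result type its magnet selected:
//
//   import F_MagnetPattern._
//   MagnetRoute.complete(StatusCode.Ok)                    // via fromStatusCode, returns Int
//   MagnetRoute.complete(Future.successful(StatusCode.Ok)) // via fromFutureStatusCode, returns Unit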
| leszekgruchala/typelevel-programming-scala | src/main/scala/eu/gruchala/typelevel/base/F_MagnetPattern.scala | Scala | mit | 1,424 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.types
import scala.math.Ordering
import scala.reflect.runtime.universe.typeTag
import org.apache.spark.annotation.DeveloperApi
import org.apache.spark.sql.catalyst.ScalaReflectionLock
import org.apache.spark.unsafe.types.UTF8String
/**
* :: DeveloperApi ::
* The data type representing `String` values. Please use the singleton [[DataTypes.StringType]].
*/
@DeveloperApi
class StringType private() extends AtomicType {
  // The companion object and this class are separated so the companion object also subclasses
// this type. Otherwise, the companion object would be of type "StringType$" in byte code.
// Defined with a private constructor so the companion object is the only possible instantiation.
private[sql] type InternalType = UTF8String
@transient private[sql] lazy val tag = ScalaReflectionLock.synchronized { typeTag[InternalType] }
private[sql] val ordering = implicitly[Ordering[InternalType]]
/**
* The default size of a value of the StringType is 20 bytes.
*/
override def defaultSize: Int = 20
private[spark] override def asNullable: StringType = this
}
case object StringType extends StringType
| gioenn/xSpark | sql/catalyst/src/main/scala/org/apache/spark/sql/types/StringType.scala | Scala | apache-2.0 | 1,975 |
package com.typesafe.sbt.packager.archetypes
/**
* This object provides methods to generate scripts from templates. This involves
*
* <ol>
 * <li>processing - replacing placeholders with actual values</li>
 * <li>TODO: validating - check that no placeholders remain in the script</li>
* </ol>
*
* @example a bash script can be generated like this
* {{{
* val template = getClass getResource "template-your-bashscript"
* val replacements = Seq("name" -> "your-app", "custom" -> "1")
* TemplateWriter.generateScript(template, replacements)
* }}}
*
* @example a bat script can be generated like this
* {{{
* val template = getClass getResource "template-your-batscript"
* val replacements = Seq("name" -> "your-app", "custom" -> "1")
 * TemplateWriter.generateScript(template, replacements, "\r\n", TemplateWriter.batFriendlyKeySurround)
* }}}
*/
object TemplateWriter {
def defaultCharset: java.nio.charset.Charset = java.nio.charset.Charset.forName("UTF-8")
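  // Wraps a key in the regex that matches the ${{key}} placeholder syntax used in bash templates.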
  def bashFriendlyKeySurround(key: String) =
    "\\$\\{\\{" + key + "\\}\\}"
def batFriendlyKeySurround(key: String) =
"@@" + key + "@@"
private def replace(
line: String,
replacements: Seq[(String, String)],
keySurround: String => String): String = {
replacements.foldLeft(line) {
case (line, (key, value)) =>
keySurround(key).r.replaceAllIn(line, java.util.regex.Matcher.quoteReplacement(value))
}
}
private def replaceValues(
lines: Seq[String],
replacements: Seq[(String, String)],
eol: String,
keySurround: String => String): String = {
val sb = new StringBuilder
for (line <- lines) {
sb append replace(line, replacements, keySurround)
sb append eol
}
sb toString
}
def generateScript(
source: java.net.URL,
replacements: Seq[(String, String)],
    eol: String = "\n",
keySurround: String => String = bashFriendlyKeySurround,
charset: java.nio.charset.Charset = defaultCharset): String = {
val lines = sbt.IO.readLinesURL(source, charset)
replaceValues(lines, replacements, eol, keySurround)
}
def generateScriptFromString(
source: String,
replacements: Seq[(String, String)],
    eol: String = "\n",
keySurround: String => String = bashFriendlyKeySurround,
charset: java.nio.charset.Charset = defaultCharset): String = {
replaceValues(source split eol, replacements, eol, keySurround)
}
}
| benmccann/sbt-native-packager | src/main/scala/com/typesafe/sbt/packager/archetypes/TemplateWriter.scala | Scala | bsd-2-clause | 2,438 |
package scalax.chart
package exporting
import java.io._
import javax.xml.bind.DatatypeConverter
import org.jfree.chart.encoders.EncoderUtil
import org.jfree.graphics2d.svg._
import com.itextpdf.awt.{ DefaultFontMapper, FontMapper, PdfGraphics2D }
import com.itextpdf.text.{ Document, Rectangle }
import com.itextpdf.text.pdf.PdfWriter
/**
 * @define os the stream to which the output will be written
* @define file the output file
*/
private[exporting] trait Exporter extends Any with DocMacros {
private[exporting] final def managed[R <: Closeable](r: R)(f: R => Unit): Unit =
try { f(r) } finally { r.close() }
}
/** Exports charts to JPEG images.
*
* @see [[module.Exporting]]
*/
class JPEGExporter(val chart: Chart) extends AnyVal with Exporter {
/** Saves the chart as a JPEG image.
*
   * @param file $file
   * @param resolution $resolution
*
* @usecase def saveAsJPEG(file: String): Unit
* @inheritdoc
*/
def saveAsJPEG(file: String, resolution: (Int,Int) = Chart.Default.Resolution): Unit =
managed(new FileOutputStream(file)) { os ⇒ writeAsJPEG(os, resolution) }
/** Writes the chart as a JPEG image to the output stream.
*
* @param os $os
* @param resolution $resolution
*
* @usecase def writeAsJPEG(os: OutputStream): Unit
* @inheritdoc
*/
def writeAsJPEG(os: OutputStream, resolution: (Int,Int) = Chart.Default.Resolution): Unit =
os.write(encodeAsJPEG(resolution))
  /** Writes the chart as an inline image to an iTerm2 terminal window.
   *
   * @param resolution $resolution
   *
   * @usecase def writeToTerm(): Unit
   * @inheritdoc
   */
  def writeToTerm(resolution: (Int, Int) = Chart.Default.Resolution): Unit = {
val base64Jpg = DatatypeConverter.printBase64Binary(encodeAsJPEG(resolution))
    print(s"\033]1337;File=name=foo.jpg;size=${base64Jpg.length};inline=1:")
print(base64Jpg)
    println("\07")
}
/** Returns the chart as a byte encoded JPEG image.
*
* @param resolution $resolution
*
* @usecase def encodeAsJPEG(): Array[Byte]
* @inheritdoc
*/
def encodeAsJPEG(resolution: (Int, Int) = Chart.Default.Resolution): Array[Byte] = {
val (width, height) = resolution
val image = chart.peer.createBufferedImage(width, height, java.awt.image.BufferedImage.TYPE_INT_RGB, null)
EncoderUtil.encode(image, "jpeg")
}
}
/** Exports charts to PDF documents.
*
* @see [[module.Exporting]]
*
* @define fontMapper handles mappings between Java AWT Fonts and PDF fonts
*/
class PDFExporter(val chart: Chart) extends AnyVal with Exporter {
/** Returns a new default font mapper. */
final def DefaultFontMapper: FontMapper =
new DefaultFontMapper
/** Saves the chart as a PDF document.
*
* @param file $file
* @param resolution $resolution
* @param fontMapper $fontMapper
*
* @usecase def saveAsPDF(file: String): Unit
* @inheritdoc
*/
def saveAsPDF(file: String, resolution: (Int,Int) = Chart.Default.Resolution, fontMapper: FontMapper = DefaultFontMapper): Unit =
managed(new FileOutputStream(file)) { os => writeAsPDF(os, resolution, fontMapper) }
/** Writes the chart as a PDF document to the output stream.
*
* @param os $os
* @param resolution $resolution
* @param fontMapper $fontMapper
*
* @usecase def writeAsPDF(os: OutputStream): Unit
* @inheritdoc
*/
def writeAsPDF(os: OutputStream, resolution: (Int,Int) = Chart.Default.Resolution, fontMapper: FontMapper = DefaultFontMapper): Unit = {
val (width,height) = resolution
val pagesize = new Rectangle(width.toFloat, height.toFloat)
val document = new Document(pagesize)
try {
val writer = PdfWriter.getInstance(document, os)
document.open()
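      // Draw the chart through iText's Graphics2D bridge onto a page-sized template,
      // then stamp the template onto the page at the origin.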
val cb = writer.getDirectContent
val tp = cb.createTemplate(width.toFloat, height.toFloat)
val g2 = new PdfGraphics2D(tp, width.toFloat, height.toFloat, fontMapper)
val r2D = new java.awt.geom.Rectangle2D.Double(0, 0, width.toDouble, height.toDouble)
chart.peer.draw(g2, r2D)
g2.dispose()
cb.addTemplate(tp, 0, 0)
} finally {
document.close()
}
}
}
/** Exports charts to PNG images.
*
* @see [[module.Exporting]]
*/
class PNGExporter(val chart: Chart) extends AnyVal with Exporter {
/** Saves the chart as a PNG image.
*
* @param file $file
* @param resolution $resolution
*
* @usecase def saveAsPNG(file: String): Unit
* @inheritdoc
*/
def saveAsPNG(file: String, resolution: (Int,Int) = Chart.Default.Resolution): Unit =
managed(new FileOutputStream(file)) { os ⇒ writeAsPNG(os, resolution) }
/** Writes the chart as a PNG image to the output stream.
*
* @param os $os
* @param resolution $resolution
*
* @usecase def writeAsPNG(os: OutputStream): Unit
* @inheritdoc
*/
def writeAsPNG(os: OutputStream, resolution: (Int,Int) = Chart.Default.Resolution): Unit =
os.write(encodeAsPNG(resolution))
/** Returns the chart as a byte encoded PNG image.
*
* @param resolution $resolution
*
* @usecase def encodeAsPNG(): Array[Byte]
* @inheritdoc
*/
def encodeAsPNG(resolution: (Int, Int) = Chart.Default.Resolution): Array[Byte] = {
val (width, height) = resolution
val image = chart.peer.createBufferedImage(width, height)
EncoderUtil.encode(image, "png")
}
}
/** Exports charts to SVG images.
*
* @see [[module.Exporting]]
*/
class SVGExporter(val chart: Chart) extends AnyVal with Exporter {
/** Saves the chart as a SVG image.
*
* @param file $file
* @param resolution $resolution
*
* @usecase def saveAsSVG(file: String): Unit
* @inheritdoc
*/
def saveAsSVG(file: String, resolution: (Int,Int) = Chart.Default.Resolution): Unit = {
val (width, height) = resolution
val g2 = new SVGGraphics2D(width, height)
chart.peer.draw(g2, new java.awt.Rectangle(new java.awt.Dimension(width, height)))
val svg = g2.getSVGElement
g2.dispose()
SVGUtils.writeToSVG(new File(file), svg)
}
}
| wookietreiber/scala-chart | src/main/scala/scalax/chart/exporting.scala | Scala | lgpl-3.0 | 6,100 |
trait X {
type Y
abstract val v: Y // error: abstract term
abstract def y: Y // error: abstract term
}
implicit object Z { // error: implicit at toplevel
implicit case class C() // error: implicit classes may not be case classes
implicit type T = Int // error: implicit modifier cannot be used for types or traits
implicit trait U // error: implicit modifier cannot be used for types or traits
val x: X = new X {
type Y = Int
val v: Int = 1
}
var y: Int // error: only classes can have declared but undefined members
val z: Int = {
val u: Int // error: only classes can have declared but undefined members
1
}
}
trait T {
type X
def foo: Unit = {
var x: Int // error: only classes can have declared but undefined members
()
}
private def bar: Int // error: abstract member may not have private modifier
final def baz: Int // error: abstract member may not have final modifier
}
final sealed class A { // error: illegal combination of modifiers: final and sealed
private protected def f: Int = 1 // error: illegal combination of modifiers: private and protected
}
class E extends T {
abstract override def foo: Unit // error: abstract override only allowed for members of traits
}
trait U extends T {
abstract override type X // error: `abstract override' incompatible with type definition
@native def f(): Unit = 1 // error: `@native' members may not have implementation
}
trait TT extends AnyVal // error: trait TT cannot extend AnyVal
final trait UU // error: trait UU may not be `final'
| densh/dotty | tests/neg/validate.scala | Scala | bsd-3-clause | 1,561 |
package com.git.huanghaifeng.spark.wordcount
import org.apache.spark.SparkContext
object WordCount4 {
def main(args: Array[String]) {
val master = args.length match {
case x: Int if x > 0 => args(0)
case _ => "local"
}
val sc = new SparkContext(master, "WordCount", System.getenv("SPARK_HOME"))
val input = args.length match {
case x: Int if x > 1 => sc.textFile(args(1))
case _ => sc.parallelize(List("pandas", "i like pandas"))
}
val words = input.flatMap(line => line.split(" "))
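        // With an output path (third argument) we count distributedly via reduceByKey and
        // save the result; otherwise countByValue collects the counts onto the driver.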
args.length match {
case x: Int if x > 2 => {
val counts = words.map(word => (word, 1)).reduceByKey{ case (x, y) => x + y }
counts.saveAsTextFile(args(2))
}
case _ => {
val wc = words.countByValue()
println(wc.mkString(","))
}
}
}
}
| prucehuang/quickly-start-spark | src/main/scala/com/git/huanghaifeng/spark/wordcount/WordCount4.scala | Scala | apache-2.0 | 977 |
package org.example1_1.usage
trait Usage4 {
def myScope1(): Unit = {
import org.example1_1.declaration.Z
val zzz: Z = ???
import org.example1_1.declaration.Y
val yyy: Y = ???
import org.example1_1.declaration.data.X
val xxx: X = ???
}
def myScope2(): Unit = {
import org.example1_1.declaration.Z
import org.example1_1.declaration.X4
val zzz: Z = ???
import org.example1_1.declaration.Y
import org.example1_1.declaration.X5
val yyy: Y = ???
import org.example1_1.declaration.X6
import org.example1_1.declaration.data.X
val xxx: X = ???
}
}
| JetBrains/intellij-scala | scala/scala-impl/testdata/move/allInOne_1/after/org/example1_1/usage/Usage4.scala | Scala | apache-2.0 | 633 |
package akka.duke.taskmanager.event
import akka.duke.taskmanager.ComposableActor
import akka.duke.taskmanager.Message.Event
trait Listener { this: ComposableActor =>
receiveBuilder += {
case e: Event => handleEvent(e)
}
def handleEvent(event: Event): Unit
}
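// A hedged sketch of a concrete listener (not part of the original file; the actor body
// is an illustrative assumption about the ComposableActor API):
//
//   class LoggingListener extends ComposableActor with Listener {
//     def handleEvent(event: Event): Unit = println(s"received event: $event")
//   }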
| Taerus/taskmanager | taskmanager-core/src/main/scala/akka/duke/taskmanager/event/Listener.scala | Scala | bsd-3-clause | 275 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.mllib.evaluation
import org.apache.spark.rdd.RDD
/**
* Computes the area under the curve (AUC) using the trapezoidal rule.
*/
private[evaluation] object AreaUnderCurve {
/**
* Uses the trapezoidal rule to compute the area under the line connecting the two input points.
* @param points two 2D points stored in Seq
*/
private def trapezoid(points: Seq[(Double, Double)]): Double = {
require(points.length == 2)
val x = points.head
val y = points.last
(y._1 - x._1) * (y._2 + x._2) / 2.0
}
/**
* Returns the area under the given curve.
*
* @param curve an RDD of ordered 2D points stored in pairs representing a curve
*/
def of(curve: RDD[(Double, Double)]): Double = {
val localAreas = curve.mapPartitions { iter =>
if (iter.nonEmpty) {
var localArea = 0.0
var head = true
var firstPoint = (Double.NaN, Double.NaN)
var lastPoint = (Double.NaN, Double.NaN)
iter.sliding(2).foreach { points =>
if (head) {
firstPoint = points.head
head = false
}
lastPoint = points.last
if (points.length == 2) {
localArea += trapezoid(points)
}
}
Iterator.single((localArea, (firstPoint, lastPoint)))
} else {
Iterator.empty
}
}.collect()
localAreas.map(_._1).sum + localAreas.iterator.map(_._2)
.sliding(2).withPartial(false)
.map { case Seq((_, last1), (first2, _)) =>
trapezoid(Seq(last1, first2))
}.sum
}
/**
* Returns the area under the given curve.
*
* @param curve an iterator over ordered 2D points stored in pairs representing a curve
*/
def of(curve: Iterable[(Double, Double)]): Double = {
curve.iterator.sliding(2).withPartial(false).aggregate(0.0)(
seqop = (auc: Double, points: Seq[(Double, Double)]) => auc + trapezoid(points),
combop = _ + _
)
}
}
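// A minimal worked example (not part of the original file; note the object is
// private[evaluation], so this only compiles inside that package): the curve from
// (0.0, 0.0) to (1.0, 1.0) is a single trapezoid of area (1 - 0) * (1 + 0) / 2 = 0.5,
// so AreaUnderCurve.of(Seq((0.0, 0.0), (1.0, 1.0))) returns 0.5.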
| ueshin/apache-spark | mllib/src/main/scala/org/apache/spark/mllib/evaluation/AreaUnderCurve.scala | Scala | apache-2.0 | 2,775 |
/*
* Copyright 2017 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.accounts.frs102.boxes
import uk.gov.hmrc.ct.accounts.frs102.calculations.IntangibleAssetsCalculator
import uk.gov.hmrc.ct.accounts.frs102.retriever.{AbridgedAccountsBoxRetriever, Frs102AccountsBoxRetriever, FullAccountsBoxRetriever}
import uk.gov.hmrc.ct.box._
import uk.gov.hmrc.ct.box.retriever.BoxRetriever._
case class AC122(value: Option[Int]) extends CtBoxIdentifier(name = "Net book value at [POA END]")
with CtOptionalInteger
with Input
with ValidatableBox[Frs102AccountsBoxRetriever]
with Validators {
override def validate(boxRetriever: Frs102AccountsBoxRetriever): Set[CtValidation] = {
import boxRetriever._
failIf (anyHaveValue(ac42(), ac43()))(
collectErrors(
validateMoney(value, min = 0),
validateNetBookValueMatchesTotalAssets(boxRetriever)
)
)
}
def validateNetBookValueMatchesTotalAssets(boxRetriever: Frs102AccountsBoxRetriever)() = {
failIf(this.orZero != boxRetriever.ac42().orZero) {
Set(CtValidation(None, "error.intangible.assets.note.currentNetBookValue.notEqualToAssets"))
}
}
}
object AC122 extends Calculated[AC122, Frs102AccountsBoxRetriever]
with IntangibleAssetsCalculator {
override def calculate(boxRetriever: Frs102AccountsBoxRetriever): AC122 = {
boxRetriever match {
case x: AbridgedAccountsBoxRetriever => calculateAbridgedAC122(x.ac117(), x.ac121())
case x: FullAccountsBoxRetriever => calculateFullAC122(x.ac122A(), x.ac122B())
}
}
}
| liquidarmour/ct-calculations | src/main/scala/uk/gov/hmrc/ct/accounts/frs102/boxes/AC122.scala | Scala | apache-2.0 | 2,107 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.carbondata.spark.testsuite.booleantype
import java.io.{File, PrintWriter}
import org.apache.carbondata.core.constants.CarbonCommonConstants
import org.apache.carbondata.core.util.CarbonProperties
import org.apache.spark.sql.Row
import org.apache.spark.sql.test.util.QueryTest
import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach}
class BooleanDataTypesBigFileTest extends QueryTest with BeforeAndAfterEach with BeforeAndAfterAll {
val rootPath = new File(this.getClass.getResource("/").getPath
+ "../../../..").getCanonicalPath
override def beforeEach(): Unit = {
sql("drop table if exists boolean_table")
sql("drop table if exists boolean_table2")
sql("drop table if exists carbon_table")
sql("drop table if exists hive_table")
}
override def afterAll(): Unit = {
sql("drop table if exists boolean_table")
sql("drop table if exists boolean_table2")
sql("drop table if exists carbon_table")
sql("drop table if exists hive_table")
assert(BooleanFile.deleteFile(pathOfManyDataType))
assert(BooleanFile.deleteFile(pathOfOnlyBoolean))
}
val pathOfManyDataType = s"$rootPath/integration/spark2/src/test/resources/bool/supportBooleanBigFile.csv"
val pathOfOnlyBoolean = s"$rootPath/integration/spark2/src/test/resources/bool/supportBooleanBigFileOnlyBoolean.csv"
val trueNum = 10000
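  // Each generated CSV holds trueNum rows with booleanField = true plus trueNum / 10 rows
  // with false, which is why the count assertions below expect trueNum + trueNum / 10.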
override def beforeAll(): Unit = {
assert(BooleanFile.createBooleanFileWithOtherDataType(pathOfManyDataType, trueNum))
assert(BooleanFile.createOnlyBooleanFile(pathOfOnlyBoolean, trueNum))
}
test("Loading table: support boolean and other data type, big file") {
sql(
s"""
| CREATE TABLE boolean_table(
| intField INT,
| booleanField BOOLEAN,
| stringField STRING,
| doubleField DOUBLE,
| booleanField2 BOOLEAN
| )
| STORED BY 'carbondata'
""".stripMargin)
sql(
s"""
| LOAD DATA LOCAL INPATH '${pathOfManyDataType}'
| INTO TABLE boolean_table
| options('FILEHEADER'='intField,booleanField,stringField,doubleField,booleanField2')
""".stripMargin)
checkAnswer(
sql("select count(*) from boolean_table"),
Row(trueNum + trueNum / 10))
}
test("Inserting table: support boolean and other data type, big file") {
sql(
s"""
| CREATE TABLE boolean_table(
| intField INT,
| booleanField BOOLEAN,
| stringField STRING,
| doubleField DOUBLE,
| booleanField2 BOOLEAN
| )
| STORED BY 'carbondata'
""".stripMargin)
sql(
s"""
| CREATE TABLE boolean_table2(
| intField INT,
| booleanField BOOLEAN,
| stringField STRING,
| doubleField DOUBLE,
| booleanField2 BOOLEAN
| )
| STORED BY 'carbondata'
""".stripMargin)
sql(
s"""
| LOAD DATA LOCAL INPATH '${pathOfManyDataType}'
| INTO TABLE boolean_table
| options('FILEHEADER'='intField,booleanField,stringField,doubleField,booleanField2')
""".stripMargin)
sql("insert into boolean_table2 select * from boolean_table")
checkAnswer(
sql("select count(*) from boolean_table2"),
Row(trueNum + trueNum / 10))
}
test("Filtering table: support boolean data type, only boolean, big file") {
sql(
s"""
| CREATE TABLE boolean_table(
| booleanField BOOLEAN
| )
| STORED BY 'carbondata'
""".stripMargin)
sql(
s"""
| LOAD DATA LOCAL INPATH '${pathOfOnlyBoolean}'
| INTO TABLE boolean_table
| options('FILEHEADER'='booleanField')
""".stripMargin)
checkAnswer(
sql("select count(*) from boolean_table"),
Row(trueNum + trueNum / 10))
checkAnswer(
sql("select count(*) from boolean_table where booleanField is not null"),
Row(trueNum + trueNum / 10))
checkAnswer(
sql("select count(*) from boolean_table where booleanField is null"),
Row(0))
checkAnswer(
sql("select count(*) from boolean_table where booleanField = true"),
Row(trueNum))
checkAnswer(
sql("select count(*) from boolean_table where booleanField >= true"),
Row(trueNum))
checkAnswer(
sql("select count(*) from boolean_table where booleanField > true"),
Row(0))
checkAnswer(
sql("select count(*) from boolean_table where booleanField < true"),
Row(trueNum / 10))
checkAnswer(
sql("select count(*) from boolean_table where booleanField = false"),
Row(trueNum / 10))
checkAnswer(
sql("select count(*) from boolean_table where booleanField <= false"),
Row(trueNum / 10))
checkAnswer(
sql("select count(*) from boolean_table where booleanField > false"),
Row(trueNum))
checkAnswer(
sql("select count(*) from boolean_table where booleanField < false"),
Row(0))
checkAnswer(
sql("select count(*) from boolean_table where booleanField in (false)"),
Row(trueNum / 10))
checkAnswer(
sql("select count(*) from boolean_table where booleanField not in (false)"),
Row(trueNum))
checkAnswer(
sql("select count(*) from boolean_table where booleanField in (true,false)"),
Row(trueNum + trueNum / 10))
checkAnswer(
sql("select count(*) from boolean_table where booleanField not in (true,false)"),
Row(0))
checkAnswer(
sql("select count(*) from boolean_table where booleanField like 'f%'"),
Row(trueNum / 10))
}
test("Filtering table: support boolean and other data type, big file") {
sql(
s"""
| CREATE TABLE boolean_table(
| intField INT,
| booleanField BOOLEAN,
| stringField STRING,
| doubleField DOUBLE,
| booleanField2 BOOLEAN
| )
| STORED BY 'carbondata'
""".stripMargin)
sql(
s"""
| LOAD DATA LOCAL INPATH '${pathOfManyDataType}'
| INTO TABLE boolean_table
| options('FILEHEADER'='intField,booleanField,stringField,doubleField,booleanField2')
""".stripMargin)
checkAnswer(
sql("select booleanField from boolean_table where intField >=1 and intField <11"),
Seq(Row(true), Row(true), Row(true), Row(true), Row(true), Row(true), Row(true), Row(true), Row(true), Row(true))
)
checkAnswer(
sql(s"select booleanField from boolean_table where intField >='${trueNum - 5}' and intField <=${trueNum + 1}"),
Seq(Row(true), Row(true), Row(true), Row(true), Row(true), Row(false), Row(false))
)
checkAnswer(
sql(s"select count(*) from boolean_table where intField >='${trueNum - 5}' and doubleField <=${trueNum + 1} and booleanField=false"),
Seq(Row(2))
)
checkAnswer(
sql(s"select * from boolean_table where intField >4 and doubleField < 6.0"),
Seq(Row(5, true, "num5", 5.0, false))
)
checkAnswer(
sql("select count(*) from boolean_table"),
Row(trueNum + trueNum / 10))
checkAnswer(
sql("select count(*) from boolean_table where booleanField is not null"),
Row(trueNum + trueNum / 10))
checkAnswer(
sql("select count(*) from boolean_table where booleanField is null"),
Row(0))
checkAnswer(
sql("select count(*) from boolean_table where booleanField = true"),
Row(trueNum))
checkAnswer(
sql("select count(*) from boolean_table where booleanField >= true"),
Row(trueNum))
checkAnswer(
sql("select count(*) from boolean_table where booleanField > true"),
Row(0))
checkAnswer(
sql("select count(*) from boolean_table where booleanField < true"),
Row(trueNum / 10))
checkAnswer(
sql("select count(*) from boolean_table where booleanField = false"),
Row(trueNum / 10))
checkAnswer(
sql("select count(*) from boolean_table where booleanField <= false"),
Row(trueNum / 10))
checkAnswer(
sql("select count(*) from boolean_table where booleanField > false"),
Row(trueNum))
checkAnswer(
sql("select count(*) from boolean_table where booleanField < false"),
Row(0))
checkAnswer(
sql("select count(*) from boolean_table where booleanField in (false)"),
Row(trueNum / 10))
checkAnswer(
sql("select count(*) from boolean_table where booleanField not in (false)"),
Row(trueNum))
checkAnswer(
sql("select count(*) from boolean_table where booleanField in (true,false)"),
Row(trueNum + trueNum / 10))
checkAnswer(
sql("select count(*) from boolean_table where booleanField not in (true,false)"),
Row(0))
checkAnswer(
sql("select count(*) from boolean_table where booleanField like 'f%'"),
Row(trueNum / 10))
}
test("Filtering table: support boolean and other data type, big file, load twice") {
sql(
s"""
| CREATE TABLE boolean_table(
| intField INT,
| booleanField BOOLEAN,
| stringField STRING,
| doubleField DOUBLE,
| booleanField2 BOOLEAN
| )
| STORED BY 'carbondata'
""".stripMargin)
val repeat: Int = 2
for (i <- 0 until repeat) {
sql(
s"""
| LOAD DATA LOCAL INPATH '${pathOfManyDataType}'
| INTO TABLE boolean_table
| options('FILEHEADER'='intField,booleanField,stringField,doubleField,booleanField2')
""".stripMargin
)
}
checkAnswer(
sql("select booleanField from boolean_table where intField >=1 and intField <11"),
Seq(Row(true), Row(true), Row(true), Row(true), Row(true), Row(true), Row(true), Row(true), Row(true), Row(true),
Row(true), Row(true), Row(true), Row(true), Row(true), Row(true), Row(true), Row(true), Row(true), Row(true))
)
checkAnswer(
sql(s"select booleanField from boolean_table where intField >='${trueNum - 5}' and intField <=${trueNum + 1}"),
Seq(Row(true), Row(true), Row(true), Row(true), Row(true), Row(false), Row(false),
Row(true), Row(true), Row(true), Row(true), Row(true), Row(false), Row(false))
)
checkAnswer(
sql(s"select count(*) from boolean_table where intField >='${trueNum - 5}' and doubleField <=${trueNum + 1} and booleanField=false"),
Seq(Row(4))
)
checkAnswer(
sql(s"select * from boolean_table where intField >4 and doubleField < 6.0"),
Seq(Row(5, true, "num5", 5.0, false), Row(5, true, "num5", 5.0, false))
)
checkAnswer(
sql("select count(*) from boolean_table"),
Row(repeat * (trueNum + trueNum / 10)))
checkAnswer(
sql("select count(*) from boolean_table where booleanField is not null"),
Row(repeat * (trueNum + trueNum / 10)))
checkAnswer(
sql("select count(*) from boolean_table where booleanField is null"),
Row(0))
checkAnswer(
sql("select count(*) from boolean_table where booleanField = true"),
Row(repeat * (trueNum)))
checkAnswer(
sql("select count(*) from boolean_table where booleanField >= true"),
Row(repeat * (trueNum)))
checkAnswer(
sql("select count(*) from boolean_table where booleanField > true"),
Row(0))
checkAnswer(
sql("select count(*) from boolean_table where booleanField < true"),
Row(repeat * (trueNum / 10)))
checkAnswer(
sql("select count(*) from boolean_table where booleanField = false"),
Row(repeat * (trueNum / 10)))
checkAnswer(
sql("select count(*) from boolean_table where booleanField <= false"),
Row(repeat * (trueNum / 10)))
checkAnswer(
sql("select count(*) from boolean_table where booleanField > false"),
Row(repeat * (trueNum)))
checkAnswer(
sql("select count(*) from boolean_table where booleanField < false"),
Row(0))
checkAnswer(
sql("select count(*) from boolean_table where booleanField in (false)"),
Row(repeat * (trueNum / 10)))
checkAnswer(
sql("select count(*) from boolean_table where booleanField not in (false)"),
Row(repeat * (trueNum)))
checkAnswer(
sql("select count(*) from boolean_table where booleanField in (true,false)"),
Row(repeat * (trueNum + trueNum / 10)))
checkAnswer(
sql("select count(*) from boolean_table where booleanField not in (true,false)"),
Row(0))
checkAnswer(
sql("select count(*) from boolean_table where booleanField like 'f%'"),
Row(repeat * (trueNum / 10)))
}
test("Sort_columns: support boolean and other data type, big file") {
sql(
s"""
| CREATE TABLE boolean_table(
| intField INT,
| booleanField BOOLEAN,
| stringField STRING,
| doubleField DOUBLE,
| booleanField2 BOOLEAN
| )
| STORED BY 'carbondata'
| TBLPROPERTIES('sort_columns'='booleanField')
""".stripMargin)
sql(
s"""
| LOAD DATA LOCAL INPATH '${pathOfManyDataType}'
| INTO TABLE boolean_table
| options('FILEHEADER'='intField,booleanField,stringField,doubleField,booleanField2')
""".stripMargin)
checkAnswer(
sql(s"select booleanField from boolean_table where intField >='${trueNum - 5}' and intField <=${trueNum + 1}"),
Seq(Row(true), Row(true), Row(true), Row(true), Row(true), Row(false), Row(false))
)
}
test("Inserting into Hive table from carbon table: support boolean data type and other format, big file") {
sql(
s"""
| CREATE TABLE carbon_table(
| intField INT,
| booleanField BOOLEAN,
| stringField STRING,
| doubleField DOUBLE,
| booleanField2 BOOLEAN
| )
| STORED BY 'carbondata'
""".stripMargin)
sql(
s"""
| CREATE TABLE hive_table(
| intField INT,
| booleanField BOOLEAN,
| stringField STRING,
| doubleField DOUBLE,
| booleanField2 BOOLEAN
| )
| ROW FORMAT DELIMITED FIELDS TERMINATED BY ','
""".stripMargin)
sql(
s"""
| LOAD DATA LOCAL INPATH '${pathOfManyDataType}'
| INTO TABLE carbon_table
| options('FILEHEADER'='intField,booleanField,stringField,doubleField,booleanField2')
""".stripMargin)
sql("insert into hive_table select * from carbon_table")
checkAnswer(
sql(s"select booleanField from hive_table where intField >='${trueNum - 5}' and intField <=${trueNum + 1}"),
Seq(Row(true), Row(true), Row(true), Row(true), Row(true), Row(false), Row(false))
)
checkAnswer(
sql(s"select * from hive_table where intField >4 and doubleField < 6.0"),
Seq(Row(5, true, "num5", 5.0, false))
)
checkAnswer(
sql("select count(*) from hive_table"),
Row(trueNum + trueNum / 10))
checkAnswer(
sql("select count(*) from hive_table where booleanField = true"),
Row(trueNum))
checkAnswer(
sql("select count(*) from hive_table where booleanField = false"),
Row(trueNum / 10))
}
test("Inserting into carbon table from Hive table: support boolean data type and other format, big file") {
sql(
s"""
| CREATE TABLE hive_table(
| intField INT,
| booleanField BOOLEAN,
| stringField STRING,
| doubleField DOUBLE,
| booleanField2 BOOLEAN
| )
| ROW FORMAT DELIMITED FIELDS TERMINATED BY ','
""".stripMargin)
sql(
s"""
| CREATE TABLE carbon_table(
| intField INT,
| booleanField BOOLEAN,
| stringField STRING,
| doubleField DOUBLE,
| booleanField2 BOOLEAN
| )
| STORED BY 'carbondata'
""".stripMargin)
sql(
s"""
| LOAD DATA LOCAL INPATH '${pathOfManyDataType}'
| INTO TABLE hive_table
""".stripMargin)
sql("insert into carbon_table select * from hive_table")
checkAnswer(
sql(s"select booleanField from carbon_table where intField >='${trueNum - 5}' and intField <=${trueNum + 1}"),
Seq(Row(true), Row(true), Row(true), Row(true), Row(true), Row(false), Row(false))
)
checkAnswer(
sql(s"select * from carbon_table where intField >4 and doubleField < 6.0"),
Seq(Row(5, true, "num5", 5.0, false))
)
checkAnswer(
sql("select count(*) from carbon_table"),
Row(trueNum + trueNum / 10))
checkAnswer(
sql("select count(*) from carbon_table where booleanField = true"),
Row(trueNum))
checkAnswer(
sql("select count(*) from carbon_table where booleanField = false"),
Row(trueNum / 10))
}
test("Filtering table: unsafe, support boolean and other data type, big file, load twice") {
initConf()
sql(
s"""
| CREATE TABLE boolean_table(
| intField INT,
| booleanField BOOLEAN,
| stringField STRING,
| doubleField DOUBLE,
| booleanField2 BOOLEAN
| )
| STORED BY 'carbondata'
""".stripMargin)
val repeat: Int = 2
for (i <- 0 until repeat) {
sql(
s"""
| LOAD DATA LOCAL INPATH '${pathOfManyDataType}'
| INTO TABLE boolean_table
| options('FILEHEADER'='intField,booleanField,stringField,doubleField,booleanField2')
""".stripMargin
)
}
checkAnswer(
sql("select booleanField from boolean_table where intField >=1 and intField <11"),
Seq(Row(true), Row(true), Row(true), Row(true), Row(true), Row(true), Row(true), Row(true), Row(true), Row(true),
Row(true), Row(true), Row(true), Row(true), Row(true), Row(true), Row(true), Row(true), Row(true), Row(true))
)
checkAnswer(
sql(s"select booleanField from boolean_table where intField >='${trueNum - 5}' and intField <=${trueNum + 1}"),
Seq(Row(true), Row(true), Row(true), Row(true), Row(true), Row(false), Row(false),
Row(true), Row(true), Row(true), Row(true), Row(true), Row(false), Row(false))
)
checkAnswer(
sql(s"select count(*) from boolean_table where intField >='${trueNum - 5}' and doubleField <=${trueNum + 1} and booleanField=false"),
Seq(Row(4))
)
checkAnswer(
sql(s"select * from boolean_table where intField >4 and doubleField < 6.0"),
Seq(Row(5, true, "num5", 5.0, false), Row(5, true, "num5", 5.0, false))
)
checkAnswer(
sql("select count(*) from boolean_table"),
Row(repeat * (trueNum + trueNum / 10)))
checkAnswer(
sql("select count(*) from boolean_table where booleanField is not null"),
Row(repeat * (trueNum + trueNum / 10)))
checkAnswer(
sql("select count(*) from boolean_table where booleanField is null"),
Row(0))
checkAnswer(
sql("select count(*) from boolean_table where booleanField = true"),
Row(repeat * (trueNum)))
checkAnswer(
sql("select count(*) from boolean_table where booleanField >= true"),
Row(repeat * (trueNum)))
checkAnswer(
sql("select count(*) from boolean_table where booleanField > true"),
Row(0))
checkAnswer(
sql("select count(*) from boolean_table where booleanField < true"),
Row(repeat * (trueNum / 10)))
checkAnswer(
sql("select count(*) from boolean_table where booleanField = false"),
Row(repeat * (trueNum / 10)))
checkAnswer(
sql("select count(*) from boolean_table where booleanField <= false"),
Row(repeat * (trueNum / 10)))
checkAnswer(
sql("select count(*) from boolean_table where booleanField > false"),
Row(repeat * (trueNum)))
checkAnswer(
sql("select count(*) from boolean_table where booleanField < false"),
Row(0))
checkAnswer(
sql("select count(*) from boolean_table where booleanField in (false)"),
Row(repeat * (trueNum / 10)))
checkAnswer(
sql("select count(*) from boolean_table where booleanField not in (false)"),
Row(repeat * (trueNum)))
checkAnswer(
sql("select count(*) from boolean_table where booleanField in (true,false)"),
Row(repeat * (trueNum + trueNum / 10)))
checkAnswer(
sql("select count(*) from boolean_table where booleanField not in (true,false)"),
Row(0))
checkAnswer(
sql("select count(*) from boolean_table where booleanField like 'f%'"),
Row(repeat * (trueNum / 10)))
defaultConf()
}
def initConf(): Unit = {
CarbonProperties.getInstance().
addProperty(CarbonCommonConstants.ENABLE_UNSAFE_COLUMN_PAGE_LOADING,
"true")
}
def defaultConf(): Unit = {
CarbonProperties.getInstance().
addProperty(CarbonCommonConstants.ENABLE_UNSAFE_COLUMN_PAGE_LOADING,
CarbonCommonConstants.ENABLE_DATA_LOADING_STATISTICS_DEFAULT)
}
}
object BooleanFile {
  // Writes `trueLines` rows of the form "<i>,true,num<i>,<d>,false" followed by
  // `trueLines / 10` rows of the form "<i>,false,num<i>,<d>,true". The writer is
  // now closed in a finally block so a failure cannot leak the file handle.
  def createBooleanFileWithOtherDataType(path: String, trueLines: Int): Boolean = {
    val write = new PrintWriter(path)
    try {
      var d: Double = 0.0
      for (i <- 0 until trueLines) {
        write.println(i + "," + true + ",num" + i + "," + d + "," + false)
        d = d + 1
      }
      for (i <- 0 until trueLines / 10) {
        write.println((trueLines + i) + "," + false + ",num" + (trueLines + i) + "," + d + "," + true)
        d = d + 1
      }
    } catch {
      case _: Exception => assert(false)
    } finally {
      write.close()
    }
    true
  }

  def deleteFile(path: String): Boolean = {
    try {
      new File(path).delete()
    } catch {
      case _: Exception => assert(false)
    }
    true
  }

  // Writes `num` lines of "true" followed by `num / 10` lines of "false".
  def createOnlyBooleanFile(path: String, num: Int): Boolean = {
    val write = new PrintWriter(path)
    try {
      for (i <- 0 until num) {
        write.println(true)
      }
      for (i <- 0 until num / 10) {
        write.println(false)
      }
    } catch {
      case _: Exception => assert(false)
    } finally {
      write.close()
    }
    true
  }
}
| HuaweiBigData/carbondata | integration/spark2/src/test/scala/org/apache/carbondata/spark/testsuite/booleantype/BooleanDataTypesBigFileTest.scala | Scala | apache-2.0 | 22,952 |
package scala.concurrent.duration
import org.junit.runner.RunWith
import org.junit.runners.JUnit4
import org.junit.Test
@RunWith(classOf[JUnit4])
class SpecialDurationsTest {
@Test
def test_11178(): Unit = {
assert(Duration(Duration.Inf.toString) eq Duration.Inf)
assert(Duration(Duration.MinusInf.toString) eq Duration.MinusInf)
assert(Duration(Duration.Undefined.toString) eq Duration.Undefined)
}
}
| scala/scala | test/junit/scala/concurrent/duration/SpecialDurationsTest.scala | Scala | apache-2.0 | 422 |
package me.invkrh.raft.core
import akka.actor.Actor
case class Holder(coll: List[Int])
// scalastyle:off println
class AddrChecker(a: Int) extends Actor {
override def receive: Receive = {
case h: Holder =>
println(a)
// println("In: " + h.toString)
// println("In: " + h.coll.toString)
// println("Before: " + h.coll)
// Thread.sleep(5000)
// println("After: " + h.coll)
context.system.terminate()
}
}
object SanityTest extends App {
val commitIndex = 5
val m = Map(1 -> 9, 2 -> 4, 3 -> 6, 4 -> 8, 5 -> 7)
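  // Worked reading of the two lines below: maj = (5 + 1) / 2 + 1 = 4; the values
  // greater than commitIndex = 5 are 9, 6, 8, 7; sorted descending they are
  // 9, 8, 7, 6, and the element at index maj - 1 = 3 is 6, so this prints 6.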
val maj = (m.size + 1) / 2 + 1
val res = m.values.filter(_ > commitIndex).toList.sortBy(-_).apply(maj - 1)
println(res)
// val system = ActorSystem("SanityTest")
//
// var coll = List(1, 2, 3, 4)
// val holder = Holder(coll)
// println("Out: " + holder.toString)
// println("Out: " + holder.coll.toString)
// println("Initial: " + holder.coll)
//
// var a = 1
// val checker = system.actorOf(Props(new AddrChecker(a)))
// a = 4
//
// checker ! holder
// Thread.sleep(3000)
// coll = List(4, 3, 2, 1)
}
// scalastyle:on println
| invkrh/akka-raft | src/main/scala/me/invkrh/raft/core/SanityTest.scala | Scala | mit | 1,111 |
package im.actor.server.dialog
import akka.actor.ActorSystem
import akka.util.Timeout
import im.actor.api.rpc.peers.{ ApiPeer, ApiPeerType }
import im.actor.server.dialog.Origin.{ RIGHT, LEFT }
import im.actor.server.group.{ GroupExtension, GroupViewRegion, GroupOffice }
import scala.concurrent.Future
object DialogId {
def privat(a: Int, b: Int): PrivateDialogId = {
val (left, right) = if (a > b) (b, a) else (a, b)
PrivateDialogId(left, right)
}
def group(groupId: Int): GroupDialogId = GroupDialogId(groupId)
def fromStringId(id: String): DialogId = {
id.split('_').toList match {
case head :: tail if head == ApiPeerType.Private.id.toString ⇒
privat(tail(0).toInt, tail(1).toInt)
case head :: tail if head == ApiPeerType.Group.id.toString ⇒
group(tail.head.toInt)
case unknown ⇒ throw new Exception(s"Unknown dialogId string ${unknown}")
}
}
def toPeer(dialogId: DialogId, clientUserId: Int): ApiPeer = {
dialogId match {
case id: PrivateDialogId ⇒
val userId = if (id.left == clientUserId) id.right else id.left
ApiPeer(ApiPeerType.Private, userId)
case id: GroupDialogId ⇒
ApiPeer(ApiPeerType.Group, id.groupId)
}
}
def fromPeer(dialogId: DialogId, clientUserId: Int): ApiPeer = {
dialogId match {
case id: PrivateDialogId ⇒
val userId = if (id.left == clientUserId) id.left else id.right
ApiPeer(ApiPeerType.Private, userId)
case id: GroupDialogId ⇒
ApiPeer(ApiPeerType.Group, id.groupId)
}
}
def getParticipants(dialogId: DialogId)(implicit system: ActorSystem, timeout: Timeout): Future[Seq[Int]] = {
import system.dispatcher
dialogId match {
case PrivateDialogId(left, right) ⇒
Future.successful(Seq(left, right))
case GroupDialogId(groupId) ⇒
implicit val groupViewRegion: GroupViewRegion = GroupExtension(system).viewRegion
for {
(userIds, _, _) ← GroupOffice.getMemberIds(groupId)
} yield userIds
}
}
}
trait DialogId {
def stringId: String
}
private[dialog] trait GroupDialogIdBase extends DialogId {
def groupId: Int
override def stringId: String = s"${ApiPeerType.Group.id}_${groupId}"
}
private[dialog] trait PrivateDialogIdBase extends DialogId {
require(right >= left, "Left should be >= right")
def left: Int
def right: Int
def origin(senderUserId: Int): Origin =
if (senderUserId == left) LEFT else RIGHT
override def stringId: String = s"${ApiPeerType.Private.id}_${left}_${right}"
}
| WangCrystal/actor-platform | actor-server/actor-core/src/main/scala/im/actor/server/dialog/DialogId.scala | Scala | mit | 2,592 |
package org.jetbrains.plugins.scala
package lang.psi.light.scala
import com.intellij.psi._
import com.intellij.psi.impl.light.LightElement
import org.jetbrains.plugins.scala.lang.psi.ScalaPsiElement
import org.jetbrains.plugins.scala.lang.psi.api.base.ScModifierList
import org.jetbrains.plugins.scala.lang.psi.api.base.types.ScTypeElement
import org.jetbrains.plugins.scala.lang.psi.api.expr.ScAnnotation
import org.jetbrains.plugins.scala.lang.psi.api.statements.ScFunctionDeclaration
import org.jetbrains.plugins.scala.lang.psi.api.statements.params._
import org.jetbrains.plugins.scala.lang.psi.types.ScType
import org.jetbrains.plugins.scala.lang.psi.types.nonvalue.TypeParameter
import org.jetbrains.plugins.scala.lang.psi.types.result.{Success, TypeResult}
/**
* @author Alefas
* @since 03/04/14.
*/
class ScLightFunctionDeclaration(pTypes: List[List[ScType]], tParams: List[TypeParameter], rt: ScType,
val fun: ScFunctionDeclaration)
extends LightElement(fun.getManager, fun.getLanguage) with ScFunctionDeclaration {
setNavigationElement(fun)
override def getParent: PsiElement = fun.getParent
override def typeParametersClause: Option[ScTypeParamClause] = fun.typeParametersClause.map(new ScLightTypeParamClause(tParams, _))
override def paramClauses: ScParameters = new ScLightParameters(pTypes, fun)
override def returnTypeInner: TypeResult[ScType] = Success(rt, Some(this))
override def definedReturnType: TypeResult[ScType] = Success(rt, Some(this))
override def declaredType: TypeResult[ScType] = Success(rt, Some(this))
override def hasExplicitType: Boolean = true
override def hasFinalModifier: Boolean = fun.hasFinalModifier
override def hasAbstractModifier: Boolean = fun.hasAbstractModifier
override def hasModifierPropertyScala(name: String): Boolean = fun.hasModifierPropertyScala(name)
override def getModifierList: ScModifierList = fun.getModifierList
override def returnTypeElement: Option[ScTypeElement] = fun.returnTypeElement
override def name: String = fun.name
override def toString: String = fun.toString
override def nameId: PsiElement = fun.nameId
override def hasAssign: Boolean = fun.hasAssign
override def getAnnotations: Array[PsiAnnotation] = fun.getAnnotations
override def getApplicableAnnotations: Array[PsiAnnotation] = fun.getApplicableAnnotations
override def findAnnotation(qualifiedName: String): PsiAnnotation = fun.findAnnotation(qualifiedName)
override def addAnnotation(qualifiedName: String): PsiAnnotation = fun.addAnnotation(qualifiedName)
override def hasAnnotation(qualifiedName: String): Option[ScAnnotation] = fun.hasAnnotation(qualifiedName)
override def hasAnnotation(clazz: PsiClass): Boolean = fun.hasAnnotation(clazz)
override def annotationNames: Seq[String] = fun.annotationNames
override def annotations: Seq[ScAnnotation] = fun.annotations
override def navigate(requestFocus: Boolean): Unit = fun.navigate(requestFocus)
override def canNavigate: Boolean = fun.canNavigate
override def canNavigateToSource: Boolean = fun.canNavigateToSource
override protected def findChildrenByClassScala[T >: Null <: ScalaPsiElement](clazz: Class[T]): Array[T] =
throw new UnsupportedOperationException("Operation on light function")
override protected def findChildByClassScala[T >: Null <: ScalaPsiElement](clazz: Class[T]): T =
throw new UnsupportedOperationException("Operation on light function")
}
| triggerNZ/intellij-scala | src/org/jetbrains/plugins/scala/lang/psi/light/scala/ScLightFunctionDeclaration.scala | Scala | apache-2.0 | 3,490 |
package controllers
import anorm.Row
import play.api.libs.json.{JsNumber, JsObject, JsValue}
object CheckRecord extends REST {
val tableName: String = "check_records"
val parameters: Map[String, (String, String) => ValidationResult] = Map(
"matchup_occurences" -> validateInt,
"kos_or_switches_caused" -> validateFloat,
"kos_or_switches_stddev" -> validateFloat,
"pokemon_id" -> validateInt,
"stat_record_id" -> validateInt
)
protected def single(row: Row): JsValue = {
row match {
case Row(id: Long, matchup_occurences: Long, kos_or_switches_caused: Float, kos_or_switches_stddev: Float, pokemon_id: Long, stat_record_id: Long) => {
JsObject(
"id" -> JsNumber(id) ::
"matchup_occurences" -> JsNumber(matchup_occurences) ::
"kos_or_switches_caused" -> JsNumber(kos_or_switches_caused) ::
"kos_or_switches_stddev" -> JsNumber(kos_or_switches_stddev) ::
"pokemon_id" -> JsNumber(pokemon_id) ::
"stat_record_id" -> JsNumber(stat_record_id) ::
Nil
)
}
      case _ =>
        throw new IllegalArgumentException(s"Row provided is invalid: $row")
}
}
} | ishakir/PokeStat | app/controllers/CheckRecord.scala | Scala | mit | 1,282 |
package org.scaladebugger.api.lowlevel.events
import org.scalamock.scalatest.MockFactory
import org.scalatest.{FunSpec, Matchers, ParallelTestExecution}
import org.scaladebugger.api.lowlevel.events.EventManager.EventHandler
import org.scaladebugger.api.lowlevel.events.EventType.EventType
import org.scaladebugger.api.utils.{ActionInfo, PendingActionManager}
import org.scaladebugger.test.helpers.ParallelMockFunSpec
import test.{JDIMockHelpers, TestEventManager}
class PendingEventHandlerSupportSpec extends ParallelMockFunSpec with JDIMockHelpers
{
private val TestHandlerId = java.util.UUID.randomUUID().toString
private val mockEventManager = mock[EventManager]
private class TestEventInfoPendingActionManager
extends PendingActionManager[EventHandlerInfo]
private val mockPendingActionManager =
mock[TestEventInfoPendingActionManager]
private val pendingEventHandlerSupport = new TestEventManager(
mockEventManager
) with PendingEventHandlerSupport {
override protected def newEventId(): String = TestHandlerId
override protected val pendingActionManager: PendingActionManager[EventHandlerInfo] =
mockPendingActionManager
}
describe("PendingEventHandlerSupport") {
describe("#processAllPendingEventHandlers") {
it("should process all pending eventHandlers") {
val testEventType = stub[EventType]
val testEventHandler = stub[EventHandler]
val expected = Seq(
EventHandlerInfo(TestHandlerId, testEventType, testEventHandler),
EventHandlerInfo(TestHandlerId + 1, testEventType, testEventHandler),
EventHandlerInfo(TestHandlerId, testEventType, stub[EventHandler])
)
(mockPendingActionManager.processAllActions _).expects()
.returning(expected.map(b => ActionInfo("id", b, () => {}))).once()
val actual = pendingEventHandlerSupport.processAllPendingEventHandlers()
actual should be (expected)
}
}
describe("#pendingEventHandlers") {
it("should return a collection of all pending eventHandlers") {
val testEventType = stub[EventType]
val testEventHandler = stub[EventHandler]
val expected = Seq(
EventHandlerInfo(TestHandlerId, testEventType, testEventHandler),
EventHandlerInfo(TestHandlerId + 1, testEventType, testEventHandler),
EventHandlerInfo(TestHandlerId, testEventType, stub[EventHandler]),
EventHandlerInfo(TestHandlerId, stub[EventType], testEventHandler)
)
val actions = expected.map(ActionInfo.apply("", _: EventHandlerInfo, () => {}))
(mockPendingActionManager.getPendingActionData _).expects(*).onCall(
(f: ActionInfo[EventHandlerInfo] => Boolean) =>
actions.filter(f).map(_.data)
)
val actual = pendingEventHandlerSupport.pendingEventHandlers
actual should be (expected)
}
it("should be empty if there are no pending eventHandlers") {
val expected = Nil
// No pending eventHandlers
(mockPendingActionManager.getPendingActionData _).expects(*)
.returning(Nil).once()
val actual = pendingEventHandlerSupport.pendingEventHandlers
actual should be (expected)
}
}
describe("#addEventHandlerWithId") {
it("should add the event handler to pending if pending enabled") {
val expected = EventHandlerInfo(
TestHandlerId,
stub[EventType],
stub[EventHandler],
Seq(stub[JDIEventArgument])
)
pendingEventHandlerSupport.enablePendingSupport()
(mockPendingActionManager.addPendingAction _).expects(expected, *).once()
val eventHandlerId = pendingEventHandlerSupport.addEventHandlerWithId(
expected.eventHandlerId,
expected.eventType,
expected.eventHandler,
expected.extraArguments: _*
)
eventHandlerId should be (expected.eventHandlerId)
}
it("should add the event handler immediately if pending disabled") {
val expected = EventHandlerInfo(
TestHandlerId,
stub[EventType],
stub[EventHandler],
Seq(stub[JDIEventArgument])
)
// ====================================================================
// NOTE: Due to limitation of ScalaMock (cannot mock overloaded method),
// using DummyEventManager with explicit mock function to verify
// specific execution
val mockAddEventHandlerWithId =
mockFunction[String, EventType, EventHandler, Seq[JDIEventArgument], String]
val eventManager = new DummyEventManager {
override def addEventHandlerWithId(
eventHandlerId: String,
eventType: EventType,
eventHandler: EventHandler,
eventArguments: JDIEventArgument*
): String = mockAddEventHandlerWithId(
eventHandlerId,
eventType,
eventHandler,
eventArguments
)
}
val pendingEventHandlerSupport = new TestEventManager(
eventManager
) with PendingEventHandlerSupport {
override protected def newEventId(): String = TestHandlerId
override protected val pendingActionManager: PendingActionManager[EventHandlerInfo] =
mockPendingActionManager
}
// ====================================================================
pendingEventHandlerSupport.disablePendingSupport()
mockAddEventHandlerWithId.expects(
expected.eventHandlerId,
expected.eventType,
expected.eventHandler,
expected.extraArguments
).once()
val eventHandlerId = pendingEventHandlerSupport.addEventHandlerWithId(
expected.eventHandlerId,
expected.eventType,
expected.eventHandler,
expected.extraArguments: _*
)
eventHandlerId should be (expected.eventHandlerId)
}
}
}
}
| chipsenkbeil/scala-debugger | scala-debugger-api/src/test/scala/org/scaladebugger/api/lowlevel/events/PendingEventHandlerSupportSpec.scala | Scala | apache-2.0 | 6,054 |
package com.deweyvm.clock
import com.badlogic.gdx.backends.lwjgl.LwjglAWTCanvas
import java.awt.BorderLayout
import javax.swing.JFrame
import java.awt.event.{MouseEvent, MouseAdapter}
import com.badlogic.gdx.Gdx
object Main {
def main(args:Array[String]) {
var (posX, posY) = (0,0)
val (width, height) = (300,60)
val canvas = new LwjglAWTCanvas(new ClockGame, true)
val frame = new JFrame
val mode = Gdx.graphics.getDesktopDisplayMode
val y = mode.height - height
val x = (mode.width/3)*2
canvas.getCanvas.addMouseListener(new MouseAdapter() {
override def mousePressed(e:MouseEvent) {
posX = e.getX
posY = e.getY
}
})
canvas.getCanvas.addMouseMotionListener(new MouseAdapter() {
override def mouseDragged(e:MouseEvent) {
frame.setLocation(e.getXOnScreen - posX, e.getYOnScreen - posY)
}
})
frame.setAlwaysOnTop(true)
frame.setUndecorated(true)
frame.getContentPane.add(canvas.getCanvas, BorderLayout.CENTER)
frame.setSize(width, height)
frame.setLocation(x, y)
frame.setVisible(true)
}
}
| deweyvm/ScalaClock | src/com/deweyvm/clock/Main.scala | Scala | gpl-2.0 | 1,111 |
package ml.wolfe.term
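/** A double-valued term that selects entry (i, j) of a matrix term: evaluation
  * reads the integer settings of `i` and `j` and indexes the single matrix
  * carried by the input of `mat`. */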
case class MatrixApply(mat: MatrixTerm, i: IntTerm, j: IntTerm) extends ComposedDoubleTerm {
type ArgumentType = AnyTerm
val arguments = IndexedSeq(mat, i, j)
def copy(args: IndexedSeq[ArgumentType]) =
new MatrixApply(args(0).asInstanceOf[MatrixTerm],
args(1).asInstanceOf[IntTerm],
args(2).asInstanceOf[IntTerm]
)
override def composer(args: Settings) = new Composer(args) {
def eval()(implicit execution: Execution) = {
val i = input(1).disc(0)
val j = input(2).disc(0)
output.cont(0) = input(0).mats(0)(i,j)
}
}
}
| wolfe-pack/wolfe | wolfe-core/src/main/scala/ml/wolfe/term/MatrixApply.scala | Scala | apache-2.0 | 608 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.manager.utils
import java.util.Properties
import grizzled.slf4j.Logging
import kafka.manager.model.KafkaVersion
import org.apache.curator.framework.CuratorFramework
import org.apache.zookeeper.CreateMode
import org.apache.zookeeper.KeeperException.NodeExistsException
import scala.collection.{Set, mutable}
import scala.util.Random
/**
* Borrowed from kafka 0.8.1.1, adapted to use curator framework
* https://git-wip-us.apache.org/repos/asf?p=kafka.git;a=blob;f=core/src/main/scala/kafka/admin/AdminUtils.scala
*/
class AdminUtils(version: KafkaVersion) extends Logging {
val rand = new Random
val TopicConfigChangeZnodePrefix = "config_change_"
/**
* There are 2 goals of replica assignment:
* 1. Spread the replicas evenly among brokers.
* 2. For partitions assigned to a particular broker, their other replicas are spread over the other brokers.
*
* To achieve this goal, we:
* 1. Assign the first replica of each partition by round-robin, starting from a random position in the broker list.
* 2. Assign the remaining replicas of each partition with an increasing shift.
*
* Here is an example of assigning
* broker-0 broker-1 broker-2 broker-3 broker-4
* p0 p1 p2 p3 p4 (1st replica)
* p5 p6 p7 p8 p9 (1st replica)
* p4 p0 p1 p2 p3 (2nd replica)
* p8 p9 p5 p6 p7 (2nd replica)
   * p3 p4 p0 p1 p2 (3rd replica)
   * p7 p8 p9 p5 p6 (3rd replica)
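   *
   * A minimal usage sketch (broker ids and sizes are illustrative; the exact
   * layout depends on a random start index unless fixedStartIndex is supplied):
   * {{{
   *   val assignment = assignReplicasToBrokers(Set(0, 1, 2), nPartitions = 4, replicationFactor = 2)
   *   // => a Map from partition id (0 to 3) to a 2-element replica list, e.g. 1 -> Seq(0, 1)
   * }}}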
*/
def assignReplicasToBrokers(brokerListSet: Set[Int],
nPartitions: Int,
replicationFactor: Int,
fixedStartIndex: Int = -1,
startPartitionId: Int = -1)
: Map[Int, Seq[Int]] = {
val brokerList : Seq[Int] = brokerListSet.toSeq.sorted
checkCondition(nPartitions > 0,TopicErrors.PartitionsGreaterThanZero)
checkCondition(replicationFactor > 0,TopicErrors.ReplicationGreaterThanZero)
checkCondition(replicationFactor <= brokerList.size,
TopicErrors.ReplicationGreaterThanNumBrokers(replicationFactor, brokerList.size))
val ret = new mutable.HashMap[Int, List[Int]]()
val startIndex = if (fixedStartIndex >= 0) fixedStartIndex else rand.nextInt(brokerList.size)
var currentPartitionId = if (startPartitionId >= 0) startPartitionId else 0
var nextReplicaShift = if (fixedStartIndex >= 0) fixedStartIndex else rand.nextInt(brokerList.size)
for (i <- 0 until nPartitions) {
if (currentPartitionId > 0 && (currentPartitionId % brokerList.size == 0))
nextReplicaShift += 1
val firstReplicaIndex = (currentPartitionId + startIndex) % brokerList.size
var replicaList = List(brokerList(firstReplicaIndex))
for (j <- 0 until replicationFactor - 1)
replicaList ::= brokerList(replicaIndex(firstReplicaIndex, nextReplicaShift, j, brokerList.size))
ret.put(currentPartitionId, replicaList.reverse)
currentPartitionId = currentPartitionId + 1
}
ret.toMap
}
private def replicaIndex(firstReplicaIndex: Int, secondReplicaShift: Int, replicaIndex: Int, nBrokers: Int): Int = {
val shift = 1 + (secondReplicaShift + replicaIndex) % (nBrokers - 1)
(firstReplicaIndex + shift) % nBrokers
}
def deleteTopic(curator: CuratorFramework, topic: String): Unit = {
checkCondition(topicExists(curator, topic),TopicErrors.TopicDoesNotExist(topic))
ZkUtils.createPersistentPath(curator,ZkUtils.getDeleteTopicPath(topic))
}
def createTopic(curator: CuratorFramework,
brokers: Set[Int],
topic: String,
partitions: Int,
replicationFactor: Int,
topicConfig: Properties = new Properties): Unit = {
val replicaAssignment = assignReplicasToBrokers(brokers,partitions,replicationFactor)
createOrUpdateTopicPartitionAssignmentPathInZK(curator, topic, replicaAssignment, topicConfig)
}
def createOrUpdateTopicPartitionAssignmentPathInZK(curator: CuratorFramework,
topic: String,
partitionReplicaAssignment: Map[Int, Seq[Int]],
config: Properties = new Properties,
update: Boolean = false,
readVersion: Int = -1) {
// validate arguments
Topic.validate(topic)
TopicConfigs.validate(version,config)
checkCondition(partitionReplicaAssignment.values.map(_.size).toSet.size == 1, TopicErrors.InconsistentPartitionReplicas)
val topicPath = ZkUtils.getTopicPath(topic)
    if (!update) {
      checkCondition(curator.checkExists().forPath(topicPath) == null, TopicErrors.TopicAlreadyExists(topic))
    }
partitionReplicaAssignment.foreach {
case (part,reps) => checkCondition(reps.size == reps.toSet.size, TopicErrors.DuplicateReplicaAssignment(topic,part,reps))
}
// write out the config on create, not update, if there is any, this isn't transactional with the partition assignments
if(!update) {
writeTopicConfig(curator, topic, config)
}
// create the partition assignment
writeTopicPartitionAssignment(curator, topic, partitionReplicaAssignment, update, readVersion)
}
/**
* Write out the topic config to zk, if there is any
*/
private def writeTopicConfig(curator: CuratorFramework, topic: String, config: Properties, readVersion: Int = -1) {
val configMap: mutable.Map[String, String] = {
import scala.collection.JavaConverters._
config.asScala
}
val map : Map[String, Any] = Map("version" -> 1, "config" -> configMap)
ZkUtils.updatePersistentPath(curator, ZkUtils.getTopicConfigPath(topic), toJson(map), readVersion)
}
private def writeTopicPartitionAssignment(curator: CuratorFramework,
topic: String,
replicaAssignment: Map[Int, Seq[Int]],
update: Boolean,
readVersion: Int = -1) {
try {
val zkPath = ZkUtils.getTopicPath(topic)
val jsonPartitionData = ZkUtils.replicaAssignmentZkData(replicaAssignment.map(e => (e._1.toString -> e._2)))
if (!update) {
logger.info(s"Topic creation ${jsonPartitionData.toString}")
ZkUtils.createPersistentPath(curator, zkPath, jsonPartitionData)
} else {
logger.info(s"Topic update ${jsonPartitionData.toString}")
ZkUtils.updatePersistentPath(curator, zkPath, jsonPartitionData, readVersion)
}
logger.debug("Updated path %s with %s for replica assignment".format(zkPath, jsonPartitionData))
} catch {
case e: NodeExistsException => throw new IllegalArgumentException("topic %s already exists".format(topic))
case e2: Throwable => throw new IllegalArgumentException(e2.toString)
}
}
/**
* Add partitions to existing topic with optional replica assignment
*
* @param curator Zookeeper client
* @param topic topic for adding partitions to
* @param newNumPartitions Number of partitions to be set
* @param partitionReplicaList current partition to replic set mapping
* @param brokerList broker list
*/
def addPartitions(curator: CuratorFramework,
topic: String,
newNumPartitions: Int,
partitionReplicaList : Map[Int, Seq[Int]],
brokerList: Set[Int],
readVersion: Int) {
/*
import collection.JavaConverters._
val newConfigSet = config.entrySet().asScala.map(e => (e.getKey.toString, e.getValue.toString)).toSet
if(newConfigSet == oldConfigSet) {
logger.info(s"No config changes. newConfigSet=$newConfigSet oldConfigSet=$oldConfigSet")
} else {
logger.info(s"Config changed. newConfigSet=$newConfigSet oldConfigSet=$oldConfigSet")
changeTopicConfig(curator,topic,config)
}*/
val brokerListSorted: Set[Int] = brokerList
val currentNumPartitions: Int = partitionReplicaList.size
checkCondition(currentNumPartitions > 0,
TopicErrors.PartitionsGreaterThanZero)
checkCondition(currentNumPartitions < newNumPartitions,
TopicErrors.CannotAddZeroPartitions(topic,currentNumPartitions,newNumPartitions))
val currentReplicationFactor: Int = partitionReplicaList.head._2.size
checkCondition(brokerListSorted.size >= currentReplicationFactor,
TopicErrors.ReplicationGreaterThanNumBrokers(currentReplicationFactor,brokerListSorted.size))
val partitionsToAdd = newNumPartitions - currentNumPartitions
// create the new partition replication list
val addedPartitionReplicaList : Map[Int, Seq[Int]] =
assignReplicasToBrokers(
brokerListSorted, partitionsToAdd, currentReplicationFactor, partitionReplicaList.head._2.head, currentNumPartitions)
logger.info("Add partition list for %s is %s".format(topic, addedPartitionReplicaList))
//val partitionReplicaList : Map[Int, Seq[Int]] = topicIdentity.partitionsIdentity.map(p => p._1 -> p._2.replicas.toSeq)
// add the new partitions
val newPartitionsReplicaList : Map[Int, Seq[Int]] = partitionReplicaList ++ addedPartitionReplicaList
checkCondition(newPartitionsReplicaList.size == newNumPartitions,
TopicErrors.FailedToAddNewPartitions(topic, newNumPartitions, newPartitionsReplicaList.size))
createOrUpdateTopicPartitionAssignmentPathInZK(curator, topic, newPartitionsReplicaList, update=true, readVersion=readVersion)
}
/* Add partitions to multiple topics. After this operation, all topics will have the same number of partitions */
def addPartitionsToTopics(curator: CuratorFramework,
topicAndReplicaList: Seq[(String, Map[Int, Seq[Int]])],
newNumPartitions: Int,
brokerList: Set[Int],
readVersions: Map[String,Int]) {
val topicsWithoutReadVersion = topicAndReplicaList.map(x=>x._1).filter{t => !readVersions.contains(t)}
checkCondition(topicsWithoutReadVersion.isEmpty, TopicErrors.NoReadVersionFound(topicsWithoutReadVersion.mkString(", ")))
// topicAndReplicaList is sorted by number of partitions each topic has in order not to start adding partitions if any of requests doesn't work with newNumPartitions
for {
(topic, replicaList) <- topicAndReplicaList
readVersion = readVersions(topic)
} {
addPartitions(curator, topic, newNumPartitions, replicaList, brokerList, readVersion)
}
}
/**
* Update the config for an existing topic and create a change notification so the change will propagate to other brokers
* @param curator: The zk client handle used to write the new config to zookeeper
* @param topic: The topic for which configs are being changed
* @param config: The final set of configs that will be applied to the topic. If any new configs need to be added or
* existing configs need to be deleted, it should be done prior to invoking this API
*
*/
def changeTopicConfig(curator: CuratorFramework, topic: String, config: Properties, readVersion: Int) {
checkCondition(topicExists(curator, topic),TopicErrors.TopicDoesNotExist(topic))
    // validate the topic config overrides
    TopicConfigs.validate(version, config)
// write the new config--may not exist if there were previously no overrides
writeTopicConfig(curator, topic, config, readVersion)
// create the change notification
curator
.create()
.creatingParentsIfNeeded()
.withMode(CreateMode.PERSISTENT_SEQUENTIAL)
.forPath(s"${ZkUtils.TopicConfigChangesPath}/$TopicConfigChangeZnodePrefix", toJson(topic))
}
def topicExists(curator: CuratorFramework, topic: String): Boolean = {
val topicPath = ZkUtils.getTopicPath(topic)
val result = curator.checkExists().forPath(topicPath)
result != null
}
}
| xuwei-k/kafka-manager | app/kafka/manager/utils/AdminUtils.scala | Scala | apache-2.0 | 13,119 |
package uk.gov.dvla.vehicles.presentation.common.clientsidesession
import uk.gov.dvla.vehicles.presentation.common.{UnitSpec, TestWithApplication}
class Sha1HashSpec extends UnitSpec {
"Sha1Hash" should {
"return a hashed string" in new TestWithApplication {
sha1Hash.hash(ClearText) should not equal ClearText
}
"returns the same hash repeatedly" in new TestWithApplication {
val first = sha1Hash.hash(ClearText)
val second = sha1Hash.hash(ClearText)
first should equal(second)
}
"return expected length for the digest" in new TestWithApplication {
sha1Hash.digestStringLength should equal(40)
}
}
private val sha1Hash = new Sha1HashGenerator
// Sharing immutable fixture objects via instance variables
private final val ClearText = "qwerty"
} | dvla/vehicles-presentation-common | test/uk/gov/dvla/vehicles/presentation/common/clientsidesession/Sha1HashSpec.scala | Scala | mit | 814 |
package io.mpjsons
/**
* @author Marcin Pieciukiewicz
*/
import org.scalatest.FlatSpec
import org.scalatest.MustMatchers._
// this also tests a non-default constructor and final fields
class MapElement(var intValue: Int, val stringValue: String)
class SimpleMapsObject {
var emptyMap:Map[Int, Long] = Map.empty
var simpleMap:Map[String, String] = _
var primitiveMap:Map[Int, Long] = _
var objectMap:Map[MapElement, MapElement] = _
}
class MapsSpec extends FlatSpec {
val mpjsons = new MPJsons
"Serializer" must "handle maps" in {
val smo = new SimpleMapsObject
smo.simpleMap = Map("a" -> "Ala", "k" -> "Kot")
smo.primitiveMap = Map(1 -> 1224, 5 -> 5324)
smo.objectMap = Map(
new MapElement(1, "one") -> new MapElement(100, "one hundred"),
new MapElement(5, "five") -> new MapElement(500, "five hundred"))
val serialized = mpjsons.serialize(smo)
    var smoDeserialized: SimpleMapsObject = mpjsons.deserialize[SimpleMapsObject](serialized)
smoDeserialized must not be (null)
smoDeserialized.simpleMap mustEqual smo.simpleMap
smoDeserialized.primitiveMap mustEqual smo.primitiveMap
smoDeserialized.objectMap.size mustEqual smo.objectMap.size
val serializedWithWhitespacesQuotes =
""" { "emptyMap" : [ ] ,
| "simpleMap" : [ ["a" , "Ala" ] , [ "k" , "Kot" ] ] ,
| "primitiveMap" : [ [1 , 1224] , [ 5 , 5324 ] ],
| "objectMap" : [ [ { "intValue" : 1 , "stringValue" : "one" } ,
| { "intValue" : 100 , "stringValue" : "one hundred" } ] ,
| [ {"intValue":5,"stringValue":"five"},
| {"intValue":500,"stringValue":"five hundred"} ] ] } """.stripMargin.lines.mkString("")
smoDeserialized = mpjsons.deserialize[SimpleMapsObject](serializedWithWhitespacesQuotes)
smoDeserialized must not be (null)
smoDeserialized.simpleMap mustEqual smo.simpleMap
smoDeserialized.primitiveMap mustEqual smo.primitiveMap
smoDeserialized.objectMap.size mustEqual smo.objectMap.size
}
}
| marpiec/mpjsons | src/test/scala/io/mpjsons/MapsSpec.scala | Scala | apache-2.0 | 2,060 |
package playground.views
import java.util.Date
import org.joda.time.DateTime
import playground.models._
/**
* To make formatting available in templates of your project configure
* your build.sbt or Build.scala with :
*
* settings(
* templatesImport += "playground.views.Formattable._"
* )
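 *
 * A hypothetical template usage sketch (`dateFormatter` is an assumed value,
 * not defined in this file):
 * {{{
 *   @(created: java.util.Date)
 *   @{created | dateFormatter}   // dateFormatter: Formatter[java.util.Date] in scope
 * }}}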
*/
object Formattable {
implicit class DateFormattable(d: Date) {
def |(formatter: Formatter[Date]) = formatter(d)
}
implicit class DateTimeFormattable(d: DateTime) {
def |(formatter: Formatter[Date]) = formatter(d.toDate)
}
implicit class StringFormattable(str: String) {
def |(formatter: Formatter[String]) = formatter(str)
}
implicit class IntFormattable(i: Int) {
def |(formatter: Formatter[Int]) = formatter(i)
}
implicit class LongFormattable(l: Long) {
def |(formatter: Formatter[Long]) = formatter(l)
}
implicit class FloatFormattable(f: Float) {
def |(formatter: Formatter[Float]) = formatter(f)
}
implicit class DoubleFormattable(d: Double) {
def |(formatter: Formatter[Double]) = formatter(d)
}
implicit class BooleanFormattable(b: Boolean) {
def |(formatter: Formatter[Boolean]) = formatter(b)
}
implicit class NameableFormattable(nameable: Nameable) {
def |(formatter: Formatter[Nameable]) = formatter(nameable)
}
implicit class IdFormattable(id: Id) {
def |(formatter: Formatter[Id]) = formatter(id)
}
}
| ybr/playground | src/main/scala/playground/views/Formattable.scala | Scala | mit | 1,409 |
/* NSC -- new Scala compiler
* Copyright 2005-2013 LAMP/EPFL
* @author Martin Odersky
*/
package dotty.tools
package dotc
import core.Contexts.Context
import config.Settings.Setting.*
object Main extends Driver {
def resident(compiler: Compiler): Unit = unsupported("resident") /*loop { line =>
val command = new CompilerCommand(line split "\\s+" toList, new Settings(scalacError))
compiler.reporter.reset()
new compiler.Run() compile command.files
}*/
override def newCompiler(): Compiler = new Compiler
override def doCompile(compiler: Compiler, fileNames: List[String])(implicit ctx: Context): Unit = {
if (ctx.base.settings.resident.value) resident(compiler) // error
else super.doCompile(compiler,123) // error: type mismatch
}
}
| lampepfl/dotty | tests/neg-with-compiler/Main.scala | Scala | apache-2.0 | 773 |
/*
* Copyright 2019 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.releaser
import java.io._
import java.nio.file.{Files, Path}
import java.util.jar._
import java.util.zip.{ZipEntry, ZipFile, ZipOutputStream}
import com.google.common.io.ByteStreams
import org.apache.commons.compress.archivers.tar.{TarArchiveEntry, TarArchiveInputStream, TarArchiveOutputStream}
import org.apache.commons.compress.compressors.gzip.{GzipCompressorInputStream, GzipCompressorOutputStream}
import org.apache.commons.io.{FileUtils, IOUtils}
import resource._
import uk.gov.hmrc.{Logger, PosixFileAttributes}
import scala.collection.JavaConversions._
import scala.util.{Failure, Try}
import scala.xml._
import scala.xml.transform.{RewriteRule, RuleTransformer}
trait Transformer {
def apply(localFile: Path, artefactName: String, sourceVersion: ReleaseCandidateVersion, targetVersion: ReleaseVersion, targetFile: Path): Try[Path]
}
trait XmlTransformer extends Transformer {
def apply(localPomFile: Path, artefactName: String, sourceVersion: ReleaseCandidateVersion, targetVersion: ReleaseVersion, targetFile: Path): Try[Path] = {
val updatedT: Try[Node] = updateVersion(XML.loadFile(localPomFile.toFile), targetVersion)
updatedT.flatMap { updated =>
Try {
Files.createDirectories(targetFile.getParent)
Files.write(targetFile, updated.mkString.getBytes)
}
}
}
def updateVersion(node: Node, newVersion: ReleaseVersion): Try[Node]
}
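/** Rewrites the <version> element sitting directly under <project> in a Maven
  * POM. Illustrative effect (values are made up):
  * {{{
  *   <project><version>1.0.0-RC1</version></project>
  *   // becomes, for targetVersion 1.0.0:
  *   <project><version>1.0.0</version></project>
  * }}}
  */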
class PomTransformer extends XmlTransformer {
def updateVersion(node: Node, newVersion: ReleaseVersion): Try[Node] = {
if ((node \\ "version").isEmpty) {
Failure(new Exception("Didn't find project element in pom file"))
} else {
def updateVersionRec(node: Node, newVersion:ReleaseVersion): Node = node match {
case <project>{ n @ _* }</project> => <project>{ n.map { a => updateVersionRec(a, newVersion) }} </project>
case <version>{ _* }</version> => <version>{ newVersion.value }</version>
case other @ _ => other
}
Try {
updateVersionRec(node, newVersion)
}
}
}
}
class IvyTransformer extends XmlTransformer {
def updateVersion(node: Node, newVersion: ReleaseVersion): Try[Node] = {
if ((node \\ "info" \\ "@revision").isEmpty) {
Failure(new Exception("Didn't find revision element in ivy file"))
} else {
val rewrite = new RuleTransformer(new RewriteRule {
override def transform(node: Node) = node match {
case n: Elem if n.label == "info" =>
n % Attribute("revision", Text(newVersion.value), n.attributes.remove("revision"))
case other => other
}
})
Try {
rewrite(node)
}
}
}
}
class JarManifestTransformer extends Transformer {
val versionNumberFields = Set("Git-Describe", "Implementation-Version", "Specification-Version")
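  /** Copies the main attributes into a fresh manifest, substituting the target
    * release version for each of the version-bearing fields listed above. */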
def manifestTransformer(manifest: Manifest, updatedVersionNumber: ReleaseVersion): Manifest = {
manifest.getMainAttributes.keysIterator.foldLeft(new Manifest()) { (newMan, key) =>
if (versionNumberFields.contains(key.toString)) {
newMan.getMainAttributes.put(key, updatedVersionNumber.value)
} else {
newMan.getMainAttributes.put(key, manifest.getMainAttributes.get(key))
}
newMan
}
}
def apply(localJarFile: Path, artefactName: String, sourceVersion: ReleaseCandidateVersion, targetVersion: ReleaseVersion, target: Path): Try[Path] = Try {
Files.createDirectories(target.getParent)
for {
jarFile <- managed(new ZipFile(localJarFile.toFile))
zout <- managed(new ZipOutputStream(new FileOutputStream(target.toFile)))
} {
jarFile.entries().foreach { ze =>
if (ze.getName == "META-INF/MANIFEST.MF") {
val newZipEntry = new ZipEntry(ze.getName)
newZipEntry.setTime(ze.getTime)
zout.putNextEntry(newZipEntry)
val newManifest: Manifest = manifestTransformer(new Manifest(jarFile.getInputStream(ze)), targetVersion)
newManifest.write(zout)
} else {
zout.putNextEntry(new ZipEntry(ze))
ByteStreams.copy(jarFile.getInputStream(ze), zout)
}
}
}
target
}
}
class TgzTransformer extends Transformer with Logger {
import PosixFileAttributes._
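  /** Repackages the artefact tgz in three steps: decompress next to the source
    * archive, rename the versioned top-level folder from the release-candidate
    * version to the release version, then re-compress to `targetFile`. */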
override def apply(localTgzFile: Path, artefactName: String, sourceVersion: ReleaseCandidateVersion, targetVersion: ReleaseVersion, targetFile: Path): Try[Path] = Try {
val decompressedArchivePath = decompressTgz(localTgzFile)
renameFolder(decompressedArchivePath, artefactName, sourceVersion, targetVersion)
compressTgz(decompressedArchivePath, targetFile)
targetFile
}
private def decompressTgz(localTgzFile: Path): Path = {
val bytes = new Array[Byte](2048)
val fin = new BufferedInputStream(new FileInputStream(localTgzFile.toFile))
val gzIn = new GzipCompressorInputStream(fin)
val tarIn = new TarArchiveInputStream(gzIn)
val targetDecompressPath = localTgzFile.getParent.resolve("tmp_tgz")
targetDecompressPath.toFile.mkdirs()
Iterator continually tarIn.getNextTarEntry takeWhile (null !=) foreach { tarEntry =>
val targetEntryFile = new File(targetDecompressPath.toFile, tarEntry.getName)
if (tarEntry.isDirectory) {
targetEntryFile.mkdirs()
} else {
targetEntryFile.getParentFile.mkdirs()
val fos = new BufferedOutputStream(new FileOutputStream(targetEntryFile), 2048)
Iterator continually tarIn.read(bytes) takeWhile (-1 !=) foreach (read => fos.write(bytes, 0, read))
fos.close()
Files.setPosixFilePermissions(targetEntryFile.toPath, tarEntry.getMode)
}
}
tarIn.close()
targetDecompressPath
}
private def renameFolder(decompressedArchivePath: Path, artefactName: String, sourceVersion: ReleaseCandidateVersion, targetVersion: ReleaseVersion): Try[Path] = Try {
val folderToRename = decompressedArchivePath.resolve(s"$artefactName-${sourceVersion.value}")
val targetFolder = folderToRename.resolveSibling(s"$artefactName-${targetVersion.value}")
FileUtils.moveDirectory(folderToRename.toFile, targetFolder.toFile)
targetFolder
}
private def compressTgz(expandedFolder: Path, targetFile: Path): Try[Path] = Try {
import org.apache.commons.compress.archivers.tar.TarArchiveOutputStream.LONGFILE_GNU
log.debug(s"Compressing Tgz to ${targetFile.toString}")
Files.createDirectories(targetFile.getParent)
val outputStream = new TarArchiveOutputStream(new GzipCompressorOutputStream(new BufferedOutputStream(new FileOutputStream(targetFile.toFile))))
outputStream.setLongFileMode(LONGFILE_GNU)
val mainEntry = new TarArchiveEntry(expandedFolder.toFile, ".")
addFolderToTarGz(outputStream, mainEntry)
outputStream.finish()
outputStream.close()
targetFile
}
private def addFolderToTarGz(tOut: TarArchiveOutputStream, tarEntry: TarArchiveEntry): Try[Unit] = Try {
val f = tarEntry.getFile
tOut.putArchiveEntry(tarEntry)
tOut.closeArchiveEntry()
val children = f.listFiles()
if (children != null) {
for (child <- children) {
addEntryToTarGz(tOut, new TarArchiveEntry(child, tarEntry.getName + child.getName))
}
}
}
private def addEntryToTarGz(tOut: TarArchiveOutputStream, tarEntry: TarArchiveEntry): Try[Unit] = Try {
val f = tarEntry.getFile
if (f.isFile) {
tarEntry.setMode(Files.getPosixFilePermissions(f.toPath))
tOut.putArchiveEntry(tarEntry)
IOUtils.copy(new FileInputStream(f), tOut)
tOut.closeArchiveEntry()
} else {
addFolderToTarGz(tOut, tarEntry)
}
}
}
class CopyAndRenameTransformer() extends Transformer {
override def apply(localFile: Path, artefactName: String, sourceVersion: ReleaseCandidateVersion, targetVersion: ReleaseVersion, targetFile: Path): Try[Path] = {
Try {
Files.createDirectories(targetFile.getParent)
Files.copy(localFile, targetFile)
}
}
}
| hmrc/releaser | src/main/scala/uk/gov/hmrc/releaser/Transformers.scala | Scala | apache-2.0 | 8,596 |
package com.nefariouszhen.trie
import scala.collection.mutable
import scala.collection.mutable.ArrayBuffer
trait BurstTrie[T] {
def put(word: String, t: T): Unit
def query(q: String): Iterator[T]
}
class BurstTrieSet (impl: BurstTrie[String]) {
def put(word: String): Unit = impl.put(word, word)
def query(q: String): Iterator[String] = impl.query(q)
}
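/** Factories for burst-trie backed maps and sets. A usage sketch (prefix-query
  * semantics assumed from `query`):
  * {{{
  *   val names = BurstTrie.newSet()
  *   names.put("alpha")
  *   names.put("alphabet")
  *   names.query("alph").toList   // expected to contain both entries
  * }}}
  */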
object BurstTrie {
val DEFAULT_BURST = 10000
val DEFAULT_GROWTH = 2
def newMap[T](burstFactor: Int = BurstTrie.DEFAULT_BURST, growthFactor: Int = BurstTrie.DEFAULT_GROWTH): BurstTrie[T] = {
new BurstTrieImpl[T, Iterable[T]](burstFactor, growthFactor, (_, t) => Some(t), () => None)
}
def newMultiMap[T](burstFactor: Int = BurstTrie.DEFAULT_BURST, growthFactor: Int = BurstTrie.DEFAULT_GROWTH, allowDuplicateEntries: Boolean = true): BurstTrie[T] = {
if (allowDuplicateEntries) {
new BurstTrieImpl[T, ArrayBuffer[T]](burstFactor, growthFactor, (arr, t) => arr += t, () => ArrayBuffer[T]())
} else {
new BurstTrieImpl[T, mutable.HashSet[T]](burstFactor, growthFactor, (arr, t) => arr += t, () => mutable.HashSet[T]())
}
}
def newSuffixMap[T](burstFactor: Int = BurstTrie.DEFAULT_BURST, growthFactor: Int = BurstTrie.DEFAULT_GROWTH, allowDuplicateEntries: Boolean = true): BurstTrie[T] = {
if (allowDuplicateEntries) {
new BurstTrieImpl[T, mutable.Buffer[T]](burstFactor, growthFactor, (arr, t) => arr += t, () => mutable.ArrayBuffer[T]() ) {
override def put(word: String, t: T): Unit = {
for (idx <- 0 until word.length) {
val suffix = word.slice(idx, word.length)
super.put(suffix, t)
}
}
}
} else {
new BurstTrieImpl[T, mutable.HashSet[T]](burstFactor, growthFactor, (arr, t) => arr += t, () => mutable.HashSet[T]() ) {
override def put(word: String, t: T): Unit = {
for (idx <- 0 until word.length) {
val suffix = word.slice(idx, word.length)
super.put(suffix, t)
}
}
}
}
}
def newSet(burstFactor: Int = BurstTrie.DEFAULT_BURST, growthFactor: Int = BurstTrie.DEFAULT_GROWTH): BurstTrieSet = {
new BurstTrieSet(newMap[String](burstFactor, growthFactor))
}
def newSuffixSet(burstFactor: Int = BurstTrie.DEFAULT_BURST, growthFactor: Int = BurstTrie.DEFAULT_GROWTH, allowDuplicateEntries: Boolean = true): BurstTrieSet = {
new BurstTrieSet(newSuffixMap[String](burstFactor, growthFactor, allowDuplicateEntries))
}
}
| nbauernfeind/scala-burst-trie | src/main/scala/com/nefariouszhen/trie/BurstTrie.scala | Scala | apache-2.0 | 2,500 |
/*
 * Tests for Z3's interface.
 **/
package at.logic.gapt.provers.z3
import at.logic.gapt.examples.BussTautology
import at.logic.gapt.expr._
import at.logic.gapt.proofs.HOLSequent
import at.logic.gapt.provers.smtlib.Z3
import org.specs2.mutable._
class Z3ProverTest extends Specification {
args( skipAll = !Z3.isInstalled )
"z3" should {
"prove a ∨ ¬ a" in {
val a = FOLAtom( "a" )
Z3.isValid( Or( a, Neg( a ) ) ) must_== true
}
"a |- a" in {
val a = FOLAtom( "a" )
Z3.isValid( a +: HOLSequent() :+ a ) must_== true
}
"prove top" in {
Z3.isValid( HOLSequent() :+ Top() ) must_== true
}
"not prove bottom" in {
Z3.isValid( HOLSequent() :+ Bottom() ) must_== false
}
"not refute top" in {
Z3.isValid( Top() +: HOLSequent() ) must_== false
}
"refute bottom" in {
Z3.isValid( Bottom() +: HOLSequent() ) must_== true
}
"validate the buss tautology for n=1" in {
Z3.isValid( BussTautology( 1 ) ) must_== true
}
}
}
| gebner/gapt | tests/src/test/scala/at/logic/gapt/provers/z3/Z3ProverTest.scala | Scala | gpl-3.0 | 1,042 |
package org.jetbrains.plugins.scala.lang.parameterInfo.typeParameterInfo
class TypeParameterInfoExtendsTest extends TypeParameterInfoTestBase {
override def getTestDataPath: String =
s"${super.getTestDataPath}Extends/"
def testAllBounds(): Unit = doTest()
def testJavaGeneric(): Unit = doTest()
def testScalaGenericExtends(): Unit = doTest()
def testScalaLowerBound(): Unit = doTest()
def testScalaViewBound(): Unit = doTest()
} | JetBrains/intellij-scala | scala/scala-impl/test/org/jetbrains/plugins/scala/lang/parameterInfo/typeParameterInfo/TypeParameterInfoExtendsTest.scala | Scala | apache-2.0 | 451 |
package org.jetbrains.plugins.scala
package lang.refactoring.introduceVariable
import java.awt._
import java.awt.event.{ActionEvent, ActionListener, ItemEvent, ItemListener}
import javax.swing._
import javax.swing.event.{HyperlinkEvent, HyperlinkListener}
import com.intellij.codeInsight.template.impl.{TemplateManagerImpl, TemplateState}
import com.intellij.openapi.application.{ApplicationManager, Result}
import com.intellij.openapi.command.WriteCommandAction
import com.intellij.openapi.command.impl.StartMarkAction
import com.intellij.openapi.command.undo.UndoManager
import com.intellij.openapi.editor.event.{DocumentAdapter, DocumentEvent, DocumentListener}
import com.intellij.openapi.editor.ex.DocumentEx
import com.intellij.openapi.editor.markup.RangeHighlighter
import com.intellij.openapi.editor.{Editor, ScrollType}
import com.intellij.openapi.project.Project
import com.intellij.openapi.ui.VerticalFlowLayout
import com.intellij.openapi.util.TextRange
import com.intellij.openapi.vfs.VirtualFile
import com.intellij.psi._
import com.intellij.psi.search.{LocalSearchScope, SearchScope}
import com.intellij.psi.util.PsiTreeUtil
import com.intellij.refactoring.introduce.inplace.InplaceVariableIntroducer
import com.intellij.ui.NonFocusableCheckBox
import org.jetbrains.plugins.scala.lang.formatting.settings.ScalaCodeStyleSettings
import org.jetbrains.plugins.scala.lang.lexer.ScalaTokenTypes
import org.jetbrains.plugins.scala.lang.psi.api.base.patterns.ScTypedPattern
import org.jetbrains.plugins.scala.lang.psi.api.expr.{ScEnumerator, ScExpression}
import org.jetbrains.plugins.scala.lang.psi.api.statements._
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.ScNamedElement
import org.jetbrains.plugins.scala.lang.psi.impl.ScalaPsiElementFactory._
import org.jetbrains.plugins.scala.lang.psi.types.ScType
import org.jetbrains.plugins.scala.lang.psi.{ScalaPsiElement, ScalaPsiUtil}
import org.jetbrains.plugins.scala.lang.refactoring.introduceVariable.ScalaInplaceVariableIntroducer.addTypeAnnotation
import org.jetbrains.plugins.scala.lang.refactoring.util.{BalloonConflictsReporter, ScalaNamesUtil, ScalaVariableValidator, ValidationReporter}
import org.jetbrains.plugins.scala.project.ProjectContext
import org.jetbrains.plugins.scala.settings.ScalaApplicationSettings
import org.jetbrains.plugins.scala.util.TypeAnnotationUtil
import scala.collection.mutable
/**
* Nikolay.Tropin
* 6/3/13
*/
class ScalaInplaceVariableIntroducer(project: Project,
editor: Editor,
expr: ScExpression,
types: Array[ScType],
namedElement: PsiNamedElement,
title: String,
replaceAll: Boolean,
asVar: Boolean,
forceInferType: Option[Boolean])
extends InplaceVariableIntroducer[ScExpression](namedElement, editor, project, title, Array.empty[ScExpression], expr) {
implicit def projectContext: ProjectContext = project
private var myVarCheckbox: JCheckBox = null
private var mySpecifyTypeChb: JCheckBox = null
private var myDeclarationStartOffset: Int = 0
private val newDeclaration = ScalaPsiUtil.getParentOfType(namedElement, classOf[ScEnumerator], classOf[ScDeclaredElementsHolder])
private var myCheckIdentifierListener: DocumentListener = null
private val myFile: PsiFile = namedElement.getContainingFile
private val myBalloonPanel: JPanel = new JPanel()
private var nameIsValid: Boolean = true
private val isEnumerator: Boolean = newDeclaration.isInstanceOf[ScEnumerator]
private val initialName = ScalaNamesUtil.scalaName(namedElement)
private val myLabel = new JLabel()
private val myLabelPanel = new JPanel()
private val myChbPanel = new JPanel()
private val typePanel = new JPanel()
setDeclaration(newDeclaration)
myCheckIdentifierListener = checkIdentifierListener()
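  // The listener re-validates the inline-rename balloon on every document
  // change: it re-locates the declaration around the caret range and, when
  // the typed name is not a valid identifier, swaps the checkbox panel for an
  // error label (see resetBalloonPanel).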
private def checkIdentifierListener(): DocumentListener = new DocumentAdapter() {
override def documentChanged(e: DocumentEvent): Unit = {
commitDocument()
val range = new TextRange(myCaretRangeMarker.getStartOffset, myCaretRangeMarker.getEndOffset)
      // nothing to do while an undo operation is collapsing the caret range
      if (range.getLength > 0 || !UndoManager.getInstance(myProject).isUndoInProgress) {
val input = myCaretRangeMarker.getDocument.getText(range)
val numberOfSpaces = input.lastIndexOf(' ') + 1
val declaration = findDeclaration(range.getStartOffset + numberOfSpaces)
val named: Option[ScNamedElement] = namedElement(declaration)
if (named.isDefined) {
setDeclaration(declaration)
if (nameIsValid != (named.isDefined && isIdentifier(input.trim, myFile.getLanguage))) {
nameIsValid = !nameIsValid
}
resetBalloonPanel(nameIsValid)
} else {
nameIsValid = false
resetBalloonPanel(nameIsValid)
}
}
super.documentChanged(e)
}
}
private def namedElement(declaration: PsiElement): Option[ScNamedElement] = declaration match {
case value: ScValue => value.declaredElements.headOption
case variable: ScVariable => variable.declaredElements.headOption
case enumerator: ScEnumerator => enumerator.pattern.bindings.headOption
case _ => None
}
private def findDeclaration(offset: Int): PsiElement = {
val elem = myFile.findElementAt(offset)
ScalaPsiUtil.getParentOfType(elem, classOf[ScEnumerator], classOf[ScDeclaredElementsHolder])
}
private def getDeclaration: PsiElement = findDeclaration(myDeclarationStartOffset)
private def setDeclaration(declaration: PsiElement): Unit = {
myDeclarationStartOffset = declaration.getTextRange.getStartOffset
}
private def commitDocument(): Unit = {
PsiDocumentManager.getInstance(myProject).commitDocument(myEditor.getDocument)
}
private def needInferType = forceInferType.getOrElse {
if (mySpecifyTypeChb != null) mySpecifyTypeChb.isSelected
else addTypeAnnotation(namedElement, expr)
}
override def getInitialName: String = initialName
protected override def getComponent: JComponent = {
if (!isEnumerator) {
myVarCheckbox = new NonFocusableCheckBox(ScalaBundle.message("introduce.variable.declare.as.var"))
myVarCheckbox.setMnemonic('v')
myVarCheckbox.addActionListener(new ActionListener {
def actionPerformed(e: ActionEvent): Unit = {
val writeAction = new WriteCommandAction[Unit](myProject, getCommandName, getCommandName) {
private def changeValOrVar(asVar: Boolean, declaration: PsiElement): Unit = {
val replacement =
declaration match {
case value: ScValue if asVar =>
createVarFromValDeclaration(value)
case variable: ScVariableDefinition if !asVar =>
createValFromVarDefinition(variable)
case _ => declaration
}
if (replacement != declaration) setDeclaration(declaration.replace(replacement))
}
protected def run(result: Result[Unit]): Unit = {
changeValOrVar(myVarCheckbox.isSelected, getDeclaration)
commitDocument()
}
}
writeAction.execute()
}
})
}
if (types.nonEmpty && forceInferType.isEmpty) {
val selectedType = types(0)
mySpecifyTypeChb = new NonFocusableCheckBox(ScalaBundle.message("introduce.variable.specify.type.explicitly"))
mySpecifyTypeChb.setSelected(addTypeAnnotation(namedElement, expr))
mySpecifyTypeChb.setMnemonic('t')
mySpecifyTypeChb.addItemListener(new ItemListener {
override def itemStateChanged(e: ItemEvent): Unit = {
val greedyToRight = mutable.WeakHashMap[RangeHighlighter, Boolean]()
def setGreedyToRightToFalse(): Unit = {
val highlighters: Array[RangeHighlighter] = myEditor.getMarkupModel.getAllHighlighters
for (highlighter <- highlighters; if checkRange(highlighter.getStartOffset, highlighter.getEndOffset))
greedyToRight += (highlighter -> highlighter.isGreedyToRight)
}
def resetGreedyToRightBack(): Unit = {
val highlighters: Array[RangeHighlighter] = myEditor.getMarkupModel.getAllHighlighters
for (highlighter <- highlighters; if checkRange(highlighter.getStartOffset, highlighter.getEndOffset))
highlighter.setGreedyToRight(greedyToRight(highlighter))
}
def checkRange(start: Int, end: Int): Boolean = {
val named: Option[ScNamedElement] = namedElement(getDeclaration)
if (named.isDefined) {
val nameRange = named.get.getNameIdentifier.getTextRange
nameRange.getStartOffset == start && nameRange.getEndOffset <= end
} else false
}
val writeAction = new WriteCommandAction[Unit](myProject, getCommandName, getCommandName) {
private def addTypeAnnotation(selectedType: ScType): Unit = {
val declaration = getDeclaration
declaration match {
case _: ScDeclaredElementsHolder | _: ScEnumerator =>
val declarationCopy = declaration.copy.asInstanceOf[ScalaPsiElement]
val fakeDeclaration = createDeclaration(selectedType, "x", isVariable = false, "", isPresentableText = false)
val first = fakeDeclaration.findFirstChildByType(ScalaTokenTypes.tCOLON)
val last = fakeDeclaration.findFirstChildByType(ScalaTokenTypes.tASSIGN)
val assign = declarationCopy.findFirstChildByType(ScalaTokenTypes.tASSIGN)
declarationCopy.addRangeAfter(first, last, assign)
assign.delete()
val replaced = getDeclaration.replace(declarationCopy)
ScalaPsiUtil.adjustTypes(replaced)
setDeclaration(replaced)
commitDocument()
case _ =>
}
}
private def removeTypeAnnotation(): Unit = {
getDeclaration match {
case holder: ScDeclaredElementsHolder =>
val colon = holder.findFirstChildByType(ScalaTokenTypes.tCOLON)
val assign = holder.findFirstChildByType(ScalaTokenTypes.tASSIGN)
implicit val manager = myFile.getManager
val whiteSpace = createExpressionFromText("1 + 1").findElementAt(1)
val newWhiteSpace = holder.addBefore(whiteSpace, assign)
holder.getNode.removeRange(colon.getNode, newWhiteSpace.getNode)
setDeclaration(holder)
commitDocument()
case enum: ScEnumerator if enum.pattern.isInstanceOf[ScTypedPattern] =>
val colon = enum.pattern.findFirstChildByType(ScalaTokenTypes.tCOLON)
enum.pattern.getNode.removeRange(colon.getNode, null)
setDeclaration(enum)
commitDocument()
case _ =>
}
}
protected def run(result: Result[Unit]): Unit = {
commitDocument()
setGreedyToRightToFalse()
if (needInferType) {
addTypeAnnotation(selectedType)
} else {
removeTypeAnnotation()
}
}
}
writeAction.execute()
ApplicationManager.getApplication.runReadAction(new Runnable {
def run(): Unit = {
if (addTypeAnnotation(namedElement, expr)) resetGreedyToRightBack()
}
})
}
})
}
myEditor.getDocument.addDocumentListener(myCheckIdentifierListener)
setBalloonPanel(nameIsValid = true)
myBalloonPanel
}
private def setUpTypePanel(): JPanel = {
typePanel.setLayout(new FlowLayout(FlowLayout.LEFT, 0, 0))
typePanel.add(mySpecifyTypeChb)
val myLinkContainer = new JPanel
myLinkContainer.setLayout(new FlowLayout(FlowLayout.LEFT, 0, 0))
typePanel.add(myLinkContainer)
val link = TypeAnnotationUtil.createTypeAnnotationsHLink(project, ScalaBundle.message("default.ta.settings"))
link.addHyperlinkListener(new HyperlinkListener() {
      override def hyperlinkUpdate(e: HyperlinkEvent): Unit = {
extensions.invokeLater {
mySpecifyTypeChb.setSelected(addTypeAnnotation(namedElement, expr))
}
}
})
myLinkContainer.add(link)
typePanel
}
private def setBalloonPanel(nameIsValid: Boolean): Unit = {
this.nameIsValid = nameIsValid
myChbPanel.setLayout(new VerticalFlowLayout)
myChbPanel.setBorder(null)
Seq(myVarCheckbox, setUpTypePanel()).filter(_ != null).foreach { chb =>
myChbPanel.add(chb)
chb.setEnabled(nameIsValid)
}
myLabel.setText(ScalaBundle.message("introduce.variable.identifier.is.not.valid"))
myLabel.setForeground(Color.RED)
myLabelPanel.setLayout(new BoxLayout(myLabelPanel, BoxLayout.X_AXIS))
myLabelPanel.add(Box.createHorizontalGlue())
myLabelPanel.add(myLabel)
myLabelPanel.add(Box.createHorizontalGlue())
if (!nameIsValid) myBalloonPanel add myLabelPanel
else myBalloonPanel add myChbPanel
}
private def resetBalloonPanel(nameIsValid: Boolean): Unit = {
def isBulkUpdate = myEditor.getDocument match {
case docEx: DocumentEx => docEx.isInBulkUpdate
case _ => false
}
if (myBalloon.isDisposed || isBulkUpdate) return
if (!nameIsValid) {
myBalloonPanel add myLabelPanel
myBalloonPanel remove myChbPanel
}
else {
myBalloonPanel add myChbPanel
myBalloonPanel remove myLabelPanel
}
Seq(myVarCheckbox, mySpecifyTypeChb) filter(_ != null) foreach (_.setEnabled(nameIsValid))
myBalloon.revalidate()
}
protected override def moveOffsetAfter(success: Boolean): Unit = {
try {
myBalloon.hide()
if (success) {
if (myExprMarker != null) {
val startOffset: Int = myExprMarker.getStartOffset
val elementAt: PsiElement = myFile.findElementAt(startOffset)
if (elementAt != null) {
myEditor.getCaretModel.moveToOffset(elementAt.getTextRange.getEndOffset)
}
else {
myEditor.getCaretModel.moveToOffset(myExprMarker.getEndOffset)
}
} else if (getDeclaration != null) {
val declaration = getDeclaration
myEditor.getCaretModel.moveToOffset(declaration.getTextRange.getEndOffset)
}
} else if (getDeclaration != null && !UndoManager.getInstance(myProject).isUndoInProgress) {
val revertInfo = myEditor.getUserData(ScalaIntroduceVariableHandler.REVERT_INFO)
if (revertInfo != null) {
extensions.inWriteAction {
myEditor.getDocument.replaceString(0, myFile.getTextLength, revertInfo.fileText)
}
myEditor.getCaretModel.moveToOffset(revertInfo.caretOffset)
myEditor.getScrollingModel.scrollToCaret(ScrollType.MAKE_VISIBLE)
}
}
}
finally {
import scala.collection.JavaConversions._
for (occurrenceMarker <- getOccurrenceMarkers) {
occurrenceMarker.dispose()
}
if (getExprMarker != null) getExprMarker.dispose()
}
}
protected override def getReferencesSearchScope(file: VirtualFile): SearchScope = {
new LocalSearchScope(myElementToRename.getContainingFile)
}
protected override def checkLocalScope(): PsiElement = {
val scope = new LocalSearchScope(myElementToRename.getContainingFile)
val elements: Array[PsiElement] = scope.getScope
PsiTreeUtil.findCommonParent(elements: _*)
}
protected override def startRename: StartMarkAction = {
StartMarkAction.start(myEditor, myProject, getCommandName)
}
override def finish(success: Boolean): Unit = {
myEditor.getDocument.removeDocumentListener(myCheckIdentifierListener)
if (mySpecifyTypeChb != null && !isEnumerator) ScalaApplicationSettings.getInstance.INTRODUCE_VARIABLE_EXPLICIT_TYPE = mySpecifyTypeChb.isSelected
try {
val named = namedElement(getDeclaration).orNull
val templateState: TemplateState = TemplateManagerImpl.getTemplateState(myEditor)
if (named != null && templateState != null) {
val occurrences = (for (i <- 0 until templateState.getSegmentsCount) yield templateState.getSegmentRange(i)).toArray
implicit val validator = ScalaVariableValidator(myFile, named, occurrences)
val reporter = new ValidationReporter(myProject, new BalloonConflictsReporter(myEditor))
reporter.isOK(named.name, replaceAll)
}
}
catch {
//templateState can contain null private fields
case _: NullPointerException =>
}
finally {
myEditor.getSelectionModel.removeSelection()
}
super.finish(success)
}
}
object ScalaInplaceVariableIntroducer {
def addTypeAnnotation(anchor: PsiElement, expression: ScExpression, fromDialogMode: Boolean = false): Boolean = {
if (fromDialogMode) {
ScalaApplicationSettings.getInstance.INTRODUCE_VARIABLE_EXPLICIT_TYPE
} else {
val isLocal = TypeAnnotationUtil.isLocal(anchor)
val visibility = if (!isLocal) TypeAnnotationUtil.Private else TypeAnnotationUtil.Public
val settings = ScalaCodeStyleSettings.getInstance(expression.getProject)
TypeAnnotationUtil.isTypeAnnotationNeeded(
TypeAnnotationUtil.requirementForProperty(isLocal, visibility, settings),
settings.OVERRIDING_PROPERTY_TYPE_ANNOTATION,
settings.SIMPLE_PROPERTY_TYPE_ANNOTATION,
        isOverride = false, // overriding is not possible in this refactoring
isSimple = TypeAnnotationUtil.isSimple(expression)
)
}
}
} | ilinum/intellij-scala | src/org/jetbrains/plugins/scala/lang/refactoring/introduceVariable/ScalaInplaceVariableIntroducer.scala | Scala | apache-2.0 | 17,952 |
package travisTest.travisstub
import scala.scalajs.js
import scala.scalajs.js.Dynamic.global
import scala.scalajs.test.JasmineTest
object ElementCreatorTest extends JasmineTest with logging {
// ElementCreator expects jquery to be present
global.importScripts("jquery-2.1.0.js")
describe("ElementCreator") {
it("should be able to create an element in the body") {
// create the element
ElementCreator.create()
// jquery would make this easier, but I wanted to
// only use pure html in the test itself
val body = global.document.getElementsByTagName("body")
.asInstanceOf[js.Array[js.Dynamic]].head
// the Scala.js DOM API would make this easier
expect(body.firstChild.tagName.toString == "H1").toBeTruthy
expect(body.firstChild.innerHTML.toString == "Test").toBeTruthy
}
}
} | IridiumMaster/scalaJSStub | src/test/scala/travisTest/travisstub/ElementCreatorTest.scala | Scala | apache-2.0 | 868 |
package gapt.formats.ivy
import gapt.proofs._
import gapt.formats.lisp.SExpression
import gapt.expr._
import gapt.expr.formula.fol.FOLAtom
import gapt.expr.formula.fol.FOLConst
import gapt.expr.formula.fol.FOLTerm
import gapt.expr.subst.FOLSubstitution
/**
* ** Implementation of Ivy's Resolution Calculus ***
 * Ivy has its own variation of resolution which only resolves over identical literals but has an instantiation rule.
 * It should be possible to display the proofs in prooftool, but a translation to Robinson resolution is necessary for
* many applications.
*/
sealed trait IvyResolutionProof extends SequentProof[FOLAtom, IvyResolutionProof] {
val id: String
val clause_exp: SExpression
// FIXME: provide a SequentProof trait without SequentConnectors
override def mainIndices: Seq[SequentIndex] = ???
override def occConnectors: Seq[SequentConnector] = ???
override def auxIndices: Seq[Seq[SequentIndex]] = ???
}
case class InitialClause(
id: String,
clause_exp: SExpression,
conclusion: FOLClause ) extends IvyResolutionProof {
override def immediateSubProofs = Seq()
}
case class Instantiate(
id: String,
clause_exp: SExpression,
substitution: FOLSubstitution,
conclusion: FOLClause, t: IvyResolutionProof ) extends IvyResolutionProof {
override def immediateSubProofs = Seq( t )
}
case class Flip(
id: String,
clause_exp: SExpression, flipped: SequentIndex,
conclusion: FOLClause, t: IvyResolutionProof ) extends IvyResolutionProof {
override def immediateSubProofs = Seq( t )
}
case class Propositional(
id: String,
clause_exp: SExpression,
conclusion: FOLClause,
t: IvyResolutionProof )
extends IvyResolutionProof {
override def immediateSubProofs = Seq( t )
}
case class Paramodulation(
id: String,
clause_exp: SExpression,
position: List[Int],
eq: SequentIndex,
lit: SequentIndex,
newLit: FOLAtom,
    is_demodulation: Boolean, // whether the equation is applied left-to-right or right-to-left
conclusion: FOLClause,
t1: IvyResolutionProof,
t2: IvyResolutionProof )
extends IvyResolutionProof {
override def immediateSubProofs = Seq( t1, t2 )
}
case class Resolution(
id: String,
clause_exp: SExpression,
lit1: SequentIndex, //resolved literal in t1
lit2: SequentIndex, //resolved literal in t2
conclusion: FOLClause,
t1: IvyResolutionProof,
t2: IvyResolutionProof )
extends IvyResolutionProof {
require( t1.conclusion( lit1 ) == t2.conclusion( lit2 ) )
require( !( lit1 sameSideAs lit2 ) )
override def immediateSubProofs = Seq( t1, t2 )
}
case class NewSymbol(
id: String,
clause_exp: SExpression,
lit: SequentIndex,
new_symbol: FOLConst,
replacement_term: FOLTerm,
conclusion: FOLClause,
t: IvyResolutionProof ) extends IvyResolutionProof {
override def immediateSubProofs = Seq( t )
}
| gapt/gapt | core/src/main/scala/gapt/formats/ivy/IvyResolution.scala | Scala | gpl-3.0 | 3,134 |
package spinoco.protocol.mgcp
import spinoco.protocol.mgcp.MGCPResponseCode.{Provisional, Success}
object MGCPResponseCode {
case class Success(code:Int) extends MGCPResponseCode
case class Provisional(code: Int) extends MGCPResponseCode
case class Error(code: Int) extends MGCPResponseCode
case class PackageSpecific(code: Int) extends MGCPResponseCode
// Acknowledgement of response
val Ack = Success(0)
// The transaction is currently being executed. An actual
// completion message will follow later.
val InProgress = Provisional(100)
// The transaction has been queued for execution. An actual
// completion message will follow later.
val Queued = Provisional(101)
// The requested transaction was executed normally. This return
// code can be used for a successful response to any command.
val Ok = Success(200)
// The connection was deleted. This return code can only be used
// for a successful response to a DeleteConnection command.
val DeletedSuccessfully = Success(250)
// The transaction could not be executed, due to some unspecified
// transient error.
val TransientError = Error(400)
//The phone is already off hook.
val AlreadyOffHook = Error(401)
// The phone is already on hook.
val AlreadyOnHook = Error(402)
// The transaction could not be executed, because the endpoint does
// not have sufficient resources at this time.
val InsufficientResources = Error(403)
// Insufficient bandwidth at this time.
val InsufficientBandwidth = Error(404)
// The transaction could not be executed, because the endpoint is "restarting".
val RestartInProgress = Error(405)
// Transaction time-out. The transaction did not complete in a
// reasonable period of time and has been aborted.
val TransactionTimeout = Error(406)
// Transaction aborted. The transaction was aborted by some
// external action, e.g., a ModifyConnection command aborted by a
// DeleteConnection command.
val TransactionAborted = Error(407)
// The transaction could not be executed because of internal
// overload.
val SystemOverloaded = Error(409)
// No endpoint available. A valid "any of" wildcard was used,
// however there was no endpoint available to satisfy the request.
val NoEndpointAvailable = Error(410)
// The transaction could not be executed, because the endpoint is
// unknown.
val EndpointNotFound = Error(500)
// The transaction could not be executed, because the endpoint is
// not ready. This includes the case where the endpoint is out-of-
// service.
val NotReady = Error(501)
// The transaction could not be executed, because the endpoint does
// not have sufficient resources (permanent condition).
val NoResources = Error(502)
// "All of" wildcard too complicated.
val WildcardTooComplicated = Error(503)
// 504 Unknown or unsupported command.
val Unsupported = Error(504)
// Unsupported RemoteConnectionDescriptor. This SHOULD be used when
// one or more mandatory parameters or values in the
// RemoteConnectionDescriptor is not supported.
val UnsupportedRemoteDescriptor = Error(505)
// Unable to satisfy both LocalConnectionOptions and
// RemoteConnectionDescriptor. This SHOULD be used when the
// LocalConnectionOptions and RemoteConnectionDescriptor contain one
// or more mandatory parameters or values that conflict with each
// other and/or cannot be supported at the same time (except for
// codec negotiation failure - see error code 534).
  val ConflictingDescriptor = Error(506)
// Unsupported functionality. Some unspecified functionality
// required to carry out the command is not supported. Note that
// several other error codes have been defined for specific areas of
// unsupported functionality (e.g. 508, 511, etc.), and this error
// code SHOULD only be used if there is no other more specific error
// code for the unsupported functionality.
val UnsupportedFunctionality = Error(507)
// Unknown or unsupported quarantine handling.
val UnsupportedQuarantine = Error(508)
// Error in RemoteConnectionDescriptor. This SHOULD be used when
// there is a syntax or semantic error in the
// RemoteConnectionDescriptor.
val ErrorInRemoteDescriptor = Error(509)
// The transaction could not be executed, because some unspecified
// protocol error was detected. Automatic recovery from such an
// error will be very difficult, and hence this code SHOULD only be
// used as a last resort.
val UnspecifiedProtocol = Error(510)
// The transaction could not be executed, because the command
// contained an unrecognized extension. This code SHOULD be used
// for unsupported critical parameter extensions ("X+").
  val UnknownExtension = Error(511)
// The transaction could not be executed, because the gateway is not
// equipped to detect one of the requested events.
val UnsupportedEvent = Error(512)
// The transaction could not be executed, because the gateway is not
// equipped to generate one of the requested signals.
  val UnsupportedSignal = Error(513)
// The transaction could not be executed, because the gateway cannot
// send the specified announcement.
val UnsupportedAnnouncement = Error(514)
// The transaction refers to an incorrect connection-id (may have
// been already deleted).
val UnknownConnectionId = Error(515)
// The transaction refers to an unknown call-id, or the call-id
// supplied is incorrect (e.g., connection-id not associated with
// this call-id).
val UnknownCallId = Error(516)
// Unsupported or invalid mode.
val UnsupportedMode = Error(517)
// Unsupported or unknown package. It is RECOMMENDED to include a
// PackageList parameter with the list of supported packages in the
// response, especially if the response is generated by the Call
// Agent.
val UnknownPackage = Error(518)
// Endpoint does not have a digit map.
val UnsupportedDigitMap = Error(519)
// The transaction could not be executed, because the endpoint is
// "restarting". In most cases this would be a transient error, in
// which case, error code 405 SHOULD be used instead. The error
// code is only included here for backwards compatibility.
val EndpointRestarting = Error(520)
// Endpoint redirected to another Call Agent. The associated
// redirection behavior is only well-defined when this response is
// issued for a RestartInProgress command.
val RedirectedToOtherAgent = Error(521)
// No such event or signal. The request referred to an event or
// signal that is not defined in the relevant package (which could
// be the default package).
val UnknownEventOrSignal = Error(522)
// Unknown action or illegal combination of actions.
val UnsupportedAction = Error(523)
// Internal inconsistency in LocalConnectionOptions.
  val InternalInconsistency = Error(524)
// Unknown extension in LocalConnectionOptions. This code SHOULD be
// used for unsupported mandatory vendor extensions ("x+").
  val UnknownLocalConnectionOptionsExtension = Error(525)
// Insufficient bandwidth. In cases where this is a transient
// error, error code 404 SHOULD be used instead.
val InsufficientBandwidthPermanent = Error(526)
// Missing RemoteConnectionDescriptor.
val MissingRemoteDescriptor = Error(527)
// Incompatible protocol version.
val IncompatibleVersion = Error(528)
// Internal hardware failure.
val HardwareFailure = Error(529)
// CAS signaling protocol error.
val CASSignallingError = Error(530)
// Failure of a grouping of trunks (e.g., facility failure).
val TrunkFailure = Error(531)
// Unsupported value(s) in LocalConnectionOptions.
val UnsupportedValue = Error(532)
// Response too large.
val ResponseTooLarge = Error(533)
// Codec negotiation failure.
val CodecNegotiationFailure = Error(534)
// Packetization period not supported.
val UnsupportedPacketizationPeriod = Error(535)
// Unknown or unsupported RestartMethod.
val UnsupportedRestartMethod = Error(536)
// Unknown or unsupported digit map extension.
val UnsupportedDigitMapExtension = Error(537)
// Event/signal parameter error (e.g., missing, erroneous,
// unsupported, unknown, etc.).
val EventOrSignalParameterError = Error(538)
// Invalid or unsupported command parameter. This code SHOULD only
// be used when the parameter is neither a package or vendor
// extension parameter.
val UnsupportedCommandParameter = Error(539)
// Per endpoint connection limit exceeded.
val ConnectionLimitExceeded = Error(540)
// Invalid or unsupported LocalConnectionOptions. This code SHOULD
// only be used when the LocalConnectionOptions is neither a package
// nor a vendor extension LocalConnectionOptions.
val UnsupportedLocalConnectionOption = Error(541)
}
sealed trait MGCPResponseCode { self =>
val code: Int
/** yields true if the code is provisional **/
def isProvisional: Boolean = self.isInstanceOf[Provisional]
def isSuccess: Boolean = self.isInstanceOf[Success]
}
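// Usage sketch for the predicates on MGCPResponseCode, using only codes
// defined above.
object MGCPResponseCodeExample {
  def main(args: Array[String]): Unit = {
    val codes = List(MGCPResponseCode.InProgress, MGCPResponseCode.Ok, MGCPResponseCode.EndpointNotFound)
    codes.foreach { c =>
      // Prints 100: provisional=true ..., 200: success=true ..., 500: neither.
      println(s"${c.code}: provisional=${c.isProvisional}, success=${c.isSuccess}")
    }
  }
}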
| Spinoco/protocol | mgcp/src/main/scala/spinoco/protocol/mgcp/MGCPResponseCode.scala | Scala | mit | 9,074 |
/***********************************************************************
* Copyright (c) 2013-2018 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.convert
import java.io.{Closeable, IOException, InputStream}
import java.nio.charset.StandardCharsets
import com.typesafe.config.Config
import com.typesafe.scalalogging.LazyLogging
import org.apache.commons.io.IOUtils
import org.geotools.factory.Hints
import org.locationtech.geomesa.convert.ErrorMode.ErrorMode
import org.locationtech.geomesa.convert.ParseMode.ParseMode
import org.locationtech.geomesa.convert.Transformers._
import org.locationtech.geomesa.convert.ValidationMode.ValidationMode
import org.locationtech.geomesa.convert2.transforms.Expression
import org.locationtech.geomesa.convert2.transforms.Expression.LiteralNull
import org.locationtech.geomesa.features.ScalaSimpleFeature
import org.opengis.feature.simple.{SimpleFeature, SimpleFeatureType}
import scala.collection.JavaConversions._
import scala.collection.immutable.IndexedSeq
import scala.collection.mutable
import scala.collection.mutable.ArrayBuffer
@deprecated("Replaced with org.locationtech.geomesa.convert2.Field")
trait Field extends org.locationtech.geomesa.convert2.Field {
def name: String
def transform: Transformers.Expr
override def transforms: Option[Expression] = Option(transform)
override def eval(args: Array[Any])(implicit ec: EvaluationContext): Any = transform.eval(args)
}
/**
* Wrapper to present convert v2 fields as convert v1
*
* @param field v2 field
*/
case class FieldWrapper(field: org.locationtech.geomesa.convert2.Field) extends Field {
override def name: String = field.name
override def transform: Expr = field.transforms.map(ExpressionWrapper.apply).orNull
override def transforms: Option[Expression] = field.transforms
override def eval(args: Array[Any])(implicit ec: EvaluationContext): Any = field.eval(args)(ec)
}
@deprecated("Replaced with org.locationtech.geomesa.convert2.BasicField")
case class SimpleField(name: String, transform: Transformers.Expr) extends Field {
override val transforms: Option[Expression] = Option(transform)
override def toString: String = s"$name = $transform"
}
object StandardOption extends Enumeration {
type StandardOption = Value
@deprecated
val Validating = Value("validating")
@deprecated
val ValidationModeOpt = Value("validation-mode")
val ValidatorsOpt = Value("validators")
val ErrorModeOpt = Value("error-mode")
val LineModeOpt = Value("line-mode")
val ParseModeOpt = Value("parse-mode")
val VerboseOpt = Value("verbose")
implicit class StandardOptionValue(opt: Value) {
def path = s"options.$opt"
}
}
@deprecated("Replaced with org.locationtech.geomesa.convert.ErrorMode")
object ValidationMode extends Enumeration {
type ValidationMode = Value
val SkipBadRecords = Value(ErrorMode.SkipBadRecords.toString)
val RaiseErrors = Value(ErrorMode.RaiseErrors.toString)
val Default = SkipBadRecords
}
case class ConvertParseOpts(parseMode: ParseMode,
validator: SimpleFeatureValidator,
validationMode: ValidationMode,
verbose: Boolean)
@deprecated("Replaced with org.locationtech.geomesa.convert2.SimpleFeatureConverterFactory")
trait SimpleFeatureConverterFactory[I] {
def canProcess(conf: Config): Boolean
def buildConverter(sft: SimpleFeatureType, conf: Config): SimpleFeatureConverter[I]
}
@deprecated("Replaced with org.locationtech.geomesa.convert2.AbstractConverterFactory")
abstract class AbstractSimpleFeatureConverterFactory[I] extends SimpleFeatureConverterFactory[I] with LazyLogging {
override def canProcess(conf: Config): Boolean =
if (conf.hasPath("type")) conf.getString("type").equals(typeToProcess) else false
override def buildConverter(sft: SimpleFeatureType, conf: Config): SimpleFeatureConverter[I] = {
val idBuilder = buildIdBuilder(conf)
val fields = buildFields(conf)
val userDataBuilder = buildUserDataBuilder(conf)
val cacheServices = buildCacheService(conf)
val parseOpts = getParsingOptions(conf, sft)
buildConverter(sft, conf, idBuilder, fields, userDataBuilder, cacheServices, parseOpts)
}
protected def typeToProcess: String
protected def buildConverter(sft: SimpleFeatureType,
conf: Config,
idBuilder: Expr,
fields: IndexedSeq[Field],
userDataBuilder: Map[String, Expr],
cacheServices: Map[String, EnrichmentCache],
parseOpts: ConvertParseOpts): SimpleFeatureConverter[I]
protected def buildFields(conf: Config): IndexedSeq[Field] =
conf.getConfigList("fields").map(buildField).toIndexedSeq
protected def buildField(field: Config): Field =
SimpleField(field.getString("name"), Transformers.parseTransform(field.getString("transform")))
protected def buildIdBuilder(conf: Config): Expr = {
if (conf.hasPath("id-field")) {
Transformers.parseTransform(conf.getString("id-field"))
} else {
Transformers.parseTransform("null")
}
}
protected def buildUserDataBuilder(conf: Config): Map[String, Expr] = {
if (conf.hasPath("user-data")) {
conf.getConfig("user-data").entrySet.map { e =>
e.getKey -> Transformers.parseTransform(e.getValue.unwrapped().toString)
}.toMap
} else {
Map.empty
}
}
protected def buildCacheService(config: Config): Map[String, EnrichmentCache] = {
if(config.hasPath("caches")) {
val cacheConfig = config.getConfig("caches")
cacheConfig.root().keys.map { k =>
val specificConf = cacheConfig.getConfig(k)
k -> EnrichmentCache(specificConf)
}.toMap
} else {
Map.empty
}
}
protected def getParsingOptions(conf: Config, sft: SimpleFeatureType): ConvertParseOpts = {
val verbose = if (conf.hasPath(StandardOption.VerboseOpt.path)) conf.getBoolean(StandardOption.VerboseOpt.path) else false
val opts = ConvertParseOpts(getParseMode(conf), getValidator(conf, sft), getValidationMode(conf), verbose = verbose)
logger.info(s"Using ParseMode ${opts.parseMode} with error mode ${opts.validationMode} and validator ${opts.validator.name}")
opts
}
// noinspection ScalaDeprecation
protected def getValidator(conf: Config, sft: SimpleFeatureType): SimpleFeatureValidator = {
val validators: Seq[String] =
if (conf.hasPath(StandardOption.Validating.path) && conf.hasPath(StandardOption.ValidatorsOpt.path)) {
// This is when you have the old deprecated key...
throw new IllegalArgumentException(s"Converter should not have both ${StandardOption.Validating.path}(deprecated) and " +
s"${StandardOption.ValidatorsOpt.path} config keys")
} else if (conf.hasPath(StandardOption.ValidatorsOpt.path)) {
conf.getStringList(StandardOption.ValidatorsOpt.path)
} else if (conf.hasPath(StandardOption.Validating.path)) {
logger.warn(s"Using deprecated validation key ${StandardOption.Validating.path}")
if (conf.getBoolean(StandardOption.Validating.path)) {
Seq("has-geo", "has-dtg")
} else {
Seq("none")
}
} else {
Seq("has-geo", "has-dtg")
}
val validator = SimpleFeatureValidator(validators)
validator.init(sft)
validator
}
protected def getParseMode(conf: Config): ParseMode =
if (conf.hasPath(StandardOption.ParseModeOpt.path)) {
val modeStr = conf.getString(StandardOption.ParseModeOpt.path)
try {
ParseMode.withName(modeStr)
} catch {
case _: NoSuchElementException => throw new IllegalArgumentException(s"Unknown parse mode $modeStr")
}
} else {
ParseMode.Default
}
protected def getValidationMode(conf: Config): ValidationMode = {
if (conf.hasPath(StandardOption.ErrorModeOpt.path)) {
val modeStr = conf.getString(StandardOption.ErrorModeOpt.path)
try { ValidationMode.withName(modeStr) } catch {
case _: NoSuchElementException => throw new IllegalArgumentException(s"Unknown error mode $modeStr")
}
} else if (conf.hasPath(StandardOption.ValidationModeOpt.path)) {
logger.warn(s"Using deprecated option ${StandardOption.ValidationModeOpt}. Prefer ${StandardOption.ErrorModeOpt}")
val modeStr = conf.getString(StandardOption.ValidationModeOpt.path)
try { ValidationMode.withName(modeStr) } catch {
case _: NoSuchElementException => throw new IllegalArgumentException(s"Unknown validation mode $modeStr")
}
} else {
ValidationMode.Default
}
}
}
@deprecated("Replaced with org.locationtech.geomesa.convert2.SimpleFeatureConverter")
trait SimpleFeatureConverter[I] extends Closeable {
/**
* Result feature type
*/
def targetSFT: SimpleFeatureType
/**
* Enrichment caches
*
* @return
*/
def caches: Map[String, EnrichmentCache]
/**
* Stream process inputs into simple features
*/
def processInput(is: Iterator[I], ec: EvaluationContext = createEvaluationContext()): Iterator[SimpleFeature]
def processSingleInput(i: I, ec: EvaluationContext = createEvaluationContext()): Iterator[SimpleFeature]
def process(is: InputStream, ec: EvaluationContext = createEvaluationContext()): Iterator[SimpleFeature]
/**
* Creates a context used for processing
*/
def createEvaluationContext(globalParams: Map[String, Any] = Map.empty,
counter: Counter = new DefaultCounter): EvaluationContext = {
val keys = globalParams.keys.toIndexedSeq
val values = keys.map(globalParams.apply).toArray
EvaluationContext(keys, values, counter, caches)
}
override def close(): Unit = {}
}
@deprecated("Replaced with org.locationtech.geomesa.convert2.SimpleFeatureConverter")
object SimpleFeatureConverter {
type Dag = scala.collection.mutable.Map[Field, Set[Field]]
/**
* Add the dependencies of a field to a graph
*
* @param field field to add
* @param fieldMap field lookup map
* @param dag graph
*/
def addDependencies(field: Field, fieldMap: Map[String, Field], dag: Dag): Unit = {
if (!dag.contains(field)) {
val deps = Option(field.transform).toSeq.flatMap(_.dependenciesOf(Set(field), fieldMap)).toSet
dag.put(field, deps)
deps.foreach(addDependencies(_, fieldMap, dag))
}
}
/**
* Returns vertices in topological order.
*
* Note: will cause an infinite loop if there are circular dependencies
*
* @param dag graph
* @return ordered vertices
*/
def topologicalOrder(dag: Dag): IndexedSeq[Field] = {
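    // Kahn-style ordering via a work queue: a field is appended once all of
    // its dependencies have been appended; otherwise it is re-queued.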
val res = ArrayBuffer.empty[Field]
val remaining = dag.keys.to[scala.collection.mutable.Queue]
while (remaining.nonEmpty) {
val next = remaining.dequeue()
if (dag(next).forall(res.contains)) {
res.append(next)
} else {
remaining.enqueue(next) // put at the back of the queue
}
}
res.toIndexedSeq
}
}
/**
* Base trait to create a simple feature converter
*/
@deprecated("Replaced with org.locationtech.geomesa.convert2.AbstractConverter")
trait ToSimpleFeatureConverter[I] extends SimpleFeatureConverter[I] with LazyLogging {
def targetSFT: SimpleFeatureType
def inputFields: Seq[Field]
def idBuilder: Expr
def userDataBuilder: Map[String, Expr]
def caches: Map[String, EnrichmentCache]
def fromInputType(i: I, ec: EvaluationContext): Iterator[Array[Any]]
def parseOpts: ConvertParseOpts
protected val validate: (SimpleFeature, EvaluationContext) => SimpleFeature =
(sf: SimpleFeature, ec: EvaluationContext) => {
if (parseOpts.validator.validate(sf)) { sf } else {
val msg = s"Invalid SimpleFeature on line ${ec.counter.getLineCount}: ${parseOpts.validator.lastError}"
if (parseOpts.validationMode == ValidationMode.RaiseErrors) {
throw new IOException(msg)
} else {
logger.debug(msg)
null
}
}
}
protected val requiredFields: IndexedSeq[Field] = {
import SimpleFeatureConverter.{addDependencies, topologicalOrder}
val fieldNameMap = inputFields.map(f => (f.name, f)).toMap
val dag = scala.collection.mutable.Map.empty[Field, Set[Field]]
// compute only the input fields that we need to deal with to populate the simple feature
targetSFT.getAttributeDescriptors.foreach { ad =>
fieldNameMap.get(ad.getLocalName).foreach(addDependencies(_, fieldNameMap, dag))
}
// add id field and user data deps - these will be evaluated last so we only need to add their deps
val others = (userDataBuilder.values.toSeq :+ idBuilder).flatMap(_.dependenciesOf(Set.empty, fieldNameMap))
others.foreach(addDependencies(_, fieldNameMap, dag))
// use a topological ordering to ensure that dependencies are evaluated before the fields that require them
topologicalOrder(dag)
}
protected val requiredFieldsCount: Int = requiredFields.length
protected val requiredFieldsIndices: IndexedSeq[Int] = requiredFields.map(f => targetSFT.indexOf(f.name))
/**
* Convert input values into a simple feature with attributes
*/
def convert(t: Array[Any], ec: EvaluationContext): SimpleFeature = {
val sfValues = Array.ofDim[AnyRef](targetSFT.getAttributeCount)
var i = 0
while (i < requiredFieldsCount) {
try {
ec.set(i, requiredFields(i).eval(t)(ec))
} catch {
case e: Exception =>
val msg = if (parseOpts.verbose) {
val valuesStr = if (t.length > 0) t.tail.mkString(", ") else ""
s"Failed to evaluate field '${requiredFields(i).name}' " +
s"on line ${ec.counter.getLineCount} using values:\\n" +
s"${t.headOption.orNull}\\n[$valuesStr]" // head is the whole record
} else {
s"Failed to evaluate field '${requiredFields(i).name}' on line ${ec.counter.getLineCount}"
}
if (parseOpts.validationMode == ValidationMode.SkipBadRecords) {
if (parseOpts.verbose) { logger.debug(msg, e) } else { logger.debug(msg) }
return null
} else {
throw new IOException(msg, e)
}
}
val sftIndex = requiredFieldsIndices(i)
if (sftIndex != -1) {
sfValues.update(sftIndex, ec.get(i).asInstanceOf[AnyRef])
}
i += 1
}
val sf = buildFeature(t, sfValues, ec)
validate(sf, ec)
}
private def buildFeature(t: Array[Any], sfValues: Array[AnyRef], ec: EvaluationContext): SimpleFeature = {
val sf = idBuilder match {
case ExpressionWrapper(LiteralNull) => new ScalaSimpleFeature(targetSFT, "", sfValues) // empty feature id will be replaced with an auto-gen one
case _ =>
val id = idBuilder.eval(t)(ec).asInstanceOf[String]
val res = new ScalaSimpleFeature(targetSFT, id, sfValues)
res.getUserData.put(Hints.USE_PROVIDED_FID, java.lang.Boolean.TRUE)
res
}
userDataBuilder.foreach { case (k, v) => sf.getUserData.put(k, v.eval(t)(ec).asInstanceOf[AnyRef]) }
sf
}
/**
* Process a single input (e.g. line)
*/
override def processSingleInput(i: I, ec: EvaluationContext): Iterator[SimpleFeature] = {
ec.clear()
ec.counter.incLineCount()
val attributes = try { fromInputType(i, ec) } catch {
case e: Exception =>
logger.warn(s"Failed to parse input '$i'", e)
ec.counter.incFailure()
Iterator.empty
}
attributes.flatMap { a =>
convert(a, ec) match {
case null => ec.counter.incFailure(); Iterator.empty
case sf => ec.counter.incSuccess(); Iterator.single(sf)
}
}
}
override def createEvaluationContext(globalParams: Map[String, Any], counter: Counter): EvaluationContext = {
val globalKeys = globalParams.keys.toSeq
val names = requiredFields.map(_.name) ++ globalKeys
val values = Array.ofDim[Any](names.length)
// note, globalKeys are maintained even through EvaluationContext.clear()
globalKeys.zipWithIndex.foreach { case (k, i) => values(requiredFields.length + i) = globalParams(k) }
new EvaluationContextImpl(names, values, counter, caches)
}
override def processInput(is: Iterator[I], ec: EvaluationContext): Iterator[SimpleFeature] = {
parseOpts.parseMode match {
case ParseMode.Incremental =>
is.flatMap(i => processSingleInput(i, ec))
case ParseMode.Batch =>
val ret = mutable.ListBuffer.empty[SimpleFeature]
is.foreach(i => ret ++= processSingleInput(i, ec))
ret.iterator
}
}
}
@deprecated("Replaced with org.locationtech.geomesa.convert2.AbstractConverter")
trait LinesToSimpleFeatureConverter extends ToSimpleFeatureConverter[String] {
override def process(is: InputStream, ec: EvaluationContext): Iterator[SimpleFeature] =
processInput(IOUtils.lineIterator(is, StandardCharsets.UTF_8.displayName), ec)
}
| ddseapy/geomesa | geomesa-convert/geomesa-convert-common/src/main/scala/org/locationtech/geomesa/convert/SimpleFeatureConverterFactory.scala | Scala | apache-2.0 | 17,434 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.carbondata.spark
import org.apache.carbondata.core.constants.CarbonCommonConstants
/**
* Contains all options for Spark data source
*/
class CarbonOption(options: Map[String, String]) {
def tableIdentifier: String = options.getOrElse("tableName", s"$dbName.$tableName")
def dbName: String = options.getOrElse("dbName", CarbonCommonConstants.DATABASE_DEFAULT_NAME)
def tableName: String = options.getOrElse("tableName", "default_table")
def tablePath: String = s"$dbName/$tableName"
def tableId: String = options.getOrElse("tableId", "default_table_id")
def partitionCount: String = options.getOrElse("partitionCount", "1")
def partitionClass: String = {
options.getOrElse("partitionClass",
"org.apache.carbondata.spark.partition.api.impl.SampleDataPartitionerImpl")
}
def tempCSV: Boolean = options.getOrElse("tempCSV", "true").toBoolean
def compress: Boolean = options.getOrElse("compress", "false").toBoolean
def singlePass: Boolean = options.getOrElse("single_pass", "false").toBoolean
def dictionaryInclude: Option[String] = options.get("dictionary_include")
def dictionaryExclude: Option[String] = options.get("dictionary_exclude")
def tableBlockSize: Option[String] = options.get("table_blocksize")
def bucketNumber: Int = options.getOrElse("bucketnumber", "0").toInt
def bucketColumns: String = options.getOrElse("bucketcolumns", "")
def isBucketingEnabled: Boolean = options.contains("bucketcolumns") &&
options.contains("bucketnumber")
def toMap: Map[String, String] = options
}
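// Usage sketch. Note that `tableIdentifier` returns the raw "tableName"
// option when it is present and only falls back to "dbName.tableName"
// otherwise.
object CarbonOptionExample {
  def main(args: Array[String]): Unit = {
    val opts = new CarbonOption(Map("dbName" -> "sales", "tableName" -> "orders", "table_blocksize" -> "256"))
    println(opts.tableIdentifier) // "orders" (the raw option wins)
    println(opts.tablePath)       // "sales/orders"
    println(opts.tableBlockSize)  // Some("256")
  }
}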
| ksimar/incubator-carbondata | integration/spark-common/src/main/scala/org/apache/carbondata/spark/CarbonOption.scala | Scala | apache-2.0 | 2,411 |
/*
* Copyright 2019 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.play.graphite
import java.util.concurrent.TimeUnit._
import com.codahale.metrics.graphite.{Graphite, GraphiteReporter}
import com.codahale.metrics.{MetricFilter, SharedMetricRegistries}
import play.api.{Application, Configuration, GlobalSettings, Logger}
@deprecated("Use DI", "-")
trait GraphiteConfig extends GlobalSettings {
def microserviceMetricsConfig(implicit app: Application) : Option[Configuration]
override def onStart(app: Application) {
super.onStart(app)
if (enabled(app)) {
startGraphite(app)
}
}
override def onStop(app: Application) {
super.onStop(app)
}
private def enabled(app: Application) : Boolean = metricsPluginEnabled(app) && graphitePublisherEnabled(app)
private def metricsPluginEnabled(app: Application) : Boolean = app.configuration.getBoolean("metrics.enabled").getOrElse(false)
private def graphitePublisherEnabled(app: Application) : Boolean = microserviceMetricsConfig(app).flatMap(_.getBoolean("graphite.enabled")).getOrElse(false)
private def registryName(app: Application) = app.configuration.getString("metrics.name").getOrElse("default")
private def startGraphite(implicit app: Application) {
Logger.info("Graphite metrics enabled, starting the reporter")
val metricsConfig = microserviceMetricsConfig.getOrElse(throw new Exception("The application does not contain required metrics configuration"))
val graphite = new Graphite(
metricsConfig.getString("graphite.host").getOrElse("graphite"),
metricsConfig.getInt("graphite.port").getOrElse(2003)
)
    val prefix = metricsConfig.getString("graphite.prefix").getOrElse(s"tax.${app.configuration.getString("appName").getOrElse("application")}") // avoid interpolating the raw Option
val reporter = GraphiteReporter.forRegistry(
SharedMetricRegistries.getOrCreate(registryName(app)))
.prefixedWith(s"$prefix.${java.net.InetAddress.getLocalHost.getHostName}")
.convertRatesTo(SECONDS)
.convertDurationsTo(MILLISECONDS)
.filter(MetricFilter.ALL)
.build(graphite)
reporter.start(metricsConfig.getLong("graphite.interval").getOrElse(10L), SECONDS)
}
}
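// Illustrative configuration shape consumed by startGraphite above. The
// graphite subtree location is an assumption: it depends on what the concrete
// microserviceMetricsConfig implementation returns.
//
//   appName = "my-service"
//   metrics.enabled = true
//   metrics.name = "default"
//   microservice.metrics.graphite {
//     enabled = true
//     host = "graphite"
//     port = 2003
//     prefix = "tax.my-service"
//     interval = 10   # seconds between reports
//   }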
| hmrc/play-graphite | src/main/scala/uk/gov/hmrc/play/graphite/GraphiteConfig.scala | Scala | apache-2.0 | 2,733 |
/* Copyright (C) 2008-2014 University of Massachusetts Amherst.
This file is part of "FACTORIE" (Factor graphs, Imperative, Extensible)
http://factorie.cs.umass.edu, http://github.com/factorie
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */
package cc.factorie.la
import cc.factorie.maths
import cc.factorie.util.{DenseDoubleSeq, IntSeq, RangeIntSeq, DoubleSeq}
trait DenseTensor extends Tensor with TensorWithMutableDefaultValue with DenseDoubleSeq with Serializable {
protected def _initialArray: Array[Double] = new Array[Double](length)
  private var __default: Double = 0.0
  private var __values = _initialArray
  if (__default != 0.0) java.util.Arrays.fill(__values, __default)
override def defaultValue: Double = __default
def defaultValue_=(v:Double): Unit = __default = v
protected def _values = __values
protected def _valuesSize: Int = __values.size
protected def _resetValues(s:Int): Unit = __values = new Array[Double](s)
protected def _setArray(a:Array[Double]): Unit = { assert(a.length == length); __values = a }
def isDense = true
def activeDomain: IntSeq = new RangeIntSeq(0, length)
def apply(i:Int): Double = __values(i)
def activeDomainSize = length
override def update(i:Int, v:Double): Unit = __values(i) = v
override def zero(): Unit = java.util.Arrays.fill(__values, 0.0)
override def asArray = __values
override def *=(d: Double) = {
val myValues = __values
val len = myValues.length; var i = 0
while (i < len) { myValues(i) *= d; i += 1 }
}
override def +=(i:Int, incr:Double): Unit = __values(i) += incr
override def :=(ds:DoubleSeq): Unit = ds match {
case ds:DenseTensor => System.arraycopy(ds.__values, 0, __values, 0, length)
case ds:DoubleSeq => super.:=(ds)
}
def fill(f: ()=>Double): this.type = { var i = 0; val len = length; while (i < len) { __values(i) = f(); i += 1 }; this }
@deprecated("Use fill() instead.", "Before 2014-11-17")
def initializeRandomly(mean: Double = 0.0, variance: Double = 1.0)(implicit rng: scala.util.Random): Unit = { (0 until length).foreach(i => _values(i) = rng.nextGaussian()*variance + mean ) }
def forallActiveElements(f:(Int,Double)=>Boolean): Boolean = forallElements(f)
override def :=(a:Array[Double]): Unit = { require(a.length == length, "Expected length="+length+" but got "+a.length); System.arraycopy(a, 0, _values, 0, a.length) }
override def :=(a:Array[Double], offset:Int): Unit = System.arraycopy(a, offset, __values, 0, length)
var hasLogged = false
override def dot(t2:DoubleSeq): Double = t2 match {
case t2:SingletonBinaryTensor => apply(t2.singleIndex)
case t2:SingletonTensor => apply(t2.singleIndex) * t2.singleValue
case t2:DenseTensor => {
val myValues = __values
val otherValues = t2.__values
val len = length; assert(len == t2.length); var result = 0.0; var i = 0
while (i < len) { result += myValues(i) * otherValues(i); i += 1 }
result
}
// case t2:SparseBinaryTensor => {
// var s = 0.0; t2.foreachActiveElement((i,v) => s += __values(i)); s
// }
// case t:UniformTensor => sum * t.uniformValue
// TODO Any other special cases here?
// case t2:SparseIndexedTensor => {var s = 0.0;t2.foreachActiveElement((i,v) => s += __values(i)*v);s}
case t: Tensor =>
// can we just do this? since dense things are easy for other tensors to dot against cause they can grab the array -luke
t dot this
case t2:DoubleSeq => { // TODO Consider removing this to catch inefficiency
if (!hasLogged) {
hasLogged = true
println("Warning: DenseTensor slow dot for type " + t2.getClass.getName)
}
val len = length; assert(len == t2.length); var result = 0.0; var i = 0
while (i < len) { result += apply(i) * t2(i); i += 1 }; result
}
}
override def +=(t:DoubleSeq, f:Double): Unit = t match {
case t: SingletonBinaryLayeredTensor2 => {
val i0 = t.singleIndex1
t.inner match {
case inner: SingletonTensor => {
val i = inner.singleIndex
this(t.singleIndex(i0, i)) += inner.singleValue * f
}
case inner: SparseBinaryTensorLike1 => {
var i = 0
val indices = inner.activeDomain
while (i < indices.length) {
this(t.singleIndex(i0, indices(i))) += f
i += 1
}
}
case inner: SparseIndexedTensor => {
inner.foreachActiveElement((i, v) => {
this(t.singleIndex(i0, i)) += v * f
})
}
case _ => sys.error(t.inner.getClass.getName + " doesn't match")
}
}
case t:SingletonBinaryTensor => __values(t.singleIndex) += f
case t:SingletonTensor => __values(t.singleIndex) += f * t.singleValue
case t:SparseBinaryTensor => t.=+(__values, f)
case t:DenseTensor => {
val myValues = __values
val otherValues = t.__values
val len = t.length; var i = 0
while (i < len) { myValues(i) += f * otherValues(i); i += 1 }
}
case t:UniformTensor => {
val myValues = __values
val len = length; val u = t.uniformValue * f; var i = 0
while (i < len) { myValues(i) += u; i += 1 }
}
case t:TensorTimesScalar => this.+=(t.tensor, t.scalar) //{ t.tensor.activeDomain.foreach(i => this(i) += t(i)*t.scalar) }
case t:Outer1Tensor2 => {
val ff = f*t.scale
require(this.isInstanceOf[Tensor2]) // Makes sure rank matches!
t =+ (__values, ff)
}
case t:Tensor => t.=+(this.asArray, f)
}
/** Increment into this DenseTensor at an offset. */
def +=(t:DoubleSeq, offset:Int, f:Double): Unit = t match {
case t:SingletonBinaryTensor => __values(offset + t.singleIndex) += f
case t:SingletonTensor => __values(offset + t.singleIndex) += f * t.singleValue
case t:SparseBinaryTensorLike1 => t.=+(__values, offset, f)
case t:DenseTensor => {
val myValues = __values
val otherValues = t.__values
val len = t.length; var i = 0
while (i < len) { myValues(i+offset) += f * otherValues(i); i += 1 }
}
}
// A little faster than the MutableDoubleSeq implementation because it can access the __values array directly
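  // In-place softmax: x(i) := exp(x(i) - logSumExp(x)), so the entries sum to
  // one afterwards; the returned value is the log normalizer logSumExp(x).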
override def expNormalize(): Double = {
    val sum = maths.sumLogProbs(this)
this -= sum
var i = 0
while (i < length) {
this(i) = math.exp(this(i))
i += 1
}
sum
}
def euclideanDistance(t:DenseTensor): Double = {
var d = 0.0
val len = length; var i = 0; while (i < len) {
val diff = __values(i) - t.__values(i)
d += diff * diff
i += 1
}
math.sqrt(d)
}
}
| patverga/factorie | src/main/scala/cc/factorie/la/DenseTensor.scala | Scala | apache-2.0 | 7,140 |
/**
* © 2019 Refinitiv. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the “License”); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an “AS IS” BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cmwell.util.http
import java.net.URLEncoder.encode
object StringPath {
import scala.language.implicitConversions
implicit def stringPath2String(sp: StringPath): String = sp.url
  //e.g. StringPath("http://localhost:9000")
  def apply(withProtocol: String) = new StringPath(withProtocol)
  //e.g. StringPath.host("localhost:9000")
  def host(domainAndPort: String) = new StringPath("http://" + domainAndPort)
  //e.g. StringPath.host("localhost",9000)
  def host(domain: String, port: Int) = new StringPath("http://" + domain + s":$port")
  //e.g. StringPath.sHost("localhost:9000")
  def sHost(domainAndPort: String) = new StringPath("https://" + domainAndPort)
  //e.g. StringPath.sHost("localhost",9000)
  def sHost(domain: String, port: Int) = new StringPath("https://" + domain + s":$port")
}
class StringPath private (val url: String) {
def /(pathPart: String) = new StringPath(url + s"/${encode(pathPart, "UTF-8")}")
def h(fragment: String) = new StringPath(url + s"#${encode(fragment, "UTF-8")}")
@inline def ⋕(fragment: String) =
h(fragment) // other hash unicode look-alikes: '⌗','♯' ('#' - %23, is illegal as a method name in scala...)
}
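// Usage sketch: StringPath.host("localhost", 9000) / "api" / "users"
// yields "http://localhost:9000/api/users" (implicitly convertible to String).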
| dudi3001/CM-Well | server/cmwell-util/src/main/scala/cmwell/util/http/StringPath.scala | Scala | apache-2.0 | 1,806 |
package fpinscala.laziness
import scala.Option
trait Stream[+A] {
def uncons: Option[(A, Stream[A])]
def isEmpty: Boolean = uncons.isEmpty
def foldRight[B](z: => B)(f: (A, => B) => B): B =
uncons match {
case Some((h, t)) => f(h, t.foldRight(z)(f))
case None => z
}
def exists(p: A => Boolean): Boolean =
foldRight(false)((a, b) => p(a) || b)
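  // Because foldRight is non-strict in its second argument, exists short-circuits:
  // e.g. (illustrative) Stream.from(1).exists(_ > 3) returns true after examining
  // only the first four elements, even though the stream is infinite.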
def takeWhile(p: A => Boolean): Stream[A] = uncons match {
case None => Stream.empty
case Some((head, tail)) => if (p(head)) Stream.cons(head, tail.takeWhile(p)) else Stream.empty
}
def takeWhileWithFold(p: A => Boolean): Stream[A] = foldRight(Stream.empty: Stream[A])((elem, acc) => {
print("."); if (p(elem)) Stream.cons(elem, acc) else Stream.empty[A]
})
def forAll(p: A => Boolean): Boolean = foldRight(true)({
case (value, status) => p(value) && status
})
def toList: List[A] = foldRight(List.empty: List[A])((elem, accumulator) => elem :: accumulator)
def take(n: Int): Stream[A] = if (n == 0) Stream.empty
else uncons match {
case None => Stream.empty
case Some((head, tail)) => Stream.cons(head, tail.take(n - 1))
}
def asString: String = uncons match {
case Some((h, t)) => h.toString
case None => "NONE"
}
def takeWithFold(n: Int): Stream[A] = {
type Accumulator = (Stream[A], Int)
foldRight((Stream.empty, n): Accumulator)((elem, accumulator) => {
print(".")
if (accumulator._2 > 0) (Stream.cons(elem, accumulator._1), accumulator._2 - 1) else (Stream.empty[A], 0)
}
)._1
}
def mapOrig[B](f: A => B): Stream[B] = uncons match {
case None => Stream.empty[B]
case Some((head, tail)) => Stream.cons(f(head), tail.mapOrig(f))
}
def map[B](f: A => B): Stream[B] = Stream.unfold(uncons)(_.map(tuple => (f(tuple._1), tuple._2.uncons)))
def mapViaUnfold[B](f: A => B): Stream[B] =
Stream.unfold(this)(_.uncons.map(tuple => (f(tuple._1), tuple._2)))
def append[B >: A](other: Stream[B]): Stream[B] = foldRight(other)((head, tail) => Stream.cons(head, tail))
def scanRightStefano[B](z: => B)(f: (A, => B) => B): Stream[B] = {
uncons match {
case Some((h, t)) => {
val tail: Stream[B] = t.scanRightStefano(z)(f)
tail.uncons match {
case Some((tailHead, tailTail)) => Stream.cons(f(h, tailHead), tail)
case None => Stream.cons(f(h, z), Stream.cons(z, Stream.empty))
}
}
case None => Stream.empty
}
}
def scanRightStefanoWithFold[B](z: => B)(f: (A, => B) => B): Stream[B] = {
foldRight((z, Stream(z)))(
(elem, accumulator) => {
Console.println(".")
val head = f(elem, accumulator._1)
(head, Stream.cons(head, accumulator._2))
}
)._2
}
def tails = scanRightStefano[Stream[A]](Stream.empty[A])((elem, accumulator) => Stream.cons(elem, accumulator))
}
object Stream {
def empty[A]: Stream[A] =
new Stream[A] {
def uncons = None
}
def cons[A](hd: => A, tl: => Stream[A]): Stream[A] =
new Stream[A] {
lazy val uncons = Some((hd, tl))
}
def apply[A](as: A*): Stream[A] =
if (as.isEmpty) empty
else cons(as.head, apply(as.tail: _*))
val ones: Stream[Int] = cons(1, ones)
def consts[A](value: A): Stream[A] = cons(value, consts(value))
def from(n: Int): Stream[Int] = cons(n, from(n + 1))
val fibs: Stream[Int] = {
def fib(first: Int, second: Int): Stream[Int] = cons(first + second, fib(second, first + second))
cons(0, cons(1, fib(0, 1)))
}
def unfoldUberto[A, S](z: S)(f: S => Option[(A, S)]): Stream[A] = {
def unfoldr(state: S): Stream[A] = {
val next = f(state)
next match {
case None => Stream.empty
case Some(nextState) => cons(nextState._1, unfoldr(nextState._2))
}
}
unfoldr(z)
}
def unfold[A, S](z: S)(f: S => Option[(A, S)]): Stream[A] = f(z) match {
case None => Stream.empty
case Some(nextState) => cons(nextState._1, unfold(nextState._2)(f))
}
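  // Usage sketch: `ones` and `from` above can equivalently be written with unfold,
  // e.g. unfold(1)(n => Some((n, n + 1))) lazily produces 1, 2, 3, ...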
  def startsWith[A](s: Stream[A], s2: Stream[A]): Boolean = startsWithStefano(s, s2)
def startsWithStefano[A](stream: Stream[A], prefix: Stream[A]): Boolean = {
val streamTerminatedWithNone = stream.map(Some(_)).append(consts(None))
val prefixTerminatedWithNone = prefix.map(Some(_)).append(consts(None))
zip(streamTerminatedWithNone, prefixTerminatedWithNone) takeWhile ({
case (left, right) => left.isDefined || right.isDefined
}) forAll {
case (Some(left), Some(right)) => left == right
case (Some(_), None) => true
case _ => false
}
}
def zip[A, B](s: Stream[A], s2: Stream[B]): Stream[(A, B)] = unfold((s, s2))(
{
case (st1: Stream[A], st2: Stream[B]) => (st1.uncons, st2.uncons) match {
case (Some((st1h: A, st1t: Stream[A])), Some((st2h: B, st2t: Stream[B]))) => Some((st1h, st2h), (st1t, st2t))
case _ => None
}
}
)
} | galarragas/FpInScala | exercises/src/main/scala/fpinscala/laziness/Stream.scala | Scala | mit | 4,911 |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package wvlet.dataflow.api.v1
/**
*/
sealed trait TaskStatus {
def isDone: Boolean
def name: String = toString
}
object TaskStatus {
/**
* Task is queued at the coordinator
*/
case object QUEUED extends TaskStatus {
override def isDone: Boolean = false
}
/**
* Task is starting at the coordinator
*/
case object STARTING extends TaskStatus {
override def isDone: Boolean = false
}
/**
* Task is running at a worker
*/
case object RUNNING extends TaskStatus {
override def isDone: Boolean = false
}
case object FINISHED extends TaskStatus {
override def isDone: Boolean = true
}
case object FAILED extends TaskStatus {
override def isDone: Boolean = true
}
case object CANCELLED extends TaskStatus {
override def isDone: Boolean = true
}
def all: Seq[TaskStatus] = Seq(QUEUED, STARTING, RUNNING, FINISHED, FAILED, CANCELLED)
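  /**
   * Enables matching a status-name string; a small sketch:
   * {{{
   *   "RUNNING" match {
   *     case TaskStatus(status) => status.isDone // false
   *   }
   * }}}
   */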
def unapply(s: String): Option[TaskStatus] = {
all.find(_.name == s)
}
}
| wvlet/wvlet | wvlet-api/src/main/scala/wvlet/dataflow/api/v1/TaskStatus.scala | Scala | apache-2.0 | 1,558 |
package com.solarmosaic.client.utilityApi.json
import spray.json.DefaultJsonProtocol
trait JsonSupport extends DefaultJsonProtocol
with IsoDateTimeJsonSupport
with SnakeCaseJsonSupport
| solarmosaic/utility-api-client | src/main/scala/com/solarmosaic/client/utilityApi/json/JsonSupport.scala | Scala | mit | 191 |
/**
* Copyright (c) 2012 Alexey Aksenov [email protected]
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package scala.util
object HackDoggyCode {
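  // Reflectively invokes the compiler-generated trait setter for Properties.propFilename,
  // blanking it (presumably so the properties-file lookup cannot freeze at startup).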
def fixFreezeOnStartup {
Properties.getClass.getMethods.find(_.getName == "scala$util$PropertiesTrait$_setter_$propFilename_$eq").
foreach(_.invoke(Properties, ""))
}
} | ezh/digi-lib-ctrl | src/main/scala/scala/util/HackDoggyCode.scala | Scala | apache-2.0 | 844 |
package com.github.gdefacci.di.tests
import com.github.gdefacci.di.runtime.Bind
import com.github.gdefacci.di.runtime.AllBindings
import com.github.gdefacci.di.runtime.ApplicationScope
object samples2 {
trait Repository {
def conn: Connection
}
case class User(name: String)
case class Connection(user: User)
case class SqlRepo(val conn: Connection) extends Repository
case class TestRepo(admin: Boolean) extends Repository {
val conn = Connection(User(if (admin) "admin" else "guest"))
}
trait HttpClient
case class HttpClientImpl(user: User) extends HttpClient
case class TestHttpClient(timeout: Int) extends HttpClient
case class Service(val repository: Repository)
trait ServiceProvider {
def getService(usr: User): Service
}
abstract class AbstractServiceProvider(val b: Boolean) {
def getService(usr: User): Service
}
case class ServiceA(httpClient: HttpClient, repo: Repository)
object module1 {
def repo1(conn: Connection) = new SqlRepo(conn)
}
object module2A {
val repo1 = new TestRepo(true)
}
object module2B {
val repo1Binding = Bind[Repository, TestRepo]
}
object module2 {
def repo1(b: Boolean) = new TestRepo(b)
}
object module3 {
val mybool = true
}
object module4 {
val bindRepo = Bind[Repository, TestRepo]
val bindHttp = Bind[HttpClient, TestHttpClient]
val timeout = 10
}
case class ServiceDRepo(repo1:Repository, repo2:Repository)
object module6 {
case class TestRepo(admin: Boolean) extends Repository {
val conn = Connection(User(if (admin) "admin" else "guest"))
}
case class TestHttpClient(timeout: Int) extends HttpClient
val timeout = 10
}
case class Service1(val repository: Repository)
case class Service2(service:Service, service1:Service1)
class MyRepo extends Repository {
def conn: Connection = Connection(User("my"))
}
object module7 {
@ApplicationScope
val bindRepo = Bind[Repository, MyRepo]
}
class GItm()
class GCl[T](val v:T)
case class GenBiGCl(a:GCl[GItm], b:GCl[Boolean], c:GCl[GItm])
object module8 {
val b = true
val gitm = new GItm
@ApplicationScope
def createGCl[T](v:T):GCl[T] = new GCl[T](v)
}
}
| gdefacci/di | macros/src/test/scala/com/github/gdefacci/di/tests/samples2.scala | Scala | mit | 2,305 |
package ch.fram.medlineGeo.crunching.localize
import ch.fram.medlineGeo.models.Location
/**
* Created by alex on 14/09/15.
*/
package object geonames {
trait GeonamesEntity {
val id: Long
val name: String
}
case class GeonamesCountry(id: Long, iso: String, name: String) extends GeonamesEntity
case class GeonamesCity(id: Long, name: String, location: Location, population: Int) extends GeonamesEntity
}
| alexmasselot/medlineGeoBackend | app/ch/fram/medlineGeo/crunching/localize/geonames/package.scala | Scala | mit | 427 |
package com.twitter.finagle.httpx
import com.twitter.finagle.httpx.netty.Bijections
import com.twitter.finagle.netty3.{ChannelBufferBuf, BufChannelBuffer}
import com.twitter.util.Base64StringEncoder
import com.twitter.io.Buf
import java.net.URL
import org.jboss.netty.buffer.{ChannelBuffer, ChannelBuffers}
import org.jboss.netty.handler.codec.http.multipart.{DefaultHttpDataFactory, HttpPostRequestEncoder, HttpDataFactory}
import org.jboss.netty.handler.codec.http.{HttpRequest, HttpHeaders, HttpVersion, HttpMethod, DefaultHttpRequest}
import scala.annotation.implicitNotFound
import scala.collection.JavaConversions._
import scala.collection.mutable.ListBuffer
import Bijections._
/*
* HTML form element.
*/
sealed abstract class FormElement
/*
* HTML form simple input field.
*/
case class SimpleElement(name: String, content: String) extends FormElement
/*
* HTML form file input field.
*/
case class FileElement(name: String, content: Buf, contentType: Option[String] = None,
filename: Option[String] = None) extends FormElement
/**
* Provides a class for building [[org.jboss.netty.handler.codec.http.HttpRequest]]s.
* The main class to use is [[com.twitter.finagle.httpx.RequestBuilder]], as so
*
* {{{
* val getRequest = RequestBuilder()
* .setHeader(HttpHeaders.Names.USER_AGENT, "MyBot")
* .setHeader(HttpHeaders.Names.CONNECTION, HttpHeaders.Values.KEEP_ALIVE)
* .url(new URL("http://www.example.com"))
* .buildGet()
* }}}
*
 * The `RequestBuilder` requires the definition of `url`. In Scala,
 * this is statically type checked, and in Java the lack of a url
 * causes a runtime error.
 *
 * The `buildGet`, `buildHead`, `buildPut`, and `buildPost` methods use an implicit argument
 * to statically typecheck the builder (to ensure completeness, see above).
 * The Java compiler cannot provide such an implicit, so we provide separate
 * functions in Java to accomplish this. Thus, the Java code for the
* above is
*
* {{{
* HttpRequest getRequest =
* RequestBuilder.safeBuildGet(
* RequestBuilder.create()
* .setHeader(HttpHeaders.Names.USER_AGENT, "MyBot")
* .setHeader(HttpHeaders.Names.CONNECTION, HttpHeaders.Values.KEEP_ALIVE)
* .url(new URL("http://www.example.com")))
* }}}
*
 * Overall RequestBuilder is pretty barebones. It does provide certain protocol-level support
 * for more involved requests. For example, it supports easy creation of POST requests to submit
 * multipart web forms with `buildFormPost(multipart = true)` and default form posts with `buildFormPost()`.
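 *
 * For instance, a simple (non-multipart) form post can be built as follows
 * (the url and field values here are illustrative only):
 *
 * {{{
 * val formPost = RequestBuilder()
 *   .url(new URL("http://www.example.com/submit"))
 *   .addFormElement("user" -> "alice", "score" -> "42")
 *   .buildFormPost()
 * }}}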
*/
/**
* Factory for [[com.twitter.finagle.httpx.RequestBuilder]] instances
*/
object RequestBuilder {
@implicitNotFound("Http RequestBuilder is not correctly configured: HasUrl (exp: Yes): ${HasUrl}, HasForm (exp: Nothing) ${HasForm}.")
private trait RequestEvidence[HasUrl, HasForm]
private object RequestEvidence {
implicit object FullyConfigured extends RequestEvidence[RequestConfig.Yes, Nothing]
}
@implicitNotFound("Http RequestBuilder is not correctly configured for form post: HasUrl (exp: Yes): ${HasUrl}, HasForm (exp: Yes): ${HasForm}.")
private trait PostRequestEvidence[HasUrl, HasForm]
private object PostRequestEvidence {
implicit object FullyConfigured extends PostRequestEvidence[RequestConfig.Yes, RequestConfig.Yes]
}
type Complete = RequestBuilder[RequestConfig.Yes, Nothing]
type CompleteForm = RequestBuilder[RequestConfig.Yes, RequestConfig.Yes]
def apply() = new RequestBuilder()
/**
* Used for Java access.
*/
def create() = apply()
/**
* Provides a typesafe `build` with content for Java.
*/
def safeBuild(builder: Complete, method: Method, content: Option[Buf]): Request =
builder.build(method, content)(RequestEvidence.FullyConfigured)
/**
* Provides a typesafe `buildGet` for Java.
*/
def safeBuildGet(builder: Complete): Request =
builder.buildGet()(RequestEvidence.FullyConfigured)
/**
* Provides a typesafe `buildHead` for Java.
*/
def safeBuildHead(builder: Complete): Request =
builder.buildHead()(RequestEvidence.FullyConfigured)
/**
* Provides a typesafe `buildDelete` for Java.
*/
def safeBuildDelete(builder: Complete): Request =
builder.buildDelete()(RequestEvidence.FullyConfigured)
/**
* Provides a typesafe `buildPut` for Java.
*/
def safeBuildPut(builder: Complete, content: Buf): Request =
builder.buildPut(content)(RequestEvidence.FullyConfigured)
/**
* Provides a typesafe `buildPut` for Java.
*/
@deprecated("Typo, use safeBuildPut instead", "5.3.7")
def safeBuidlPut(builder: Complete, content: Buf): Request =
safeBuildPut(builder, content)
/**
* Provides a typesafe `buildPost` for Java.
*/
def safeBuildPost(builder: Complete, content: Buf): Request =
builder.buildPost(content)(RequestEvidence.FullyConfigured)
/**
* Provides a typesafe `buildFormPost` for Java.
*/
def safeBuildFormPost(builder: CompleteForm, multipart: Boolean): Request =
builder.buildFormPost(multipart)(PostRequestEvidence.FullyConfigured)
}
object RequestConfig {
sealed abstract trait Yes
type FullySpecifiedConfig = RequestConfig[Yes, Nothing]
type FullySpecifiedConfigForm = RequestConfig[Yes, Yes]
}
private[httpx] final case class RequestConfig[HasUrl, HasForm](
url: Option[URL] = None,
headers: Map[String, Seq[String]] = Map.empty,
formElements: Seq[FormElement] = Nil,
version: Version = Version.Http11,
proxied: Boolean = false
)
class RequestBuilder[HasUrl, HasForm] private[httpx](
config: RequestConfig[HasUrl, HasForm]
) {
import RequestConfig._
type This = RequestBuilder[HasUrl, HasForm]
private[this] val SCHEME_WHITELIST = Seq("http","https")
private[httpx] def this() = this(RequestConfig())
/*
* Specify url as String
*/
def url(u: String): RequestBuilder[Yes, HasForm] = url(new java.net.URL(u))
/**
* Specify the url to request. Sets the HOST header and possibly
* the Authorization header using the authority portion of the URL.
*/
def url(u: URL): RequestBuilder[Yes, HasForm] = {
require(SCHEME_WHITELIST.contains(u.getProtocol), "url must be http(s)")
val uri = u.toURI
val host = uri.getHost.toLowerCase
val hostValue =
if (u.getPort == -1 || u.getDefaultPort == u.getPort)
host
else
"%s:%d".format(host, u.getPort)
val withHost = config.headers.updated(HttpHeaders.Names.HOST, Seq(hostValue))
val userInfo = uri.getUserInfo
val updated =
if (userInfo == null || userInfo.isEmpty)
withHost
else {
val auth = "Basic " + Base64StringEncoder.encode(userInfo.getBytes)
withHost.updated(HttpHeaders.Names.AUTHORIZATION, Seq(auth))
}
new RequestBuilder(config.copy(url = Some(u), headers = updated))
}
/*
* Add simple form name/value pairs. In this mode, this RequestBuilder will only
* be able to generate a multipart/form POST request.
*/
def addFormElement(kv: (String, String)*): RequestBuilder[HasUrl, Yes] = {
val elems = config.formElements
val updated = kv.foldLeft(elems) { case (es, (k, v)) => es :+ new SimpleElement(k, v) }
new RequestBuilder(config.copy(formElements = updated))
}
/*
* Add a FormElement to a request. In this mode, this RequestBuilder will only
* be able to generate a multipart/form POST request.
*/
def add(elem: FormElement): RequestBuilder[HasUrl, Yes] = {
val elems = config.formElements
val updated = elems ++ Seq(elem)
new RequestBuilder(config.copy(formElements = updated))
}
/*
   * Add a group of FormElements to a request; `elems` must be non-empty. In this mode,
   * this RequestBuilder will only be able to generate a multipart/form POST request.
*/
def add(elems: Seq[FormElement]): RequestBuilder[HasUrl, Yes] = {
val first = this.add(elems.head)
elems.tail.foldLeft(first) { (b, elem) => b.add(elem) }
}
/**
* Declare the HTTP protocol version be HTTP/1.0
*/
def http10(): This =
new RequestBuilder(config.copy(version = Version.Http10))
/**
* Set a new header with the specified name and value.
*/
def setHeader(name: String, value: String): This = {
val updated = config.headers.updated(name, Seq(value))
new RequestBuilder(config.copy(headers = updated))
}
/**
* Set a new header with the specified name and values.
*/
def setHeader(name: String, values: Seq[String]): This = {
val updated = config.headers.updated(name, values)
new RequestBuilder(config.copy(headers = updated))
}
/**
* Set a new header with the specified name and values.
*
* Java convenience variant.
*/
def setHeader(name: String, values: java.lang.Iterable[String]): This = {
setHeader(name, values.toSeq)
}
/**
* Add a new header with the specified name and value.
*/
def addHeader(name: String, value: String): This = {
val values = config.headers.get(name).getOrElse(Seq())
val updated = config.headers.updated(
name, values ++ Seq(value))
new RequestBuilder(config.copy(headers = updated))
}
/**
* Add group of headers expressed as a Map
*/
def addHeaders(headers: Map[String, String]): This = {
headers.foldLeft(this) { case (b, (k, v)) => b.addHeader(k, v) }
}
/**
* Declare the request will be proxied. Results in using the
* absolute URI in the request line.
*/
def proxied(): This = proxied(None)
/**
* Declare the request will be proxied. Results in using the
* absolute URI in the request line and setting the Proxy-Authorization
* header using the provided {{ProxyCredentials}}.
*/
def proxied(credentials: ProxyCredentials): This = proxied(Some(credentials))
/**
* Declare the request will be proxied. Results in using the
* absolute URI in the request line and optionally setting the
* Proxy-Authorization header using the provided {{ProxyCredentials}}.
*/
def proxied(credentials: Option[ProxyCredentials]): This = {
val headers: Map[String,Seq[String]] = credentials map { creds =>
config.headers.updated(HttpHeaders.Names.PROXY_AUTHORIZATION, Seq(creds.basicAuthorization))
} getOrElse config.headers
new RequestBuilder(config.copy(headers = headers, proxied = true))
}
/**
* Construct an HTTP request with a specified method.
*/
def build(method: Method, content: Option[Buf])(
implicit HTTP_REQUEST_BUILDER_IS_NOT_FULLY_SPECIFIED: RequestBuilder.RequestEvidence[HasUrl, HasForm]
): Request = {
content match {
case Some(content) => withContent(method, content)
case None => withoutContent(method)
}
}
/**
* Construct an HTTP GET request.
*/
def buildGet()(
implicit HTTP_REQUEST_BUILDER_IS_NOT_FULLY_SPECIFIED: RequestBuilder.RequestEvidence[HasUrl, HasForm]
): Request = withoutContent(Method.Get)
/**
* Construct an HTTP HEAD request.
*/
def buildHead()(
implicit HTTP_REQUEST_BUILDER_IS_NOT_FULLY_SPECIFIED: RequestBuilder.RequestEvidence[HasUrl, HasForm]
): Request = withoutContent(Method.Head)
/**
* Construct an HTTP DELETE request.
*/
def buildDelete()(
implicit HTTP_REQUEST_BUILDER_IS_NOT_FULLY_SPECIFIED: RequestBuilder.RequestEvidence[HasUrl, HasForm]
): Request = withoutContent(Method.Delete)
/**
* Construct an HTTP POST request.
*/
def buildPost(content: Buf)(
implicit HTTP_REQUEST_BUILDER_IS_NOT_FULLY_SPECIFIED: RequestBuilder.RequestEvidence[HasUrl, HasForm]
): Request = withContent(Method.Post, content)
/**
* Construct an HTTP PUT request.
*/
def buildPut(content: Buf)(
implicit HTTP_REQUEST_BUILDER_IS_NOT_FULLY_SPECIFIED: RequestBuilder.RequestEvidence[HasUrl, HasForm]
): Request = withContent(Method.Put, content)
/**
* Construct a form post request.
*/
def buildFormPost(multipart: Boolean = false) (
implicit HTTP_REQUEST_BUILDER_IS_NOT_FULLY_SPECIFIED: RequestBuilder.PostRequestEvidence[HasUrl, HasForm]
): Request = {
val dataFactory = new DefaultHttpDataFactory(false) // we don't use disk
val req = withoutContent(Method.Post)
val encoder = new HttpPostRequestEncoder(dataFactory, req.httpRequest, multipart)
config.formElements.foreach {
case FileElement(name, content, contentType, filename) =>
HttpPostRequestEncoderEx.addBodyFileUpload(encoder, dataFactory, req.httpRequest)(
name, filename.getOrElse(""),
BufChannelBuffer(content),
contentType.getOrElse(null),
false)
case SimpleElement(name, value) =>
encoder.addBodyAttribute(name, value)
}
val encodedReq = encoder.finalizeRequest()
if (encodedReq.isChunked) {
val encodings = encodedReq.headers.getAll(HttpHeaders.Names.TRANSFER_ENCODING)
encodings.remove(HttpHeaders.Values.CHUNKED)
if (encodings.isEmpty)
encodedReq.headers.remove(HttpHeaders.Names.TRANSFER_ENCODING)
else
encodedReq.headers.set(HttpHeaders.Names.TRANSFER_ENCODING, encodings)
val chunks = new ListBuffer[ChannelBuffer]
while (encoder.hasNextChunk) {
chunks += encoder.nextChunk().getContent()
}
encodedReq.setContent(ChannelBuffers.wrappedBuffer(chunks:_*))
}
from(encodedReq)
}
// absoluteURI if proxied, otherwise relativeURI
private[this] def resource(): String = {
val url = config.url.get
if (config.proxied) {
return url.toString
} else {
val builder = new StringBuilder()
val path = url.getPath
if (path == null || path.isEmpty)
builder.append("/")
else
builder.append(path)
val query = url.getQuery
if (query != null && !query.isEmpty)
builder.append("?%s".format(query))
builder.toString
}
}
private[httpx] def withoutContent(method: Method): Request =
Request(method, resource, config.version, config.headers)
private[httpx] def withContent(method: Method, content: Buf): Request = {
require(content != null)
val req = withoutContent(method)
req.content = content
req.headers.set(HttpHeaders.Names.CONTENT_LENGTH, content.length.toString)
req
}
}
/**
* Add a missing method to HttpPostRequestEncoder to allow specifying a ChannelBuffer directly as
* content of a file. This logic should eventually move to netty.
*/
private object HttpPostRequestEncoderEx {
//TODO: HttpPostBodyUtil not accessible from netty 3.5.0.Final jar
// This HttpPostBodyUtil simulates what we need.
object HttpPostBodyUtil {
val DEFAULT_TEXT_CONTENT_TYPE = "text/plain"
val DEFAULT_BINARY_CONTENT_TYPE = "application/octet-stream"
object TransferEncodingMechanism {
val BINARY = "binary"
val BIT7 = "7bit"
}
}
/*
* allow specifying post body as ChannelBuffer, the logic is adapted from netty code.
*/
def addBodyFileUpload(encoder: HttpPostRequestEncoder, factory: HttpDataFactory, request: HttpRequest)
(name: String, filename: String, content: ChannelBuffer, contentType: String, isText: Boolean) {
require(name != null)
require(filename != null)
require(content != null)
val scontentType =
if (contentType == null) {
if (isText) {
HttpPostBodyUtil.DEFAULT_TEXT_CONTENT_TYPE
} else {
HttpPostBodyUtil.DEFAULT_BINARY_CONTENT_TYPE
}
} else {
contentType
}
val contentTransferEncoding =
if (!isText) {
HttpPostBodyUtil.TransferEncodingMechanism.BINARY
} else {
HttpPostBodyUtil.TransferEncodingMechanism.BIT7
}
val fileUpload = factory.createFileUpload(request, name, filename, scontentType, contentTransferEncoding, null, content.readableBytes)
fileUpload.setContent(content)
encoder.addBodyHttpData(fileUpload)
}
}
| LithiumTD/finagle | finagle-httpx/src/main/scala/com/twitter/finagle/httpx/RequestBuilder.scala | Scala | apache-2.0 | 15,895 |
//
// Taranos Cloud Sonification Framework: Service Core
// Copyright (C) 2018 David Hinson, Netrogen Blue LLC ([email protected])
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as
// published by the Free Software Foundation, either version 3 of the
// License, or (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.
//
package org.taranos.mc.trunk.intraprocess
import org.taranos.mc.Cell
class SignalPortPlant
(implicit protected val _trunkModel: TrunkModel)
extends TrunkElementPlant
{
import scala.collection.mutable
private
val _ports = mutable.HashMap.empty[(Trunk.Key, SignalPort.Key), SignalPort]
private
def FormatTagForKey (tag: String): String =
{
        // Drop any trailing separator + signal-port suffix (dropRight(3) assumes a 3-character suffix):
if (tag.endsWith(TrunkModel.Glossary.kTagSeparator + TrunkModel.Glossary.kESignalPort))
tag.dropRight(3)
else
tag
}
def CreateSignalPort (
trunk: Trunk,
interface: SignalInterface,
constructor: SignalPort.Constructor): SignalPort =
{
// Create port element:
val port = new SignalPort(
new SignalPort.Meta(
TrunkElement.MakeUniqueKey(FormatTagForKey(constructor._tag), isObscured = false),
constructor._tag,
constructor._badgeOpt,
constructor._nameOpt,
constructor._descriptionOpt,
constructor._aliasOpt,
constructor._mode),
new SignalPort.Attrs(),
new SignalPort.Refs(
trunk.GetKey,
interface.GetKey,
constructor._inputKeyOpt))
// 1: Add element to store:
_ports += (trunk.GetKey, port.GetKey) -> port
// 2: Bind with trunk:
trunk.BindSignalPort(port.GetKey)
// 3: Bind with parent:
interface.BindPort(port.GetKey)
// 4: Bind with peers:
// N/A
// 5: Bind with children:
// N/A
// Return port:
port
}
def DestroySignalPort (
trunk: Trunk,
destructor: SignalPort.Destructor): SignalPort.Key =
{
destructor._key match
{
case key: SignalPort.Key =>
_ports.get((trunk.GetKey, key)) match
{
case Some(port) =>
// 1: Unbind with children:
// N/A
// 2: Unbind with peers:
// N/A
// 3: Unbind with parent:
_trunkModel.GetSignalInterfaceOpt(
trunk.GetKey,
port.GetInterfaceKey,
isRequired = false) match
{
case Some(interface) => interface.UnbindPort(port.GetKey)
case None => // Ok, interface must have been destroyed already.
}
// 4: Unbind with trunk:
trunk.UnbindSignalPort(port.GetKey)
// 5: Destroy children:
// 6: Remove element from store:
_ports -= ((trunk.GetKey, port.GetKey))
case None => throw TrunkException(Cell.ErrorCodes.SignalPortUnknown)
}
case _ => throw TrunkException(Cell.ErrorCodes.SignalPortInvalid)
}
// Return port key:
destructor._key
}
def DestroyAllSignalPorts (trunk: Trunk): Unit =
{
val trunkKey = trunk.GetKey
// Destroy each port of trunk:
_ports.filter(_._1._1 == trunkKey).foreach(portPair =>
{
val ((_, pairPortKey), _) = portPair
val portDestructor = SignalPort.Destructor(pairPortKey)
DestroySignalPort(trunk, portDestructor)
})
}
def GetSignalPortOpt (
trunk: Trunk,
key: SignalPort.Key,
isRequired: Boolean = true): Option[SignalPort] =
{
// Lookup port key:
key match
{
case _: SignalPort.Key =>
val opt = _ports.get((trunk.GetKey, key))
if (isRequired && opt.isEmpty)
throw TrunkException(Cell.ErrorCodes.SignalPortUnknown)
opt
case _ => throw TrunkException(Cell.ErrorCodes.SignalPortKeyInvalid)
}
}
def GetSignalPorts (trunk: Trunk): Vector[SignalPort] =
{
val trunkKey = trunk.GetKey
// Return ports vector:
_ports.filter(_._1._1 == trunkKey).values.toVector
}
def GetSignalPortKeys (trunk: Trunk): Vector[SignalPort.Key] =
{
val trunkKey = trunk.GetKey
// Return signal port keys vector:
_ports.filter(_._1._1 == trunkKey).keys.map(_._2).toVector
}
def GetElementCount (trunkKey: Trunk.Key): Int =
_ports.count(_._1._1 == trunkKey)
def LookupSignalPort (trunk: Trunk, lookupAlias: String): Option[SignalPort.Key] =
{
import scala.util.control.Breaks._
val trunkKey = trunk.GetKey
var portKeyOpt: Option[SignalPort.Key] = None
breakable
{
for (portPair <- _ports.filter(_._1._1 == trunkKey))
{
val ((_, pairPortKey), pairPort) = portPair
pairPort.GetAliasOpt match
{
case Some(alias) =>
if (alias == lookupAlias)
{
portKeyOpt = Some(pairPortKey)
break()
}
case None =>
}
}
}
portKeyOpt
}
}
| taranos/taranoscsf-core | src/main/scala/org/taranos/mc/trunk/intraprocess/SignalPortPlant.scala | Scala | agpl-3.0 | 6,491 |
import
scala.io.Source
import
sbt._,
Keys._
object PluginBuild extends Build {
def moveThatJS(classesDir: File): Unit = {
val paths = Seq(
"META-INF/resources/webjars/qunit/1.11.0/qunit.css",
"META-INF/resources/webjars/requirejs/2.1.8/require.js"
)
val dest = classesDir / "javascript"
dest.mkdir()
paths foreach writeResourceIntoDir(dest)
handleStupidAutoStartingQUnitLib(dest)
}
private def writeResourceIntoDir(dir: File)(path: String): Unit = {
val (outFile, lines) = generateFileAndLines(dir, path)
IO.writeLines(outFile, lines)
}
private def handleStupidAutoStartingQUnitLib(dir: File): Unit = {
val path = "META-INF/resources/webjars/qunit/1.11.0/qunit.js"
val (outFile, lines) = generateFileAndLines(dir, path)
    val StupidAutoStartingQUnitLibSettingRegex = """(\s*autostart:\s*)true(,\s*)""".r
val newLines = lines map {
case StupidAutoStartingQUnitLibSettingRegex(key, post) =>
s"${key}false${post}"
case x =>
x
}
IO.writeLines(outFile, newLines)
}
// Can almost certainly be done better (AKA without loading the whole resource into memory) --JAB (10/13/13)
private def generateFileAndLines(dir: File, path: String): (File, Seq[String]) = {
val filename = path.reverse.takeWhile(_ != '/').reverse
val rsrc = this.getClass.getClassLoader.getResourceAsStream(path)
val src = Source.fromInputStream(rsrc)
val lines = src.getLines().toList
src.close()
val outFile = dir / filename
outFile.createNewFile()
(outFile, lines)
}
}
| TheBizzle/Ghost-Host | project/Build.scala | Scala | bsd-3-clause | 1,617 |
/* __ *\
** ________ ___ / / ___ __ ____ Scala.js sbt plugin **
** / __/ __// _ | / / / _ | __ / // __/ (c) 2013, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ |/_// /_\ \ http://scala-js.org/ **
** /____/\___/_/ |_/____/_/ | |__/ /____/ **
** |/____/ **
\* */
package org.scalajs.jsenv.rhino
import scala.collection.mutable
import org.mozilla.javascript.{Scriptable, Context}
/** A proxy for a ScalaJS "scope" field that loads scripts lazily
*
* E.g., ScalaJS.c, which is a scope with the Scala.js classes, can be
* turned to a LazyScalaJSScope. Upon first access to a field of ScalaJS.c,
* say ScalaJS.c.scala_Option, the script defining that particular
* field will be loaded.
* This is possible because the relative path to the script can be derived
* from the name of the property being accessed.
*
 * It is immensely useful, because it allows loading lazily only the scripts
 * that are actually needed.
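 *
 * A hypothetical wiring sketch (names illustrative): given the scope object
 * holding Scala.js classes, one could wrap it so scripts load on first access:
 * {{{
 * val lazyClasses = new LazyScalaJSScope(coreLib, globalScope, classesScope)
 * scalaJSObject.put("c", scalaJSObject, lazyClasses)
 * }}}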
*/
class LazyScalaJSScope(
coreLib: ScalaJSCoreLib,
globalScope: Scriptable,
base: Scriptable,
isStatics: Boolean = false) extends Scriptable {
private val fields = mutable.HashMap.empty[String, Any]
private var prototype: Scriptable = _
private var parentScope: Scriptable = _
{
// Pre-fill fields with the properties of `base`
for (id <- base.getIds()) {
(id.asInstanceOf[Any]: @unchecked) match {
case name: String => put(name, this, base.get(name, base))
case index: Int => put(index, this, base.get(index, base))
}
}
}
private def load(name: String): Unit =
coreLib.load(globalScope, propNameToEncodedName(name))
private def propNameToEncodedName(name: String): String = {
if (isStatics) name.split("__")(0)
else name
}
override def getClassName() = "LazyScalaJSScope"
override def get(name: String, start: Scriptable) = {
fields.getOrElse(name, {
try {
load(name)
fields.getOrElse(name, Scriptable.NOT_FOUND)
} catch {
// We need to re-throw the exception if `load` fails, otherwise the
// JavaScript runtime will not catch it.
case t: ScalaJSCoreLib.ClassNotFoundException =>
throw Context.throwAsScriptRuntimeEx(t)
}
}).asInstanceOf[AnyRef]
}
override def get(index: Int, start: Scriptable) =
get(index.toString, start)
override def has(name: String, start: Scriptable) =
fields.contains(name)
override def has(index: Int, start: Scriptable) =
has(index.toString, start)
override def put(name: String, start: Scriptable, value: Any) = {
fields(name) = value
}
override def put(index: Int, start: Scriptable, value: Any) =
put(index.toString, start, value)
override def delete(name: String) = ()
override def delete(index: Int) = ()
override def getPrototype() = prototype
override def setPrototype(value: Scriptable) = prototype = value
override def getParentScope() = parentScope
override def setParentScope(value: Scriptable) = parentScope = value
override def getIds() = fields.keys.toArray
override def getDefaultValue(hint: java.lang.Class[_]) = {
base.getDefaultValue(hint)
}
override def hasInstance(instance: Scriptable) = false
}
| matthughes/scala-js | js-envs/src/main/scala/org/scalajs/jsenv/rhino/LazyScalaJSScope.scala | Scala | bsd-3-clause | 3,479 |
// Copyright (C) 2019 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.scenarios
import io.gatling.core.Predef._
import io.gatling.core.feeder.FeederBuilder
import io.gatling.core.structure.ScenarioBuilder
import scala.concurrent.duration._
class ReplayRecordsFromFeeder extends GitSimulation {
private val data: FeederBuilder = jsonFile(resource).convert(keys).circular
private val default: String = name
override def relativeRuntimeWeight = 30
override def replaceOverride(in: String): String = {
replaceKeyWith("_project", default, in)
}
private val test: ScenarioBuilder = scenario(unique)
.repeat(10) {
feed(data)
.exec(gitRequest)
}
private val createProject = new CreateProject(default)
private val deleteProject = new DeleteProject(default)
private val maxBeforeDelete: Int = maxExecutionTime - deleteProject.maxExecutionTime
setUp(
createProject.test.inject(
nothingFor(stepWaitTime(createProject) seconds),
atOnceUsers(1)
),
test.inject(
nothingFor(stepWaitTime(this) seconds),
atOnceUsers(10),
rampUsers(10) during (5 seconds),
constantUsersPerSec(20) during (15 seconds),
constantUsersPerSec(20) during (15 seconds) randomized
),
deleteProject.test.inject(
nothingFor(maxBeforeDelete seconds),
atOnceUsers(1)
),
).protocols(gitProtocol, httpProtocol)
.maxDuration(maxExecutionTime seconds)
}
| WANdisco/gerrit | e2e-tests/src/test/scala/com/google/gerrit/scenarios/ReplayRecordsFromFeeder.scala | Scala | apache-2.0 | 2,018 |
/*
*************************************************************************************
* Copyright 2011 Normation SAS
*************************************************************************************
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* In accordance with the terms of section 7 (7. Additional Terms.) of
* the GNU Affero GPL v3, the copyright holders add the following
* Additional permissions:
* Notwithstanding to the terms of section 5 (5. Conveying Modified Source
* Versions) and 6 (6. Conveying Non-Source Forms.) of the GNU Affero GPL v3
* licence, when you create a Related Module, this Related Module is
* not considered as a part of the work and may be distributed under the
* license agreement of your choice.
* A "Related Module" means a set of sources files including their
* documentation that, without modification of the Source Code, enables
* supplementary functions or services in addition to those offered by
* the Software.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/agpl.html>.
*
*************************************************************************************
*/
package com.normation.rudder.web.components
import com.normation.rudder.domain.queries._
import com.normation.rudder.domain.nodes.NodeInfo
import com.normation.rudder.services.nodes.NodeInfoService
import scala.collection.mutable.Buffer
import com.normation.rudder.domain.queries.{
CriterionComposition,
Or,And,
CriterionLine,
Query
}
import com.normation.rudder.services.queries.QueryProcessor
import net.liftweb.http.Templates
import net.liftweb.http.js._
import JsCmds._
import JE._
import net.liftweb.common._
import net.liftweb.http.{SHtml,S,DispatchSnippet}
import scala.xml._
import net.liftweb.util.Helpers
import net.liftweb.util.Helpers._
import com.normation.exceptions.TechnicalException
import com.normation.rudder.web.services.SrvGrid
import scala.collection.mutable.ArrayBuffer
import com.normation.inventory.ldap.core.LDAPConstants
import LDAPConstants._
import com.normation.rudder.domain.queries.OstypeComparator
import net.liftweb.util.ToJsCmd
import bootstrap.liftweb.RudderConfig
/**
* The Search Nodes component
* It is used in the standard search server page, and in the group page (and probably elsewhere)
*
* query and srvList are both var, because they will be manipulated by the component
* we would have wanted to get back their value, but it seems it cannot be done
*
*/
class SearchNodeComponent(
htmlId : String // unused ...
, _query : Option[Query]
, _srvList : Box[Seq[NodeInfo]]
, onUpdateCallback : () => JsCmd = { () => Noop } // this one is not used yet
, onClickCallback : (String) => JsCmd = { (x:String) => Noop } // this callback is used when we click on an element in the grid
, onSearchCallback : (Boolean) => JsCmd = { (x:Boolean) => Noop } // this callback is used when a research is done and the state of the Search button changes
, saveButtonId : String = "" // the id of the save button, that gets disabled when one change the form
, groupPage : Boolean
)extends DispatchSnippet {
import SearchNodeComponent._
//our local copy of things we work on
private[this] var query = _query.map(x => x.copy())
private[this] var srvList = _srvList.map(x => Seq() ++ x)
private[this] val nodeInfoService = RudderConfig.nodeInfoService
private[this] val queryProcessor = RudderConfig.acceptedNodeQueryProcessor
// The portlet for the server detail
private[this] def serverPortletPath = List("templates-hidden", "server", "server_details")
private[this] def serverPortletTemplateFile() = Templates(serverPortletPath) match {
case Empty | Failure(_,_,_) =>
throw new TechnicalException("Template for server details not found. I was looking for %s.html".format(serverPortletPath.mkString("/")))
case Full(n) => n
}
private[this] def searchNodes = chooseTemplate("query","SearchNodes",serverPortletTemplateFile)
private[this] def content = chooseTemplate("content","query",searchNodes)
/**
* External exposition of the current state of server list.
* Page/component which includes SearchNodeComponent can use it.
* @return
*/
def getSrvList() : Box[Seq[NodeInfo]] = srvList
/**
* External exposition of the current state of query.
* Page/component which includes SearchNodeComponent can use it.
* @return
*/
def getQuery() : Option[Query] = query
var dispatch : DispatchIt = {
case "showQuery" => { _ => buildQuery }
case "head" => { _ => head() }
}
var activateSubmitButton = true
var initUpdate = true // this is true when we arrive on the page, or when we've done an search
val errors = Buffer[Box[String]]()
def head() : NodeSeq = {
<head>
{
srvGrid.head
}
</head>
}
def buildQuery() : NodeSeq = {
if(None == query) query = Some(Query(NodeReturnType,And,Seq(defaultLine)))
val lines = ArrayBuffer[CriterionLine]()
var composition = query.get.composition
var rType = query.get.returnType //for now, don't move
def addLine(i:Int) : JsCmd = {
lines.insert(i+1, CriterionLine(ditQueryData.criteriaMap(OC_NODE),ditQueryData.criteriaMap(OC_NODE).criteria(0),ditQueryData.criteriaMap(OC_NODE).criteria(0).cType.comparators(0)))
query = Some(Query(rType, composition, lines.toSeq))
activateSubmitButton = true
initUpdate = false
ajaxCriteriaRefresh
}
def removeLine(i:Int) : JsCmd ={
if(lines.size > i) {
lines.remove(i)
query = Some(Query(rType, composition, lines.toSeq))
}
activateSubmitButton = true
initUpdate = false
ajaxCriteriaRefresh
}
def processForm() : JsCmd = {
      // filter out non-validated values
errors.clear()
lines.zipWithIndex.foreach { case (CriterionLine(ot,a,c,v),i) =>
if(errors.size < i+1) errors.append(Empty)
a.cType.validate(v,c.id) match {
case Failure(m,_,_) => errors(i) = Full(m)
case _ => errors(i) = Empty
}
}
val newQuery = Query(rType, composition, lines.toSeq)
query = Some(newQuery)
if(errors.filter(_.isDefined).size == 0) {
// ********* EXECUTE QUERY ***********
srvList = queryProcessor.process(newQuery)
activateSubmitButton = false
initUpdate = true
} else {
// ********* ERRORS FOUND ***********"
srvList = Empty
activateSubmitButton = true
}
ajaxCriteriaRefresh & ajaxGridRefresh
}
/**
* Refresh the query parameter part
*/
def ajaxCriteriaRefresh : JsCmd = {
SetHtml("SearchForm", displayQuery(content))& activateButtonOnChange & JsRaw("correctButtons();")
}
/**
     * Display the query part.
     * Caution: the html passed in differs between init (whole content:query) and update (update:query).
*
*/
def displayQuery(html: NodeSeq ) : NodeSeq = {
val Query(otName,comp, criteria) = query.get
SHtml.ajaxForm(bind("query", html,
"typeQuery" -> <label>Include policy servers: <span class="compositionCheckbox">{SHtml.checkbox(rType==NodeAndPolicyServerReturnType, { value:Boolean =>
if (value)
rType = NodeAndPolicyServerReturnType
else
rType = NodeReturnType}
)}</span></label>,
"composition" -> SHtml.radio(Seq("AND", "OR"), Full(if(comp == Or) "OR" else "AND"), {value:String =>
composition = CriterionComposition.parse(value).getOrElse(And) //default to AND on unknown composition string
}, ("class", "radio")).flatMap(e => <label>{e.xhtml} <span class="radioTextLabel">{e.key.toString}</span></label>),
"lines" -> {(ns: NodeSeq) =>
/*
* General remark :
* - bind parameter of closure to lines (so that they actually get the current value of the line when evaluated)
* - bind parameter out of closure to ot/a/c/v so that they have the current value (and not a past one)
*/
{
criteria.zipWithIndex.flatMap { case (CriterionLine(ot,a,c,v),i) =>
for(j <- lines.size to i) {
lines.append(defaultLine)
}
for(j <- errors.size to i) {
errors.append(Empty)
}
bind("line",ns,
"removeLine" -> {
if(criteria.size <= 1)
NodeSeq.Empty
else
SHtml.ajaxSubmit("-", () => removeLine(i), ("class", "removeLineButton"))
},
"addline" ->
SHtml.ajaxSubmit("+", () => addLine(i), ("class", "removeLineButton")),
"objectType" -> objectTypeSelect(ot,lines,i),
"attributeName" -> attributeNameSelect(ot,a,lines,i),
"comparator" -> comparatorSelect(ot,a,c,lines,i),
"inputValue" -> {
var form = a.cType.toForm(v, (x => lines(i) = lines(i).copy(value=x)), ("id","v_"+i), ("class", "queryInputValue"))
if(!c.hasValue) form = form % Attribute("disabled",Seq(Text("disabled")),Null)
form
} ,
"error" -> { errors(i) match {
case Full(m) => <tr><td class="error" colspan="6">{m}</td></tr>
case _ => NodeSeq.Empty
}}
)
}:NodeSeq} ++ { if(criteria.size > 0) {
//add a <script> tag to init all specific Js form renderer, like Jquery datepicker for date
var initJs = criteria(0).attribute.cType.initForm("v_0")
for(i <- 1 until criteria.size) { initJs = initJs & criteria(i).attribute.cType.initForm("v_"+i) }
Script(OnLoad(initJs))
} else NodeSeq.Empty}
},
"submit" -> {
if (activateSubmitButton)
SHtml.ajaxSubmit("Search", processForm, ("id" -> "SubmitSearch"), ("class" -> "submitButton"))
else
SHtml.ajaxSubmit("Search", processForm, ("disabled" -> "true"), ("id" -> "SubmitSearch"), ("class" -> "submitButton"))
}
)) ++ Script(OnLoad(JsVar("""
$(".queryInputValue").keydown( function(event) {
processKey(event , 'SubmitSearch')
} );
""")))
}
/**
* Show the search engine and the grid
*/
def showQueryAndGridContent() : NodeSeq = {
bind("content",searchNodes,
"query" -> {x:NodeSeq => displayQuery(x)},
"gridResult" -> srvGrid.display(Seq(), "serverGrid") // we need to set something, or IE moans
)
}
showQueryAndGridContent() ++ Script(OnLoad(ajaxGridRefresh))
}
/**
* Refresh the grid result
   * A small trick of the trade: since the "Showing x of xx" is moved out of the ajax refresh
   * zone, it must be removed before being added again, or problems will arise.
* @return
*/
def ajaxGridRefresh() : JsCmd = {
val grid = gridResult
JE.JsRaw("""$("#serverGrid_info").remove();""") &
JE.JsRaw("""$("#serverGrid_length").remove();""") &
SetHtml("gridResult", grid._1) & grid._2 & activateButtonOnChange
}
/**
* When we change the form, we can update the query
* @return
*/
def activateButtonOnChange() : JsCmd = {
onSearchCallback(activateSubmitButton & !initUpdate) &
JE.JsRaw("""activateButtonDeactivateGridOnFormChange("queryParameters", "SubmitSearch", "serverGrid", "%s", "%s"); """.format(activateSubmitButton, saveButtonId))
}
/**
* From the computed result, return the NodeSeq corresponding to the grid, plus the initialisation JS
*/
def gridResult : (NodeSeq, JsCmd) = {
// Ideally this would just check the size first ?
srvList match {
case Full(seq) =>
(srvGrid.display(seq, "serverGrid"),
srvGrid.initJs("serverGrid", onClickCallback,groupPage))
case Empty =>
(srvGrid.display(Seq(), "serverGrid"),
srvGrid.initJs("serverGrid", onClickCallback,groupPage))
case f@Failure(_,_,_) => (<div><h4>Error</h4>{f.messageChain}</div>, Noop)
}
}
}
/*
* Structure of the Query:
* var query = {
* 'objectType' : 'server' , //what we are looking for at the end (servers, software...)
* 'composition' : 'and' , // or 'or'
* 'criteria': [
* { 'objectType' : '....' , 'attribute': '....' , 'comparator': '.....' , 'value': '....' } , //value is optionnal, other are mandatory
* { 'objectType' : '....' , 'attribute': '....' , 'comparator': '.....' , 'value': '....' } ,
* ...
* { 'objectType' : '....' , 'attribute': '....' , 'comparator': '.....' , 'value': '....' }
* ]
* }
*/
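/*
 * The server-side counterpart (values illustrative) is built from the domain types
 * imported above, e.g.:
 * Query(NodeReturnType, And, Seq(CriterionLine(objectType, attribute, comparator, "value")))
 */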
//some global definition for our structure
object SearchNodeComponent {
val ditQueryData = RudderConfig.ditQueryData
val srvGrid = RudderConfig.srvGrid
////////
//////// All the tools to handle the Ajax logic for
//////// dependent Select Box:
//////// - passing string param between client and server (TODO : should be json)
  //////// - building the required JS to update the select box to its new value
////////
def parseAttrParam(s:String) : Option[(String,String,String,String,String,String,String)] = {
//expected "newObjectTypeValue,attributeSelectEltId,oldAttrValue,comparatorSelectEltId,oldCompValue,valueSelectEltId,oldValue
val reg = """([^,]*),([^,]*),([^,]*),([^,]*),([^,]*),([^,]*),([^,]*)""".r
s match {
case reg(a,b,c,d,e,f,g) => Some((a,b,c,d,e,f,g))
case _ => None
}
}
def parseCompParam(s:String) : Option[(String,String,String,String,String,String)] = {
//expect "objectTypeValue,newAttrValue,comparatorSelectEltId,oldCompValue,valueSelectEltId,oldValue
val reg = """([^,]*),([^,]*),([^,]*),([^,]*),([^,]*),([^,]*)""".r
s match {
case reg(a,b,c,d,e,f) => Some((a,b,c,d,e,f))
case _ => None
}
}
def parseValParam(s:String) : Option[(String,String)] = {
//expect "newCompValue,valueSelectEltId"
val reg = """([^,]*),([^,]*)""".r
s match {
case reg(a,b) => Some((a,b))
case _ => None
}
}
def setIsEnableFor(comparator:String,valueEltId:String) : JsCmd = {
val e = OrderedComparators.comparatorForString(comparator) match {
case None => true
case Some(comp) => comp.hasValue
}
if(e) SetExp(ElemById(valueEltId,"disabled"), JsFalse)
else SetExp(ElemById(valueEltId,"disabled"), JsTrue)
}
def updateCompAndValue(func: String => Any,ot:String,a:String,c_eltid:String,c_oldVal:String,v_eltid:String,v_old:String) : JsCmd = {
//change input display
val comp = ditQueryData.criteriaMap.get(ot) match {
case None => StringComparator
case Some(o) => o.criterionForName(a) match {
case None => StringComparator
case Some(comp) => comp.cType
}
}
val comparators = optionComparatorsFor(ot,a)
val compNames = comparators.map(_._1)
val selectedComp = compNames match {
case a::_ => compNames.filter(_==c_oldVal) match {
case x::_ => x
case Nil => a
}
case Nil => ""
}
JsRaw("jQuery('#%s').replaceWith('%s')".format(v_eltid,comp.toForm(v_old,func,("id"->v_eltid), ("class" -> "queryInputValue")))) & comp.initForm(v_eltid) &
JsCmds.ReplaceOptions(c_eltid,comparators,Full(selectedComp)) &
setIsEnableFor(selectedComp,v_eltid) &
OnLoad(JsVar("""
$(".queryInputValue").keydown( function(event) {
processKey(event , 'SubmitSearch')
} );
"""))
}
def replaceAttributes(func: String => Any)(ajaxParam:String):JsCmd = {
parseAttrParam(ajaxParam) match {
case None => Alert("Can't parse for attribute: " + ajaxParam)
case Some((ot,a_eltid,a_oldVal,c_eltid,c_oldVal,v_eltid,v_old)) =>
//change attribute list
val attributes = optionAttributesFor(ot)
val attrNames = attributes.map(_._1)
val selectedAttr = attrNames match {
case a::_ => attrNames.filter(_==a_oldVal) match {
case x::_ => x
case Nil => a
}
case Nil => ""
}
JsCmds.ReplaceOptions(a_eltid,attributes,Full(selectedAttr)) &
updateCompAndValue(func,ot,selectedAttr,c_eltid,c_oldVal,v_eltid,v_old)
}
}
def replaceComp(func: String => Any)(ajaxParam:String):JsCmd = {
parseCompParam(ajaxParam) match {
case None => Alert("Can't parse for comparator: " + ajaxParam)
case Some((ot,a,c_eltid,c_oldVal,v_eltid,v_old)) =>
updateCompAndValue(func,ot,a,c_eltid,c_oldVal,v_eltid,v_old)
}
}
def replaceValue(ajaxParam:String):JsCmd = { //elementId:String, comp:String, oldValue:String) : JsCmd = {
parseValParam(ajaxParam) match {
case None => Alert("Can't parse for value: " + ajaxParam)
case Some((c_val,v_eltid)) => setIsEnableFor(c_val,v_eltid)
}
}
//expected "newObjectTypeValue,attributeSelectEltId,oldAttrValue,comparatorSelectEltId,oldCompValue,valueSelectEltId,oldValue
  def ajaxAttr(lines: Buffer[CriterionLine], i:Int) = { SHtml.ajaxCall( //when we change the attribute, we want to reset the value, see issue #1199
JE.JsRaw("this.value+',at_%s,'+%s+',ct_%s,'+ %s +',v_%s,'+%s".format(i, ValById("at_"+i).toJsCmd,i,ValById("ct_"+i).toJsCmd,i,Str("").toJsCmd)),
s => After(200, replaceAttributes(x => lines(i) = lines(i).copy(value=x))(s))) }
//expect "objectTypeValue,newAttrValue,comparatorSelectEltId,oldCompValue,valueSelectEltId
  def ajaxComp(lines: Buffer[CriterionLine], i:Int)= { SHtml.ajaxCall( //when we change the attribute, we want to reset the value, see issue #1199
JE.JsRaw("%s+','+this.value+',ct_%s,'+ %s +',v_%s,'+%s".format(ValById("ot_"+i).toJsCmd, i, ValById("ct_"+i).toJsCmd,i,Str("").toJsCmd)),
s => After(200, replaceComp(x => lines(i) = lines(i).copy(value=x))(s))) }
//expect "newCompValue,valueSelectEltId"
def ajaxVal(lines: Buffer[CriterionLine], i:Int) = { SHtml.ajaxCall(
JE.JsRaw("this.value+',v_%s'".format(i)),
s => After(200, replaceValue(s))) }
////////
//////// Build require select box for a line
////////
//how to present the first select box
val otOptions : List[(String,String)] = {
val opts = Buffer[(String,String)]()
def add(s:String, pre:String="") = opts += ((s,pre + S.?("ldap.object."+s)))
add(OC_NODE)
add(OC_NET_IF, " ├─ ")
add(OC_FS, " ├─ ")
add(A_PROCESS, " ├─ ")
add(OC_VM_INFO," ├─ ")
add(A_EV, " └─ ")
add(OC_MACHINE)
add(OC_BIOS, " ├─ ")
add(OC_CONTROLLER, " ├─ ")
add(OC_MEMORY, " ├─ ")
add(OC_PORT, " ├─ ")
add(OC_PROCESSOR, " ├─ ")
add(OC_SLOT, " ├─ ")
add(OC_SOUND, " ├─ ")
add(OC_STORAGE, " ├─ ")
add(OC_VIDEO, " └─ ")
add(OC_SOFTWARE)
opts.toList
}
def optionAttributesFor(objectType:String) : List[(String,String)] = {
ditQueryData.criteriaMap.get(objectType) match {
case None => List()
case Some(ot) => ot.criteria.map(x => (x.name,S.?("ldap.attr."+x.name))).toList
}
}
def optionComparatorsFor(objectType:String,attribute:String) : List[(String,String)] = {
ditQueryData.criteriaMap.get(objectType) match {
case None => List()
case Some(ot) => ot.criterionForName(attribute) match {
case None => List()
case Some(a) => a.cType.comparators.map(x => (x.id,S.?("ldap.comp."+x.id))).toList
}
}
}
def objectTypeSelect(ot:ObjectCriterion,lines: Buffer[CriterionLine],i:Int) : NodeSeq = {
SHtml.untrustedSelect(
otOptions,
Full(ot.objectType),
({ x =>
ditQueryData.criteriaMap.get(x) foreach { o => if(i >= 0 && i < lines.size) lines(i) = lines(i).copy(objectType=o) }
}),
("id","ot_"+i),
("onchange", ajaxAttr(lines,i)._2.toJsCmd),
("class","selectField")
)
}
def attributeNameSelect(ot:ObjectCriterion,a:Criterion,lines: Buffer[CriterionLine],i:Int) : NodeSeq = {
SHtml.untrustedSelect(
optionAttributesFor(ot.objectType),
Full(a.name),
(x => { //check that x is really a value of ot
if(i >= 0 && i < lines.size) lines(i).objectType.criterionForName(x) foreach { y => lines(i) = lines(i).copy(attribute=y) }
}),
("id","at_"+i),
("onchange", ajaxComp(lines,i)._2.toJsCmd),
("class","selectField")
)
}
def comparatorSelect(ot:ObjectCriterion,a:Criterion,c:CriterionComparator,lines: Buffer[CriterionLine],i:Int) : NodeSeq = {
SHtml.untrustedSelect(
optionComparatorsFor(ot.objectType,a.name),
Full(c.id),
(x => {
if(i >= 0 && i < lines.size) lines(i).attribute.cType.comparatorForString(x) foreach { y => lines(i) = lines(i).copy(comparator=y) }
}),
("id","ct_"+i),
("onchange", ajaxVal(lines,i)._2.toJsCmd),
("class","selectComparator")
)
}
val defaultLine : CriterionLine = {
//in case of further modification in ditQueryData
require(ditQueryData.criteriaMap(OC_NODE).criteria(0).name == "OS", "Error in search node criterion default line, did you change DitQueryData ?")
require(ditQueryData.criteriaMap(OC_NODE).criteria(0).cType.isInstanceOf[OstypeComparator.type], "Error in search node criterion default line, did you change DitQueryData ?")
CriterionLine(
objectType = ditQueryData.criteriaMap(OC_NODE)
, attribute = ditQueryData.criteriaMap(OC_NODE).criteria(0)
, comparator = ditQueryData.criteriaMap(OC_NODE).criteria(0).cType.comparators(0)
, value = "Linux"
)
}
}
| jooooooon/rudder | rudder-web/src/main/scala/com/normation/rudder/web/components/SearchNodeComponent.scala | Scala | agpl-3.0 | 22,140 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package jp.gihyo.spark.ch08
// scalastyle:off println
import org.apache.log4j.{Level, Logger}
import org.apache.spark.{SparkContext, SparkConf}
import org.apache.spark.graphx.{EdgeDirection, Graph, Edge}
import org.apache.spark.rdd.RDD
/**
* An example code of recommendation by using GraphX
*
* Run with
* {{{
* spark-submit --class jp.gihyo.spark.ch08.GraphxRecommendExample \
* path/to/gihyo-spark-book-example_2.10-1.0.1.jar \
* numProducts numUsers numProductsPerUser [ RandomSelection | PreferentialAttachment ]
* }}}
*/
object GraphxRecommendExample {
def main(args: Array[String]): Unit = {
if (args.length != 4) {
throw new IllegalArgumentException("Invalid arguments")
}
// Set the log level to WARN
Logger.getLogger("org").setLevel(Level.WARN)
// Create the SparkContext
val conf = new SparkConf().setAppName("GraphxRecommendExample")
val sc = new SparkContext(conf)
// Read the settings from the command-line arguments
val (numProducts, numUsers, numProductsPerUser): (Int, Int, Int) =
(args(0).toInt, args(1).toInt, args(2).toInt)
implicit val recOpts: RecommendLogOptions =
RecommendLogOptions(numProducts, numUsers, numProductsPerUser)
implicit val pidGenerator = ProductIdGenerator.fromString(args(3))
run(sc)
sc.stop()
}
def run(sc: SparkContext)
(implicit recOpts: RecommendLogOptions, pidGenerator: ProductIdGenerator): Unit = {
// Generate the product and user lists
val products: List[VertexProperty] = PurchaseLogGenerator.genProductList
val users: List[VertexProperty] = PurchaseLogGenerator.genUserList
// Print the first 20 products
println("===================================")
println("get top 20 products:")
products.take(20).foreach(x => println(s"id: ${x.id},\ttype: ${x.kind},\tname: ${x.name}"))
// Print the first 20 users
println("===================================")
println("get top 20 users:")
users.take(20).foreach(x => println(
s"id: ${x.id},\\ttype: ${x.kind},\\tname: ${x.name}"
))
val vertices: RDD[(Long, VertexProperty)] =
sc.parallelize((users ++ products).map(v => (v.id, v)), numSlices = 10).cache()
// Generate the purchase log
val purchaseLog = PurchaseLogGenerator.genPurchaseLog(users).map {
p => Edge(p.uid, p.pid, EdgeProperty(kind = "purchase", score = 1.0))
}
val edges: RDD[Edge[EdgeProperty]] = sc.parallelize(purchaseLog)
// Print the first 20 purchase-log entries
println("===================================")
println("get top 20 purchase log:")
purchaseLog.take(20).foreach(x => println(s"user${x.srcId}\tpurchased a product${x.dstId}"))
// Build the graph
val graph: Graph[VertexProperty, EdgeProperty] = Graph(vertices, edges).cache()
// The product ID to compute recommendations for
val targetProductId = 1L
// Compute the recommendation list
val recommends = genRecommendEdges(graph, targetProductId)
// Print the recommendations
println("===================================")
println("get top 20 recommends:")
recommends.take(20).foreach { x =>
println(s"product${x.srcId}\\thas a recommended product${x.dstId}\\twith score ${x.attr.score}")
}
}
// Generates recommendEdges, which represent the strength of the link between products
private def genRecommendEdges(graph: Graph[VertexProperty, EdgeProperty], targetId: Long)
(implicit recOpts: RecommendLogOptions): RDD[Edge[EdgeProperty]] = {
// Apply Pregel to the input graph of user-product relations.
// As a preprocessing step, set the property of the vertex given as an argument to 1
// and the property of every other vertex to 0.
val recommends = graph.mapVertices((id, v) => if (id == targetId) 1 else 0).pregel(
initialMsg = 0, // the initial message
maxIterations = 2, // two supersteps: target product -> its buyers -> their other purchases
activeDirection = EdgeDirection.Either // on which edges sendMsg is run
)(
// How a vertex that receives messages updates its state
vprog = (id, dist, newDist) => math.max(dist, newDist),
// How messages are sent between vertices
sendMsg = triplet => {
if (triplet.srcAttr > 0) Iterator((triplet.dstId, triplet.srcAttr))
else if (triplet.dstAttr > 0) Iterator((triplet.srcId, triplet.dstAttr))
else Iterator.empty
},
// How multiple incoming messages are merged
mergeMsg = (a, b) => a + b
// Keep only the vertices whose computed score is greater than 0
).subgraph(vpred = (id, v) => v > 0)
// Get the property of the target vertex; in the second superstep every buyer sends a 1
// back to the target and the messages are summed, so this equals the target's degree
// (its number of buyers)
val degree = recommends.vertices.filter(v => v._1 == targetId).first()._2
val recommendEdges = recommends.vertices.collect {
// Select the product vertices other than the target vertex
case (dstId, d) if dstId <= recOpts.numProducts && dstId != targetId =>
// For each product vertex, compute the fraction of users who bought the target product
// and also bought this product
Edge(targetId, dstId, EdgeProperty(kind = "recommend", score = d.toDouble / degree))
}
recommendEdges
}
}
// scalastyle:on println
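// A minimal local-mode sketch of driving this job from a REPL instead of spark-submit.
// Illustration only: the option values are arbitrary, and "RandomSelection" is one of
// the two generator names listed in the scaladoc above.
//
// val conf = new SparkConf().setAppName("GraphxRecommendExample").setMaster("local[*]")
// val sc = new SparkContext(conf)
// implicit val recOpts: RecommendLogOptions = RecommendLogOptions(10, 100, 3)
// implicit val pidGenerator: ProductIdGenerator = ProductIdGenerator.fromString("RandomSelection")
// GraphxRecommendExample.run(sc)
// sc.stop()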
| yu-iskw/gihyo-spark-book-example | src/main/scala/jp/gihyo/spark/ch08/GraphxRecommendExample.scala | Scala | apache-2.0 | 6,245 |
package sweet
import org.scalatools.testing._
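// Bridges the Sweet test framework to the sbt test-interface API: loads and runs a
// Sweet test class, forwarding its results to sbt's EventHandler and Loggers.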
class SweetRunner(val classLoader: ClassLoader, loggers: Array[Logger]) extends Runner {
def run(testClassName: String, fingerprint: TestFingerprint, eventHandler: EventHandler, args: Array[String]){
val testClass = Class.forName(testClassName, true, classLoader).asSubclass(classOf[Sweet])
val sweet = testClass.newInstance
val reporter = new MySweetReporter(eventHandler)
sweet.run(reporter)
}
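// Adapts Sweet's reporter callbacks to test-interface events and logger output.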
class MySweetReporter(eventHandler: EventHandler) extends SweetReporter with NotNull {
def newEvent(tn: String, r: Result, e: Option[Throwable]) {
class MyEvent(val testName:String, val description:String, val result:Result, val error:Throwable) extends Event
eventHandler.handle(new MyEvent(tn, tn, r, e getOrElse null))
}
def logInfo(s:String){ loggers.foreach(_ info s) }
def logError(s:String){ loggers.foreach(_ error s) }
def apply(event: SweetEvent) {
event match {
case t: TestStarting => loggers.foreach(_ info "Test Starting: " + t.testName)
case t: TestErrored =>
t.reason.printStackTrace()
logError("Test Failed: " + t.testName)
newEvent(t.testName, Result.Failure, Some(t.reason))
case t: TestSucceeded =>
logInfo("Test Passed: " + t.testName)
newEvent(t.testName, Result.Success, None)
}
}
}
}
| joshcough/Sweet | src/main/scala/sweet/SweetRunner.scala | Scala | lgpl-2.1 | 1,399 |