/**
* Licensed to Big Data Genomics (BDG) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The BDG licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.bdgenomics.adam.util
import java.io.File
import org.bdgenomics.adam.models.ReferenceRegion
import org.bdgenomics.utils.io.LocalFileByteAccess
class TwoBitFileSuite extends ADAMFunSuite {
test("correctly read sequence from .2bit file") {
val file = new File(testFile("hg19.chrM.2bit"))
val byteAccess = new LocalFileByteAccess(file)
val twoBitFile = new TwoBitFile(byteAccess)
assert(twoBitFile.numSeq == 1)
assert(twoBitFile.seqRecords.toSeq.length == 1)
assert(twoBitFile.extract(ReferenceRegion("hg19_chrM", 0, 10)) == "GATCACAGGT")
assert(twoBitFile.extract(ReferenceRegion("hg19_chrM", 503, 513)) == "CATCCTACCC")
assert(twoBitFile.extract(ReferenceRegion("hg19_chrM", 16561, 16571)) == "CATCACGATG")
}
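// In the .2bit format, repeat-masked (soft-masked) bases carry a mask flag;
// extracting with the mask option set returns them as lowercase characters.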
test("correctly return masked sequences from .2bit file") {
val file = new File(testFile("hg19.chrM.2bit"))
val byteAccess = new LocalFileByteAccess(file)
val twoBitFile = new TwoBitFile(byteAccess)
assert(twoBitFile.extract(ReferenceRegion("hg19_chrM", 0, 10), true) == "GATCACAGGT")
assert(twoBitFile.extract(ReferenceRegion("hg19_chrM", 2600, 2610), true) == "taatcacttg")
}
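// Unknown bases are stored as separate N blocks in the .2bit format and come
// back as 'N' characters on extraction.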
test("correctly return Ns from .2bit file") {
val file = new File(testFile("human_g1k_v37_chr1_59kb.2bit"))
val byteAccess = new LocalFileByteAccess(file)
val twoBitFile = new TwoBitFile(byteAccess)
assert(twoBitFile.extract(ReferenceRegion("1", 9990, 10010), true) == "NNNNNNNNNNTAACCCTAAC")
}
test("correctly calculates sequence dictionary") {
val file = new File(testFile("hg19.chrM.2bit"))
val byteAccess = new LocalFileByteAccess(file)
val twoBitFile = new TwoBitFile(byteAccess)
val dict = twoBitFile.sequences
assert(dict.records.length == 1)
assert(dict.records.head.length == 16571)
}
}
// Source: massie/adam | adam-core/src/test/scala/org/bdgenomics/adam/util/TwoBitFileSuite.scala | apache-2.0
/*
* Copyright Β© 2015 Reactific Software LLC. All Rights Reserved.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package rxmongo.driver
import java.net.{ Socket, InetSocketAddress, InetAddress }
import javax.net.SocketFactory
import akka.actor.{ ActorSystem, ActorRef }
import akka.pattern.ask
import org.specs2.execute.Result
import rxmongo.bson.BSONObject
import rxmongo.messages.cmds.BuildInfoCmd
import rxmongo.messages.replies.BuildInfoReply
import rxmongo.messages.replies.BuildInfoReply.BuildInfoCodec
import rxmongo.messages.{ ReplyMessage, QueryMessage }
import scala.concurrent.Await
import scala.concurrent.duration._
class DriverSpec extends AkkaTest(ActorSystem("DriverSpec")) {
implicit val timeout = Driver.defaultTimeout
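// Run the body only when a local MongoDB instance is reachable; otherwise skip.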
def mongoTest(f : () ⇒ Result) : Result = {
if (Helper.haveLocalMongo) {
f()
} else {
skipped(": no local mongo")
}
}
sequential
"Driver" should {
"confirm verson 3 in BuildInfo" in mongoTest { () =>
val driver = Driver(None, "BuildInfo")
val future = driver.connect("mongodb://localhost/")
val conn = Await.result(future, Duration(1, "s"))
conn.isInstanceOf[ActorRef] must beTrue
val c = conn.asInstanceOf[ActorRef]
val future2 = c.ask(BuildInfoCmd)
val x = Await.result(future2, 1.seconds)
driver.close(500.millis)
x.isInstanceOf[ReplyMessage] must beTrue
val reply : ReplyMessage = x.asInstanceOf[ReplyMessage]
reply.documents.size must beEqualTo(1)
val buildInfoReply = reply.documents.head.to[BuildInfoReply]
buildInfoReply.versionArray(0) must beGreaterThanOrEqualTo(3)
}
/*
"mind its lifecycle" in {
val driver = Driver(None, "Lifecycle")
driver.close(1.second)
driver.isClosed must beTrue
}
"make a simple connection" in mongoTest { () β
val driver = Driver(None, "Simple")
val future = driver.connect("mongodb://localhost/")
val conn = Await.result(future, 1.seconds)
conn.isInstanceOf[ActorRef] must beTrue
driver.close(500.millis)
success
}
*/
"send an innocuous query" in mongoTest { () β
val driver = Driver(None, "Innocuous")
val future = driver.connect("mongodb://localhost/")
val conn = Await.result(future, Duration(1, "s"))
conn.isInstanceOf[ActorRef] must beTrue
val c = conn.asInstanceOf[ActorRef]
val msg = QueryMessage("rxmongo.test", BSONObject("foo" -> 1))
val future2 = c.ask(msg)
val x = Await.result(future2, 1.seconds)
driver.close(500.millis)
x.isInstanceOf[ReplyMessage] must beTrue
}
/*
"handle a CheckReplicaSet" in mongoTest { () β
val driver = Driver(None, "Innocuous")
val future = driver.connect("mongodb://localhost/") map { conn : ActorRef ⇒
conn ! Connection.CheckReplicaSet
}
Await.result(future, Duration(1, "s"))
success
}
"return 0 for numConnections when first established" in {
val driver = Driver(None, "numConnections=0")
Await.result(driver.numConnections, 1.seconds) must beEqualTo(0)
}
"return 0 before a connection is established, 1 afterwards" in mongoTest { () β
val driver = Driver(None, "ConnectionCount")
val before = driver.numConnections
val future = driver.connect("mongodb://localhost/")
val conn = Await.result(future, 1.second)
conn.isInstanceOf[ActorRef] must beTrue
val after = driver.numConnections
val result = Await.result(Future.sequence(Seq(before, after)), 1.second)
result must beEqualTo(Seq(0, 1))
driver.close(1.second)
success
}
*/
}
}
object Helper {
lazy val haveLocalMongo : Boolean = {
val addr = InetAddress.getLoopbackAddress
val port = 27017
val socketAddress = new InetSocketAddress(addr, port)
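// Probe the default MongoDB port (27017) with a short TCP connect; any
// failure is treated as "no local mongo" so the dependent tests are skipped.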
try {
val socket : Socket = SocketFactory.getDefault.createSocket
socket.connect(socketAddress, 1000)
socket.close()
true
} catch {
case x : Throwable ⇒ false
}
}
}
// Source: reactific/RxMongo | driver/src/test/scala/rxmongo/driver/DriverSpec.scala | mit
package org.allenai.pnp
import scala.collection.JavaConverters._
import org.scalatest._
import org.scalatest.Matchers
import com.jayantkrish.jklol.ccg.lambda.ExpressionParser
class PnpUtilSpec extends FlatSpec with Matchers {
val TOLERANCE = 0.0001
val parser = ExpressionParser.expression2
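// A weighted coin: yields true with probability p and false otherwise.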
def flip(p: Double): Pnp[Boolean] = {
Pnp.chooseMap(Seq((true, p), (false, 1.0 - p)))
}
val bindings = Map[String, AnyRef](
"true" -> true.asInstanceOf[AnyRef],
"false" -> false.asInstanceOf[AnyRef],
"coin" -> Pnp.chooseMap(Seq((true, 0.6), (false, 0.4))),
"flipProb" -> 0.6.asInstanceOf[AnyRef],
"flipProb2" -> 0.55.asInstanceOf[AnyRef],
"flip" -> PnpUtil.wrap(flip _),
"filter" -> PnpUtil.wrap(PnpUtil.filter _),
"list" -> { x: Vector[AnyRef] => Pnp.value(x.toList) },
"concat" -> PnpUtil.wrap2({ (x: String, y: String) => x ++ y })
)
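// Parse an expression, convert it to a Pnp program under the bindings above,
// and compare the beam-search (value, probability) pairs with the expectation.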
def runTest[A](exprString: String, expected: Seq[(A, Double)]): Unit = {
val expr = parser.parse(exprString)
val pp = PnpUtil.lfToPnp(expr, bindings)
val values = pp.beamSearch(100).executions.map(x => (x.value, x.prob))
for ((value, expected) <- values.zip(expected)) {
value._1 should be(expected._1)
value._2 should be(expected._2 +- TOLERANCE)
}
}
"PpUtil" should "correctly interpret constants" in {
runTest("coin", Seq((true, 0.6), (false, 0.4)))
}
it should "correctly interpret string constants" in {
runTest("\\"foo\\"", Seq(("foo", 1.0)))
}
it should "correctly interpret applications" in {
runTest("(flip flipProb)", Seq((true, 0.6), (false, 0.4)))
}
it should "correctly interpret applications (2)" in {
runTest("(list flipProb)", Seq((List(0.6), 1.0)))
}
it should "correctly interpret applications (3)" in {
runTest("(concat \\"foo\\" \\"bar\\")", Seq(("foobar", 1.0)))
}
it should "correctly interpret filters" in {
runTest(
"(filter (lambda (x) (flip x)) (list flipProb flipProb2))",
Seq((List(0.6, 0.55), 0.6 * 0.55), (List(0.6), 0.6 * 0.45),
(List(0.55), 0.4 * 0.55), (List(), 0.4 * 0.45))
)
}
}
// Source: jayantk/pnp | src/test/scala/org/allenai/pnp/PnpUtilSpec.scala | apache-2.0
package scala.models
import com.bryzek.apidoc.generator.v0.models.{File, InvocationForm}
import com.bryzek.apidoc.spec.v0.models.Attribute
import lib.Text._
import lib.generator.CodeGenerator
import scala.generator.{ScalaEnums, ScalaCaseClasses, ScalaService}
import scala.util.matching.Regex
import generator.ServiceFileNames
import play.api.libs.json.JsString
object Kafka10Consumer extends CodeGenerator {
import CaseClassUtil._
import KafkaUtil._
override def invoke(
form: InvocationForm
): Either[Seq[String], Seq[File]] = {
Right(generateCode(form))
}
def generateTopicRegex(topicFn: String, apiVersion: String) = {
val tenantVariable = Seq("${tenant}", "$tenant").map(Regex.quote(_)).mkString("|")
//`tenantsPattern` is a val defined in the `topicRegex` function, see `source` below.
topicFn.replaceAll(tenantVariable, Regex.quoteReplacement("($tenantsPattern)"))
}
def generateCode(
form: InvocationForm,
addHeader: Boolean = true
): Seq[File] = {
val ssd = ScalaService(form.service)
val prefix = underscoreAndDashToInitCap(ssd.name)
val enumJson: String = ssd.enums.map { ScalaEnums(ssd, _).buildJson() }.mkString("\n\n")
val play2Json = Play2JsonExtended(ssd).generate()
val header = if (addHeader) ApidocComments(form.service.version, form.userAgent).toJavaString() + "\n"
else ""
val kafkaModels = getKafkaModels(ssd)
// Return list of files
kafkaModels.map{ model ⇒
val className = model.name
val configPath = ssd.namespaces.base.split("\\.").toSeq.dropRight(1).mkString(".")
val kafkaProps = getKafkaProps(model.model).get
val apiVersion = ssd.namespaces.last
val topicFn = kafkaProps.topic
val topicRegex = generateTopicRegex(topicFn, apiVersion)
val source = s"""$header
import java.util.Properties
import scala.language.postfixOps
import scala.annotation.tailrec
import scala.collection.JavaConversions._
import scala.util.matching.Regex
import scala.util.{ Try, Success }
import com.typesafe.config.Config
import com.typesafe.config.ConfigValueType
import play.api.libs.json.Json
import org.apache.kafka.clients.consumer.KafkaConsumer
import org.apache.kafka.clients.consumer.ConsumerRecord
import org.apache.kafka.clients.consumer.ConsumerRecords
import org.apache.kafka.clients.consumer.ConsumerRebalanceListener
import org.apache.kafka.common.serialization.StringDeserializer
import org.apache.kafka.common.TopicPartition
import movio.api.kafka_0_10.Consumer
package ${ssd.namespaces.base}.kafka {
import ${ssd.namespaces.base}.models._
import ${ssd.namespaces.base}.models.json._
object ${className}Topic {
/**
The version of the api - apidoc generator enforces this value.
For use when creating a topic name.
Example: "v2"
*/
val apiVersion = "${apiVersion}"
/**
The name of the kafka topic to publish and consume records from.
This is a Scala expression that gets evaluated when building the name.
Example: `s"mc-servicename-$${apiVersion}-$${instance}-$${tenant}"`
@param instance an instance of the topic, eg uat, prod. It's read from the config.
@param tenant is the customer id, eg vc_regalus
*/
def topic(instance: String)(tenant: String) = ${topicFn}
/**
The regex for the kafka consumer to match topics.
@param instance an instance of the topic, eg uat, prod. It's read from the config.
@param tenants the tenants of the topics from which the consumer consumes. If it's empty,
all tenants are matched.
*/
def topicRegex(inst: String, tenants: Seq[String]) = {
val instance = Regex.quote(inst)
val tenantsPattern = if (tenants.isEmpty) ".*"
else tenants.map(Regex.quote(_)).mkString("|")
${topicRegex}
}
}
object ${className}Consumer {
val base = "${configPath}.kafka.consumer"
val BootstrapServers = s"$$base.bootstrap.servers"
val TopicInstanceKey = s"$$base.topic.instance"
val TenantsKey = s"$$base.tenants"
val PollTimeoutKey = s"$$base.poll.timeout" // ms
val PropertiesKey = s"$$base.properties"
}
class ${className}Consumer (
config: Config,
consumerGroupId: String,
tenants: Option[Seq[String]] = None
) extends Consumer[${className}] {
import ${className}Consumer._
val pollMillis = config.getLong(PollTimeoutKey)
lazy val topicRegex: Regex =
${className}Topic.topicRegex(
config.getString(TopicInstanceKey),
tenants.getOrElse(config.getStringList(TenantsKey))
).r
lazy val kafkaConsumer = new KafkaConsumer[String, String](readConsumerPropertiesFromConfig)
kafkaConsumer.subscribe(topicRegex.pattern, new ConsumerRebalanceListener {
def onPartitionsRevoked(partitions: java.util.Collection[TopicPartition]) = {}
def onPartitionsAssigned(partitions: java.util.Collection[TopicPartition]) = {}
})
def readConsumerPropertiesFromConfig = {
val properties = new Properties
properties.put("auto.offset.reset", "earliest")
properties.put("enable.auto.commit", "false")
if (config.hasPath(PropertiesKey)) {
config.getConfig(PropertiesKey)
.entrySet
.filter { _.getValue.valueType == ConfigValueType.STRING }
.foreach { e ⇒ properties.put(e.getKey, e.getValue.unwrapped) }
}
properties.put("bootstrap.servers", config.getString(BootstrapServers))
properties.put("group.id", consumerGroupId)
properties.put("key.deserializer", classOf[StringDeserializer].getName)
properties.put("value.deserializer", classOf[StringDeserializer].getName)
properties
}
/**
* Process a batch of records with given processor function and commit
* offsets if it succeeds. Records with null payloads are ignored.
*
* @param processor processor function that takes a map of records for different tenants
* @param batchSize the maximum number of records to process
*/
def processBatchThenCommit(
processor: Map[String, Seq[${className}]] ⇒ Try[Map[String, Seq[${className}]]],
batchSize: Int = 1
): Try[Map[String, Seq[${className}]]] =
doProcess[${className}] { record ⇒
Option(record.value).map(Json.parse(_).as[${className}])
}(processor, batchSize)
/**
* Process a batch of records with given processor function and commit
* offsets if it succeeds.
*
* Each record is a tuple of the key and the payload deserialised to
* `Option[T]` which is `None` when the record has a null payload.
*
* @param processor processor function that takes a map of records for different tenants
* @param batchSize the maximum number of records to process
*/
def processBatchWithKeysThenCommit(
processor: Map[String, Seq[(String, Option[${className}])]] ⇒ Try[Map[String, Seq[(String, Option[${className}])]]],
batchSize: Int = 1
): Try[Map[String, Seq[(String, Option[${className}])]]] =
doProcess[(String, Option[${className}])] { record ⇒
Some(
record.key → Option(record.value).map(Json.parse(_).as[${className}])
)
}(processor, batchSize)
def doProcess[T](
converter: ConsumerRecord[String, String] ⇒ Option[T]
)(
processor: Map[String, Seq[T]] ⇒ Try[Map[String, Seq[T]]],
batchSize: Int = 1
): Try[Map[String, Seq[T]]] = {
val batch = Try {
import scala.collection.JavaConverters._
kafkaConsumer.poll(pollMillis).toSeq.flatMap { r ⇒
val topic = r.topic
val value = r.value
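// Pattern-match the topic name against the regex to extract the tenant capture group.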
val topicRegex(tenant) = r.topic
converter(r).map(t ⇒ (tenant, t))
}.groupBy(_._1).mapValues(_.map(_._2))
}
for {
records ← batch
processedRecords ← processor(records)
} yield {
kafkaConsumer.commitSync()
processedRecords
}
}
}
}
"""
ServiceFileNames.toFile(form.service.namespace, form.service.organization.key, form.service.application.key, form.service.version, s"${className}Consumer", source, Some("Scala"))
}
}
}
// Source: movio/movio-apidoc-generator | scala-generator/src/main/scala/models/Kafka10Consumer.scala | mit
package org.scalaide.core.quickassist.createmethod
import org.junit.Test
import org.junit.Assert._
import org.scalaide.core.internal.quickassist.createmethod.ParameterList
import org.scalaide.core.internal.quickassist.createmethod.ParameterListUniquifier
class ParameterListUniquifierTest {
@Test def makeNamesUnique(): Unit = {
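// Duplicate "arg" names get increasing numeric suffixes, and the counter
// continues across parameter lists ("arg", "arg1", then "arg2").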
val expected: ParameterList = List(List(("someName", "Any"), ("arg", "Any"), ("arg1", "Any")), List(("arg2", "Any")))
assertEquals(expected, ParameterListUniquifier.uniquifyParameterNames(List(List(("someName", "Any"), ("arg", "Any"), ("arg", "Any")), List(("arg", "Any")))))
}
}
// Source: Kwestor/scala-ide | org.scala-ide.sdt.core.tests/src/org/scalaide/core/quickassist/createmethod/ParameterListUniquifierTest.scala | bsd-3-clause
package io.iteratee.testing
import cats.Monad
import cats.kernel.Eq
import io.iteratee.{ Enumeratee, Enumerator, Iteratee }
import org.scalacheck.Arbitrary
import scala.Predef._
trait EqInstances {
implicit def eqThrowable: Eq[Throwable] = Eq.fromUniversalEquals
implicit def eqEnumerator[F[_]: Monad, A: Eq](implicit eqFVA: Eq[F[Vector[A]]]): Eq[Enumerator[F, A]] =
Eq.by(_.toVector)
implicit def eqIteratee[F[_]: Monad, A: Eq: Arbitrary, B: Eq: Arbitrary](implicit
eqFB: Eq[F[B]]
): Eq[Iteratee[F, A, B]] = {
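// Iteratee equality is undecidable in general, so approximate it by feeding
// both iteratees a few sampled enumerators and comparing the results.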
val e0 = Enumerator.empty[F, A]
val e1 = Enumerator.enumList[F, A](Arbitrary.arbitrary[List[A]].sample.get)
val e2 = Enumerator.enumStream[F, A](Arbitrary.arbitrary[Stream[A]].sample.get)
val e3 = Enumerator.enumVector[F, A](Arbitrary.arbitrary[Vector[A]].sample.get)
Eq.instance { (i, j) =>
eqFB.eqv(e0.into(i), e0.into(j)) &&
eqFB.eqv(e1.into(i), e1.into(j)) &&
eqFB.eqv(e2.into(i), e2.into(j)) &&
eqFB.eqv(e3.into(i), e3.into(j))
}
}
implicit def eqEnumeratee[F[_]: Monad, A: Eq: Arbitrary, B: Eq: Arbitrary](implicit
eqFVB: Eq[F[Vector[B]]]
): Eq[Enumeratee[F, A, B]] = {
val e0 = Enumerator.empty[F, A]
val e1 = Enumerator.enumList[F, A](Arbitrary.arbitrary[List[A]].sample.get)
val e2 = Enumerator.enumStream[F, A](Arbitrary.arbitrary[Stream[A]].sample.get)
val e3 = Enumerator.enumVector[F, A](Arbitrary.arbitrary[Vector[A]].sample.get)
Eq.instance { (i, j) =>
eqFVB.eqv(e0.through(i).toVector, e0.through(j).toVector) &&
eqFVB.eqv(e1.through(i).toVector, e1.through(j).toVector) &&
eqFVB.eqv(e2.through(i).toVector, e2.through(j).toVector) &&
eqFVB.eqv(e3.through(i).toVector, e3.through(j).toVector)
}
}
}
object EqInstances extends EqInstances
// Source: flyingwalrusllc/iteratee | testing/shared/src/main/scala/io/iteratee/testing/EqInstances.scala | apache-2.0
/*
Copyright 2012 Twitter, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.twitter.scalding
import cascading.tuple.Fields
import cascading.tuple.TupleEntry
import java.util.concurrent.TimeUnit
import org.specs._
import java.lang.{Integer => JInt}
class NumberJoinerJob(args : Args) extends Job(args) {
val in0 = TypedTsv[(Int,Int)]("input0").read.rename((0,1) -> ('x0, 'y0))
val in1 = Tsv("input1").read.mapTo((0,1) -> ('x1, 'y1)) { input : (Long, Long) => input }
in0.joinWithSmaller('x0 -> 'x1, in1)
.write(Tsv("output"))
}
class NumberJoinTest extends Specification {
import Dsl._
"A NumberJoinerJob" should {
//Set up the job:
"not throw when joining longs with ints" in {
JobTest("com.twitter.scalding.NumberJoinerJob")
.source(TypedTsv[(Int,Int)]("input0"), List((0,1), (1,2), (2,4)))
.source(Tsv("input1"), List(("0","1"), ("1","3"), ("2","9")))
.sink[(Int,Int,Long,Long)](Tsv("output")) { outBuf =>
val unordered = outBuf.toSet
unordered.size must be_==(3)
unordered((0,1,0L,1L)) must be_==(true)
unordered((1,2,1L,3L)) must be_==(true)
unordered((2,4,2L,9L)) must be_==(true)
}
.run
.runHadoop
.finish
}
}
}
class SpillingJob(args: Args) extends Job(args) {
TypedTsv[(Int, Int)]("input").read.rename((0,1) -> ('n, 'v))
.groupBy('n) { group =>
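// spillThreshold(3) makes the in-memory aggregation cache spill early, so
// the ten distinct keys in the test input exercise the spill path.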
group.spillThreshold(3).sum[Int]('v).size
}.write(Tsv("output"))
}
class SpillingTest extends Specification {
import Dsl._
"A SpillingJob" should {
val src = (0 to 9).map(_ -> 1) ++ List(0 -> 4)
val result = src.groupBy(_._1)
.mapValues { v => (v.map(_._2).sum, v.size) }
.map { case (a, (b, c)) => (a, b, c) }
.toSet
//Set up the job:
"work when number of keys exceeds spill threshold" in {
JobTest(new SpillingJob(_))
.source(TypedTsv[(Int, Int)]("input"), src)
.sink[(Int, Int, Int)](Tsv("output")) { outBuf =>
outBuf.toSet must be_==(result)
}.run
.runHadoop
.finish
}
}
}
object GroupRandomlyJob {
val NumShards = 10
}
class GroupRandomlyJob(args: Args) extends Job(args) {
import GroupRandomlyJob.NumShards
Tsv("fakeInput").read
.mapTo(0 -> 'num) { (line: String) => line.toInt }
.groupRandomly(NumShards) { _.max('num) }
.groupAll { _.size }
.write(Tsv("fakeOutput"))
}
class GroupRandomlyJobTest extends Specification {
import GroupRandomlyJob.NumShards
noDetailedDiffs()
"A GroupRandomlyJob" should {
val input = (0 to 10000).map { _.toString }.map { Tuple1(_) }
JobTest("com.twitter.scalding.GroupRandomlyJob")
.source(Tsv("fakeInput"), input)
.sink[(Int)](Tsv("fakeOutput")) { outBuf =>
val numShards = outBuf(0)
numShards must be_==(NumShards)
}
.run.finish
}
}
class ShuffleJob(args: Args) extends Job(args) {
Tsv("fakeInput")
.read
.mapTo(0 -> 'num) { (line: String) => line.toInt }
.shuffle(shards = 1, seed = 42L)
.groupAll{ _.toList[Int]('num -> 'num) }
.write(Tsv("fakeOutput"))
}
class ShuffleJobTest extends Specification {
noDetailedDiffs()
val expectedShuffle : List[Int] = List(10, 5, 9, 12, 0, 1, 4, 8, 11, 6, 2, 3, 7)
"A ShuffleJob" should {
val input = (0 to 12).map { Tuple1(_) }
JobTest("com.twitter.scalding.ShuffleJob")
.source(Tsv("fakeInput"), input)
.sink[(List[Int])](Tsv("fakeOutput")) { outBuf =>
outBuf(0) must be_==(expectedShuffle)
}
.run.finish
}
}
class MapToGroupBySizeSumMaxJob(args: Args) extends Job(args) {
TextLine(args("input")).read.
//field 1 is the line text
mapTo(1-> ('kx,'x)) { line : String =>
val x = line.toDouble
((x > 0.5),x)
}.
groupBy('kx) { _.size.sum[Double]('x->'sx).max('x) }.
write( Tsv(args("output")) )
}
class MapToGroupBySizeSumMaxTest extends Specification {
noDetailedDiffs()
"A MapToGroupBySizeSumMaxJob" should {
val r = new java.util.Random
//Here is our input data:
val input = (0 to 100).map { i : Int => (i.toString, r.nextDouble.toString) }
//Here is our expected output:
val goldenOutput = input.map { case (line : String, x : String) =>
val xv = x.toDouble;
((xv > 0.5), xv)
}.
groupBy { case (kx : Boolean, x : Double) => kx }.
mapValues { vals =>
val vlist = vals.map { case (k:Boolean, x:Double) => x }.toList
val size = vlist.size
val sum = vlist.sum
val max = vlist.max
(size, sum, max)
}
//Now we have the expected input and output:
JobTest("com.twitter.scalding.MapToGroupBySizeSumMaxJob").
arg("input","fakeInput").
arg("output","fakeOutput").
source(TextLine("fakeInput"), input).
sink[(Boolean,Int,Double,Double)](Tsv("fakeOutput")) { outBuf =>
val actualOutput = outBuf.map {
case (k:Boolean, sz : Int, sm : Double, mx : Double) =>
(k, (sz,sm,mx) )
}.toMap
"produce correct size, sum, max" in {
goldenOutput must be_==(actualOutput)
}
}.
run.
finish
}
}
class PartitionJob(args: Args) extends Job(args) {
Tsv("input", new Fields("age", "weight"))
.partition('age -> 'isAdult) { (_:Int) > 18 } { _.average('weight) }
.project('isAdult, 'weight)
.write(Tsv("output"))
}
class PartitionJobTest extends Specification {
noDetailedDiffs()
"A PartitionJob" should {
val input = List((3, 23),(23,154),(15,123),(53,143),(7,85),(19,195),
(42,187),(35,165),(68,121),(13,103),(17,173),(2,13))
val (adults, minors) = input.partition { case (age, _) => age > 18 }
val Seq(adultWeights, minorWeights) = Seq(adults, minors).map { list =>
list.map { case (_, weight) => weight }
}
val expectedOutput = Map(
true -> adultWeights.sum / adultWeights.size.toDouble,
false -> minorWeights.sum / minorWeights.size.toDouble
)
JobTest(new com.twitter.scalding.PartitionJob(_))
.source(Tsv("input", new Fields("age", "weight")), input)
.sink[(Boolean,Double)](Tsv("output")) { outBuf =>
outBuf.toMap must be_==(expectedOutput)
}
.run.finish
}
}
class MRMJob(args : Args) extends Job(args) {
val in = Tsv("input").read.mapTo((0,1) -> ('x,'y)) { xy : (Int,Int) => xy }
// XOR reduction (insane, I guess)
in.groupBy('x) { _.reduce('y) { (left : Int, right : Int) => left ^ right } }
.write(Tsv("outputXor"))
// set-union reduction via mapReduceMap
val setPipe = in.groupBy('x) { _.mapReduceMap('y -> 'y) { (input : Int) => Set(input) }
{ (left : Set[Int], right : Set[Int]) => left ++ right }
{ (output : Set[Int]) => output.toList }
}
setPipe.flatten[Int]('y -> 'y)
.write(Tsv("outputSet"))
setPipe.flattenTo[Int]('y -> 'y)
.write(Tsv("outputSetTo"))
}
class MRMTest extends Specification {
noDetailedDiffs() //Fixes an issue with scala 2.9
"A MRMJob" should {
val input = List((0,1),(0,2),(1,3),(1,1))
JobTest("com.twitter.scalding.MRMJob")
.source(Tsv("input"), input)
.sink[(Int,Int)](Tsv("outputXor")) { outBuf =>
"use reduce to compute xor" in {
outBuf.toList.sorted must be_==(List((0,3),(1,2)))
}
}
.sink[(Int,Int)](Tsv("outputSet")) { outBuf =>
"use mapReduceMap to round-trip input" in {
outBuf.toList.sorted must be_==(input.sorted)
}
}
.sink[Int](Tsv("outputSetTo")) { outBuf =>
"use flattenTo" in {
outBuf.toList.sorted must be_==(input.map { _._2 }.sorted)
}
}
.run
.finish
}
}
class JoinJob(args: Args) extends Job(args) {
val p1 = Tsv(args("input1"))
.read
.mapTo((0, 1) -> ('k1, 'v1)) { v : (String, Int) => v }
val p2 = Tsv(args("input2"))
.read
.mapTo((0, 1) -> ('k2, 'v2)) { v : (String, Int) => v }
p1.joinWithSmaller('k1 -> 'k2, p2)
.project('k1, 'v1, 'v2)
.write( Tsv(args("output")) )
}
class JoinTest extends Specification {
noDetailedDiffs() //Fixes an issue with scala 2.9
"A JoinJob" should {
val input1 = List("a" -> 1, "b" -> 2, "c" -> 3)
val input2 = List("b" -> -1, "c" -> 5, "d" -> 4)
val correctOutput = Map("b" -> (2, -1), "c" -> (3, 5))
JobTest("com.twitter.scalding.JoinJob")
.arg("input1", "fakeInput1")
.arg("input2", "fakeInput2")
.arg("output", "fakeOutput")
.source(Tsv("fakeInput1"), input1)
.source(Tsv("fakeInput2"), input2)
.sink[(String,Int,Int)](Tsv("fakeOutput")) { outBuf =>
val actualOutput = outBuf.map {
case (k : String, v1 : Int, v2 : Int) =>
(k,(v1, v2))
}.toMap
"join tuples with the same key" in {
correctOutput must be_==(actualOutput)
}
}
.run
.finish
}
}
class CollidingKeyJoinJob(args: Args) extends Job(args) {
val p1 = Tsv(args("input1"))
.read
.mapTo((0, 1) -> ('k1, 'v1)) { v : (String, Int) => v }
// Add an extra fake key to do a join
.map('k1 -> 'k2) { (k : String) => k + k }
val p2 = Tsv(args("input2"))
.read
.mapTo((0, 1) -> ('k1, 'v2)) { v : (String, Int) => v }
// Add an extra fake key to do a join
.map('k1 -> 'k3) { (k : String) => k + k }
p1.joinWithSmaller(('k1,'k2) -> ('k1,'k3), p2)
.write( Tsv(args("output")) )
}
class CollidingKeyJoinTest extends Specification {
noDetailedDiffs() //Fixes an issue with scala 2.9
"A CollidingKeyJoinJob" should {
val input1 = List("a" -> 1, "b" -> 2, "c" -> 3)
val input2 = List("b" -> -1, "c" -> 5, "d" -> 4)
val correctOutput = Map("b" -> (2, "bb", -1, "bb"), "c" -> (3, "cc", 5, "cc"))
JobTest("com.twitter.scalding.CollidingKeyJoinJob")
.arg("input1", "fakeInput1")
.arg("input2", "fakeInput2")
.arg("output", "fakeOutput")
.source(Tsv("fakeInput1"), input1)
.source(Tsv("fakeInput2"), input2)
.sink[(String,Int,String,Int,String)](Tsv("fakeOutput")) { outBuf =>
val actualOutput = outBuf.map {
case (k : String, v1 : Int, k2 : String, v2 : Int, k3 : String) =>
(k,(v1, k2, v2, k3))
}.toMap
"join tuples with the same key" in {
correctOutput must be_==(actualOutput)
}
}
.run
.finish
}
}
class TinyJoinJob(args: Args) extends Job(args) {
val p1 = Tsv(args("input1"))
.read
.mapTo((0, 1) -> ('k1, 'v1)) { v : (String, Int) => v }
val p2 = Tsv(args("input2"))
.read
.mapTo((0, 1) -> ('k2, 'v2)) { v : (String, Int) => v }
p1.joinWithTiny('k1 -> 'k2, p2)
.project('k1, 'v1, 'v2)
.write( Tsv(args("output")) )
}
class TinyJoinTest extends Specification {
noDetailedDiffs() //Fixes an issue with scala 2.9
"A TinyJoinJob" should {
val input1 = List("a" -> 1, "b" -> 2, "c" -> 3)
val input2 = List("b" -> -1, "c" -> 5, "d" -> 4)
val correctOutput = Map("b" -> (2, -1), "c" -> (3, 5))
JobTest("com.twitter.scalding.TinyJoinJob")
.arg("input1", "fakeInput1")
.arg("input2", "fakeInput2")
.arg("output", "fakeOutput")
.source(Tsv("fakeInput1"), input1)
.source(Tsv("fakeInput2"), input2)
.sink[(String,Int,Int)](Tsv("fakeOutput")) { outBuf =>
val actualOutput = outBuf.map {
case (k : String, v1 : Int, v2 : Int) =>
(k,(v1, v2))
}.toMap
"join tuples with the same key" in {
correctOutput must be_==(actualOutput)
}
}
.run
.runHadoop
.finish
}
}
class TinyCollisionJoinJob(args: Args) extends Job(args) {
val p1 = Tsv(args("input1"))
.read
.mapTo((0, 1) -> ('k1, 'v1)) { v : (String, Int) => v }
val p2 = Tsv(args("input2"))
.read
.mapTo((0, 1) -> ('k1, 'v2)) { v : (String, Int) => v }
p1.joinWithTiny('k1 -> 'k1, p2)
.write( Tsv(args("output")) )
}
class TinyCollisionJoinTest extends Specification {
noDetailedDiffs() //Fixes an issue with scala 2.9
"A TinyCollisionJoinJob" should {
val input1 = List("a" -> 1, "b" -> 2, "c" -> 3)
val input2 = List("b" -> -1, "c" -> 5, "d" -> 4)
val correctOutput = Map("b" -> (2, -1), "c" -> (3, 5))
JobTest("com.twitter.scalding.TinyCollisionJoinJob")
.arg("input1", "fakeInput1")
.arg("input2", "fakeInput2")
.arg("output", "fakeOutput")
.source(Tsv("fakeInput1"), input1)
.source(Tsv("fakeInput2"), input2)
.sink[(String,Int,Int)](Tsv("fakeOutput")) { outBuf =>
val actualOutput = outBuf.map {
case (k : String, v1 : Int, v2 : Int) =>
(k,(v1, v2))
}.toMap
"join tuples with the same key" in {
correctOutput must be_==(actualOutput)
}
}
.run
.finish
}
}
class TinyThenSmallJoin(args : Args) extends Job(args) {
val pipe0 = Tsv("in0",('x0,'y0)).read
val pipe1 = Tsv("in1",('x1,'y1)).read
val pipe2 = Tsv("in2",('x2,'y2)).read
pipe0.joinWithTiny('x0 -> 'x1, pipe1)
.joinWithSmaller('x0 -> 'x2, pipe2)
.map(('y0, 'y1, 'y2) -> ('y0, 'y1, 'y2)) { v : (TC,TC,TC) =>
(v._1.n, v._2.n, v._3.n)
}
.project('x0, 'y0, 'x1, 'y1, 'x2, 'y2)
.write(Tsv("out"))
}
case class TC(val n : Int)
class TinyThenSmallJoinTest extends Specification with FieldConversions {
noDetailedDiffs() //Fixes an issue with scala 2.9
"A TinyThenSmallJoin" should {
val input0 = List((1,TC(2)),(2,TC(3)),(3,TC(4)))
val input1 = List((1,TC(20)),(2,TC(30)),(3,TC(40)))
val input2 = List((1,TC(200)),(2,TC(300)),(3,TC(400)))
val correct = List((1,2,1,20,1,200),
(2,3,2,30,2,300),(3,4,3,40,3,400))
JobTest("com.twitter.scalding.TinyThenSmallJoin")
.source(Tsv("in0",('x0,'y0)), input0)
.source(Tsv("in1",('x1,'y1)), input1)
.source(Tsv("in2",('x2,'y2)), input2)
.sink[(Int,Int,Int,Int,Int,Int)](Tsv("out")) { outBuf =>
val actualOutput = outBuf.toList.sorted
println(actualOutput)
"join tuples with the same key" in {
correct must be_==(actualOutput)
}
}
.run
.runHadoop
.finish
}
}
class LeftJoinJob(args: Args) extends Job(args) {
val p1 = Tsv(args("input1"))
.mapTo((0, 1) -> ('k1, 'v1)) { v : (String, Int) => v }
val p2 = Tsv(args("input2"))
.mapTo((0, 1) -> ('k2, 'v2)) { v : (String, Int) => v }
p1.leftJoinWithSmaller('k1 -> 'k2, p2)
.project('k1, 'v1, 'v2)
// Null sent to TSV will not be read in properly
.map('v2 -> 'v2) { v : AnyRef => Option(v).map { _.toString }.getOrElse("NULL") }
.write( Tsv(args("output")) )
}
class LeftJoinTest extends Specification {
noDetailedDiffs() //Fixes an issue with scala 2.9
"A LeftJoinJob" should {
val input1 = List("a" -> 1, "b" -> 2, "c" -> 3)
val input2 = List("b" -> -1, "c" -> 5, "d" -> 4)
val correctOutput = Map[String,(Int,AnyRef)]("a" -> (1,"NULL"), "b" -> (2, "-1"),
"c" -> (3, "5"))
JobTest("com.twitter.scalding.LeftJoinJob")
.arg("input1", "fakeInput1")
.arg("input2", "fakeInput2")
.arg("output", "fakeOutput")
.source(Tsv("fakeInput1"), input1)
.source(Tsv("fakeInput2"), input2)
.sink[(String,Int,JInt)](Tsv("fakeOutput")) { outBuf =>
val actualOutput = outBuf.map { input : (String,Int,AnyRef) =>
println(input)
val (k, v1, v2) = input
(k,(v1, v2))
}.toMap
"join tuples with the same key" in {
correctOutput must be_==(actualOutput)
}
}
.run
.runHadoop
.finish
}
}
class LeftJoinWithLargerJob(args: Args) extends Job(args) {
val p1 = Tsv(args("input1"))
.mapTo((0, 1) -> ('k1, 'v1)) { v : (String, Int) => v }
val p2 = Tsv(args("input2"))
.mapTo((0, 1) -> ('k2, 'v2)) { v : (String, Int) => v }
// Note I am specifying the joiner explicitly since this did not work properly before (leftJoinWithLarger always worked)
p1.joinWithLarger('k1 -> 'k2, p2, new cascading.pipe.joiner.LeftJoin)
.project('k1, 'v1, 'v2)
// Null sent to TSV will not be read in properly
.map('v2 -> 'v2) { v : AnyRef => Option(v).map { _.toString }.getOrElse("NULL") }
.write( Tsv(args("output")) )
}
class LeftJoinWithLargerTest extends Specification {
noDetailedDiffs() //Fixes an issue with scala 2.9
"A LeftJoinWithLargerJob" should {
val input1 = List("a" -> 1, "b" -> 2, "c" -> 3)
val input2 = List("b" -> -1, "c" -> 5, "d" -> 4)
val correctOutput = Map[String,(Int,AnyRef)]("a" -> (1,"NULL"), "b" -> (2, "-1"),
"c" -> (3, "5"))
JobTest("com.twitter.scalding.LeftJoinWithLargerJob")
.arg("input1", "fakeInput1")
.arg("input2", "fakeInput2")
.arg("output", "fakeOutput")
.source(Tsv("fakeInput1"), input1)
.source(Tsv("fakeInput2"), input2)
.sink[(String,Int,JInt)](Tsv("fakeOutput")) { outBuf =>
val actualOutput = outBuf.map { input : (String,Int,AnyRef) =>
println(input)
val (k, v1, v2) = input
(k,(v1, v2))
}.toMap
"join tuples with the same key" in {
correctOutput must be_==(actualOutput)
}
}
.run
.runHadoop
.finish
}
}
class MergeTestJob(args : Args) extends Job(args) {
val in = TextLine(args("in")).read.mapTo(1->('x,'y)) { line : String =>
val p = line.split(" ").map { _.toDouble }
(p(0),p(1))
}
val big = in.filter('x) { (x:Double) => (x > 0.5) }
val small = in.filter('x) { (x:Double) => (x <= 0.5) }
(big ++ small).groupBy('x) { _.max('y) }
.write(Tsv(args("out")))
// Self merge should work
(big ++ big).groupBy('x) { _.max('y) }
.write(Tsv("out2"))
}
class MergeTest extends Specification {
noDetailedDiffs() //Fixes an issue with scala 2.9
"A MergeTest" should {
val r = new java.util.Random
//Here is our input data:
val input = (0 to 100).map { i => (i.toString, r.nextDouble.toString +" "+ r.nextDouble.toString) }
//Here is our expected output:
val parsed = input.map { case (line : String, x : String) =>
val t = x.split(" ").map { _.toDouble }
(t(0),t(1))
}
val big = parsed.filter( _._1 > 0.5 )
val small = parsed.filter( _._1 <= 0.5 )
val golden = (big ++ small).groupBy{ _._1 }.mapValues { itup => (itup.map{ _._2 }.max) }
//Now we have the expected input and output:
JobTest("com.twitter.scalding.MergeTestJob").
arg("in","fakeInput").
arg("out","fakeOutput").
source(TextLine("fakeInput"), input).
sink[(Double,Double)](Tsv("fakeOutput")) { outBuf =>
"correctly merge two pipes" in {
golden must be_==(outBuf.toMap)
}
}.
sink[(Double,Double)](Tsv("out2")) { outBuf =>
"correctly self merge" in {
outBuf.toMap must be_==(big.groupBy(_._1).mapValues{iter => iter.map(_._2).max})
}
}.
run.
finish
}
}
class SizeAveStdJob(args : Args) extends Job(args) {
TextLine(args("input")).mapTo('x,'y) { line =>
val p = line.split(" ").map { _.toDouble }.slice(0,2)
(p(0),p(1))
}.map('x -> 'x) { (x : Double) => (4 * x).toInt }
.groupBy('x) {
_.sizeAveStdev('y->('size,'yave,'ystdev))
//Make sure this doesn't ruin the calculation
.sizeAveStdev('y->('size2,'yave2,'ystdev2))
.average('y)
}
.project('x,'size,'yave,'ystdev,'y)
.write(Tsv(args("output")))
}
class SizeAveStdSpec extends Specification {
"A sizeAveStd job" should {
"correctly compute aves and standard deviations" in {
val r = new java.util.Random
def powerLawRand = {
// Generates a 1/x powerlaw with a max value of 1e40
scala.math.pow(1e40, r.nextDouble)
}
//Here is our input data:
val input = (0 to 10000).map { i => (i.toString, r.nextDouble.toString +" "+ powerLawRand.toString) }
val output = input.map { numline => numline._2.split(" ").map { _.toDouble } }
.map { vec => ((vec(0)*4).toInt, vec(1)) }
.groupBy { tup => tup._1 }
.mapValues { tups =>
val all = tups.map { tup => tup._2.toDouble }.toList
val size = all.size.toLong
val ave = all.sum / size
//Compute the standard deviation:
val vari = all.map { x => (x-ave)*(x-ave) }.sum / (size)
val stdev = scala.math.sqrt(vari)
(size, ave, stdev)
}
JobTest(new SizeAveStdJob(_)).
arg("input","fakeInput").
arg("output","fakeOutput").
source(TextLine("fakeInput"), input).
sink[(Int,Long,Double,Double,Double)](Tsv("fakeOutput")) { outBuf =>
"correctly compute size, ave, stdev" in {
outBuf.foreach { computed =>
val correctTup = output(computed._1)
//Size
computed._2 must be_== (correctTup._1)
//Ave
computed._3/correctTup._2 must beCloseTo(1.0, 1e-6)
//Stdev
computed._4/correctTup._3 must beCloseTo(1.0, 1e-6)
//Explicitly calculated Average:
computed._5/computed._3 must beCloseTo(1.0, 1e-6)
}
}
}.
run.
finish
}
}
}
class DoubleGroupJob(args : Args) extends Job(args) {
TextLine(args("in")).mapTo('x, 'y) { line =>
val p = line.split(" ")
(p(0),p(1))
}
.groupBy('x) { _.size }
.groupBy('size ) { _.size('cnt) }
.write(Tsv(args("out")))
}
class DoubleGroupSpec extends Specification {
"A DoubleGroupJob" should {
"correctly generate output" in {
JobTest("com.twitter.scalding.DoubleGroupJob").
arg("in","fakeIn").
arg("out","fakeOut").
source(TextLine("fakeIn"), List("0" -> "one 1",
"1" -> "two 1",
"2" -> "two 2",
"3" -> "three 3",
"4" -> "three 4",
"5" -> "three 5",
"6" -> "just one"
)).
sink[(Long,Long)](Tsv("fakeOut")) { outBuf =>
"correctly build histogram" in {
val outM = outBuf.toMap
outM(1) must be_== (2) //both the "one" and "just" keys occur only once
outM(2) must be_== (1)
outM(3) must be_== (1)
}
}.
run.
finish
}
}
}
class GroupUniqueJob(args : Args) extends Job(args) {
TextLine(args("in")).mapTo('x, 'y) { line =>
val p = line.split(" ")
(p(0),p(1))
}
.groupBy('x) { _.size }
.unique('size )
.write(Tsv(args("out")))
}
class GroupUniqueSpec extends Specification {
"A GroupUniqueJob" should {
JobTest("com.twitter.scalding.GroupUniqueJob").
arg("in","fakeIn").
arg("out","fakeOut").
source(TextLine("fakeIn"), List("0" -> "one 1",
"1" -> "two 1",
"2" -> "two 2",
"3" -> "three 3",
"4" -> "three 4",
"5" -> "three 5",
"6" -> "just one"
)).
sink[(Long)](Tsv("fakeOut")) { outBuf =>
"correctly count unique sizes" in {
val outSet = outBuf.toSet
outSet.size must_== 3
}
}.
run.
finish
}
}
class DiscardTestJob(args : Args) extends Job(args) {
TextLine(args("in")).flatMapTo('words) { line => line.split("\\\\s+") }
.map('words -> 'wsize) { word : String => word.length }
.discard('words)
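// Verify 'words is really gone by inspecting the remaining fields directly.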
.map('* -> 'correct) { te : TupleEntry => !te.getFields.contains('words) }
.groupAll { _.forall('correct -> 'correct) { x : Boolean => x } }
.write(Tsv(args("out")))
}
class DiscardTest extends Specification {
"A DiscardTestJob" should {
JobTest("com.twitter.scalding.DiscardTestJob")
.arg("in","fakeIn")
.arg("out","fakeOut")
.source(TextLine("fakeIn"), List("0" -> "hello world", "1" -> "foo", "2" -> "bar"))
.sink[Boolean](Tsv("fakeOut")) { outBuf =>
"must reduce down to one line" in {
outBuf.size must_== 1
}
"must correctly discard word column" in {
outBuf(0) must beTrue
}
}
.run
.finish
}
}
class HistogramJob(args : Args) extends Job(args) {
TextLine(args("in")).read
.groupBy('line) { _.size }
.groupBy('size) { _.size('freq) }
.write(Tsv(args("out")))
}
class HistogramTest extends Specification {
"A HistogramJob" should {
JobTest("com.twitter.scalding.HistogramJob")
.arg("in","fakeIn")
.arg("out","fakeOut")
.source(TextLine("fakeIn"), List("0" -> "single", "1" -> "single"))
.sink[(Long,Long)](Tsv("fakeOut")) { outBuf =>
"must reduce down to a single line for a trivial input" in {
outBuf.size must_== 1
}
"must get the result right" in {
outBuf(0) must_== (2L,1L)
}
}
.run
.finish
}
}
class ForceReducersJob(args : Args) extends Job(args) {
TextLine("in").read
.rename((0, 1) -> ('num, 'line))
.flatMap('line -> 'words){l : String => l.split(" ")}
.groupBy('num){ _.toList[String]('words -> 'wordList).forceToReducers }
.map('wordList -> 'wordList){w : List[String] => w.mkString(" ")}
.project('num, 'wordList)
.write(Tsv("out"))
}
class ForceReducersTest extends Specification {
"A ForceReducersJob" should {
JobTest("com.twitter.scalding.ForceReducersJob")
.source(TextLine("in"), List("0" -> "single test", "1" -> "single result"))
.sink[(Int,String)](Tsv("out")) { outBuf =>
"must get the result right" in {
//need to convert to sets because order is not guaranteed
outBuf(0)._2.split(" ").toSet must_== Set("single", "test")
outBuf(1)._2.split(" ").toSet must_== Set("single", "result")
}
}
.run
.runHadoop
.finish
}
}
class ToListJob(args : Args) extends Job(args) {
TextLine(args("in")).read
.flatMap('line -> 'words){l : String => l.split(" ")}
.groupBy('offset){ _.toList[String]('words -> 'wordList) }
.map('wordList -> 'wordList){w : List[String] => w.mkString(" ")}
.project('offset, 'wordList)
.write(Tsv(args("out")))
}
class NullListJob(args : Args) extends Job(args) {
TextLine(args("in")).read
.groupBy('offset){ _.toList[String]('line -> 'lineList).spillThreshold(100) }
.map('lineList -> 'lineList) { ll : List[String] => ll.mkString(" ") }
.write(Tsv(args("out")))
}
class ToListTest extends Specification {
"A ToListJob" should {
JobTest("com.twitter.scalding.ToListJob")
.arg("in","fakeIn")
.arg("out","fakeOut")
.source(TextLine("fakeIn"), List("0" -> "single test", "1" -> "single result"))
.sink[(Int,String)](Tsv("fakeOut")) { outBuf =>
"must have the right number of lines" in {
outBuf.size must_== 2
}
"must get the result right" in {
//need to convert to sets because order is not guaranteed
outBuf(0)._2.split(" ").toSet must_== Set("single", "test")
outBuf(1)._2.split(" ").toSet must_== Set("single", "result")
}
}
.run
.finish
}
"A NullListJob" should {
JobTest("com.twitter.scalding.NullListJob")
.arg("in","fakeIn")
.arg("out","fakeOut")
.source(TextLine("fakeIn"), List("0" -> null, "0" -> "a", "0" -> null, "0" -> "b"))
.sink[(Int,String)](Tsv("fakeOut")) { outBuf =>
"must have the right number of lines" in {
outBuf.size must_== 1
}
"must return an empty list for null key" in {
val sSet = outBuf(0)._2.split(" ").toSet
sSet must_== Set("a", "b")
}
}
.run
.finish
}
}
class CrossJob(args : Args) extends Job(args) {
val p1 = Tsv(args("in1")).read
.mapTo((0,1) -> ('x,'y)) { tup : (Int, Int) => tup }
val p2 = Tsv(args("in2")).read
.mapTo(0->'z) { (z : Int) => z}
p1.crossWithTiny(p2).write(Tsv(args("out")))
}
class CrossTest extends Specification {
noDetailedDiffs()
"A CrossJob" should {
JobTest("com.twitter.scalding.CrossJob")
.arg("in1","fakeIn1")
.arg("in2","fakeIn2")
.arg("out","fakeOut")
.source(Tsv("fakeIn1"), List(("0","1"),("1","2"),("2","3")))
.source(Tsv("fakeIn2"), List("4","5").map { Tuple1(_) })
.sink[(Int,Int,Int)](Tsv("fakeOut")) { outBuf =>
"must look exactly right" in {
outBuf.size must_==6
outBuf.toSet must_==(Set((0,1,4),(0,1,5),(1,2,4),(1,2,5),(2,3,4),(2,3,5)))
}
}
.run
.runHadoop
.finish
}
}
class GroupAllCrossJob(args : Args) extends Job(args) {
val p1 = Tsv(args("in1")).read
.mapTo((0,1) -> ('x,'y)) { tup : (Int, Int) => tup }
.groupAll { _.max('x) }
.map('x -> 'x) { x : Int => List(x) }
val p2 = Tsv(args("in2")).read
.mapTo(0->'z) { (z : Int) => z}
p2.crossWithTiny(p1)
.map('x -> 'x) { l: List[Int] => l.size }
.project('x, 'z)
.write(Tsv(args("out")))
}
class GroupAllCrossTest extends Specification {
noDetailedDiffs()
"A GroupAllCrossJob" should {
JobTest(new GroupAllCrossJob(_))
.arg("in1","fakeIn1")
.arg("in2","fakeIn2")
.arg("out","fakeOut")
.source(Tsv("fakeIn1"), List(("0","1"),("1","2"),("2","3")))
.source(Tsv("fakeIn2"), List("4","5").map { Tuple1(_) })
.sink[(Int,Int)](Tsv("fakeOut")) { outBuf =>
"must look exactly right" in {
outBuf.size must_==2
outBuf.toSet must_==(Set((1,4), (1,5)))
}
}
.run
.runHadoop
.finish
}
}
class SmallCrossJob(args : Args) extends Job(args) {
val p1 = Tsv(args("in1")).read
.mapTo((0,1) -> ('x,'y)) { tup : (Int, Int) => tup }
val p2 = Tsv(args("in2")).read
.mapTo(0->'z) { (z : Int) => z}
p1.crossWithSmaller(p2).write(Tsv(args("out")))
}
class SmallCrossTest extends Specification {
noDetailedDiffs()
"A SmallCrossJob" should {
JobTest("com.twitter.scalding.SmallCrossJob")
.arg("in1","fakeIn1")
.arg("in2","fakeIn2")
.arg("out","fakeOut")
.source(Tsv("fakeIn1"), List(("0","1"),("1","2"),("2","3")))
.source(Tsv("fakeIn2"), List("4","5").map { Tuple1(_) })
.sink[(Int,Int,Int)](Tsv("fakeOut")) { outBuf =>
"must look exactly right" in {
outBuf.size must_==6
outBuf.toSet must_==(Set((0,1,4),(0,1,5),(1,2,4),(1,2,5),(2,3,4),(2,3,5)))
}
}
.run
.runHadoop
.finish
}
}
class TopKJob(args : Args) extends Job(args) {
Tsv(args("in")).read
.mapTo(0 -> 'x) { (tup : Int) => tup }
//Take the smallest 3 values:
.groupAll { _.sortedTake[Int]('x->'x, 3) }
.write(Tsv(args("out")))
}
class TopKTest extends Specification {
"A TopKJob" should {
JobTest("com.twitter.scalding.TopKJob")
.arg("in","fakeIn")
.arg("out","fakeOut")
.source(Tsv("fakeIn"), List(3,24,1,4,5).map { Tuple1(_) } )
.sink[List[Int]](Tsv("fakeOut")) { outBuf =>
"must look exactly right" in {
outBuf.size must_==1
outBuf(0) must be_==(List(1,3,4))
}
}
.run
.finish
}
}
class ScanJob(args : Args) extends Job(args) {
Tsv("in",('x,'y,'z))
.groupBy('x) {
_.sortBy('y)
.scanLeft('y -> 'ys)(0) { (oldV : Int, newV : Int) => oldV + newV }
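// scanLeft also emits the seed value, so a group of n rows yields n+1 rows.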
}
.project('x,'ys,'z)
.map('z -> 'z) { z : Int => z } //Make sure the null z is converted to an int
.write(Tsv("out"))
}
class ScanTest extends Specification {
import Dsl._
noDetailedDiffs()
"A ScanJob" should {
JobTest("com.twitter.scalding.ScanJob")
.source(Tsv("in",('x,'y,'z)), List((3,0,1),(3,1,10),(3,5,100)) )
.sink[(Int,Int,Int)](Tsv("out")) { outBuf => ()
val correct = List((3,0,0),(3,0,1),(3,1,10),(3,6,100))
"have a working scanLeft" in {
outBuf.toList must be_== (correct)
}
}
.run
.runHadoop
.finish
}
}
class TakeJob(args : Args) extends Job(args) {
val input = Tsv("in").read
.mapTo((0,1,2) -> ('x,'y,'z)) { tup : (Int,Int,Int) => tup }
input.groupBy('x) { _.take(2) }.write(Tsv("out2"))
input.groupAll.write(Tsv("outall"))
}
class TakeTest extends Specification {
noDetailedDiffs()
"A TakeJob" should {
JobTest("com.twitter.scalding.TakeJob")
.source(Tsv("in"), List((3,0,1),(3,1,10),(3,5,100)) )
.sink[(Int,Int,Int)](Tsv("outall")) { outBuf => ()
"groupAll must see everything in same order" in {
outBuf.size must_==3
outBuf.toList must be_== (List((3,0,1),(3,1,10),(3,5,100)))
}
}
.sink[(Int,Int,Int)](Tsv("out2")) { outBuf =>
"take(2) must only get 2" in {
outBuf.size must_==2
outBuf.toList must be_== (List((3,0,1),(3,1,10)))
}
}
.run
.finish
}
}
class DropJob(args : Args) extends Job(args) {
val input = Tsv("in").read
.mapTo((0,1,2) -> ('x,'y,'z)) { tup : (Int,Int,Int) => tup }
input.groupBy('x) { _.drop(2) }.write(Tsv("out2"))
input.groupAll.write(Tsv("outall"))
}
class DropTest extends Specification {
noDetailedDiffs()
"A DropJob" should {
JobTest("com.twitter.scalding.DropJob")
.source(Tsv("in"), List((3,0,1),(3,1,10),(3,5,100)) )
.sink[(Int,Int,Int)](Tsv("outall")) { outBuf => ()
"groupAll must see everything in same order" in {
outBuf.size must_==3
outBuf.toList must be_== (List((3,0,1),(3,1,10),(3,5,100)))
}
}
.sink[(Int,Int,Int)](Tsv("out2")) { outBuf =>
"drop(2) must only get 1" in {
outBuf.toList must be_== (List((3,5,100)))
}
}
.run
.finish
}
}
class PivotJob(args : Args) extends Job(args) {
Tsv("in",('k,'w,'y,'z)).read
.unpivot(('w,'y,'z) -> ('col, 'val))
.write(Tsv("unpivot"))
.groupBy('k) {
_.pivot(('col,'val) -> ('w,'y,'z))
}.write(Tsv("pivot"))
.unpivot(('w,'y,'z) -> ('col, 'val))
.groupBy('k) {
_.pivot(('col,'val) -> ('w,'y,'z,'default), 2.0)
}.write(Tsv("pivot_with_default"))
}
class PivotTest extends Specification with FieldConversions {
noDetailedDiffs()
val input = List(("1","a","b","c"),("2","d","e","f"))
"A PivotJob" should {
JobTest("com.twitter.scalding.PivotJob")
.source(Tsv("in",('k,'w,'y,'z)), input)
.sink[(String,String,String)](Tsv("unpivot")) { outBuf =>
"unpivot columns correctly" in {
outBuf.size must_== 6
outBuf.toList.sorted must be_== (List(("1","w","a"),("1","y","b"),("1","z","c"),
("2","w","d"),("2","y","e"),("2","z","f")).sorted)
}
}
.sink[(String,String,String,String)](Tsv("pivot")) { outBuf =>
"pivot back to the original" in {
outBuf.size must_==2
outBuf.toList.sorted must be_== (input.sorted)
}
}
.sink[(String,String,String,String,Double)](Tsv("pivot_with_default")) { outBuf =>
"pivot back to the original with the missing column replace by the specified default" in {
outBuf.size must_==2
outBuf.toList.sorted must be_== (List(("1","a","b","c",2.0),("2","d","e","f",2.0)).sorted)
}
}
.run
.finish
}
}
class IterableSourceJob(args : Args) extends Job(args) {
val list = List((1,2,3),(4,5,6),(3,8,9))
val iter = IterableSource(list, ('x,'y,'z))
Tsv("in",('x,'w))
.joinWithSmaller('x->'x, iter)
.write(Tsv("out"))
Tsv("in",('x,'w))
.joinWithTiny('x->'x, iter)
.write(Tsv("tiny"))
//Now without fields and using the implicit:
Tsv("in",('x,'w))
.joinWithTiny('x -> 0, list).write(Tsv("imp"))
}
class IterableSourceTest extends Specification with FieldConversions {
noDetailedDiffs()
val input = List((1,10),(2,20),(3,30))
"A IterableSourceJob" should {
JobTest("com.twitter.scalding.IterableSourceJob")
.source(Tsv("in",('x,'w)), input)
.sink[(Int,Int,Int,Int)](Tsv("out")) { outBuf =>
"Correctly joinWithSmaller" in {
outBuf.toList.sorted must be_== (List((1,10,2,3),(3,30,8,9)))
}
}
.sink[(Int,Int,Int,Int)](Tsv("tiny")) { outBuf =>
"Correctly joinWithTiny" in {
outBuf.toList.sorted must be_== (List((1,10,2,3),(3,30,8,9)))
}
}
.sink[(Int,Int,Int,Int,Int)](Tsv("imp")) { outBuf =>
"Correctly implicitly joinWithTiny" in {
outBuf.toList.sorted must be_== (List((1,10,1,2,3),(3,30,3,8,9)))
}
}
.run
.runHadoop
.finish
}
}
class HeadLastJob(args : Args) extends Job(args) {
Tsv("input",('x,'y)).groupBy('x) {
_.sortBy('y)
.head('y -> 'yh).last('y -> 'yl)
}.write(Tsv("output"))
}
class HeadLastTest extends Specification {
import Dsl._
noDetailedDiffs()
val input = List((1,10),(1,20),(1,30),(2,0))
"A HeadLastJob" should {
JobTest("com.twitter.scalding.HeadLastJob")
.source(Tsv("input",('x,'y)), input)
.sink[(Int,Int,Int)](Tsv("output")) { outBuf =>
"Correctly do head/last" in {
outBuf.toList must be_==(List((1,10,30),(2,0,0)))
}
}
.run
.finish
}
}
class HeadLastUnsortedJob(args : Args) extends Job(args) {
Tsv("input",('x,'y)).groupBy('x) {
_.head('y -> 'yh).last('y -> 'yl)
}.write(Tsv("output"))
}
class HeadLastUnsortedTest extends Specification {
import Dsl._
noDetailedDiffs()
val input = List((1,10),(1,20),(1,30),(2,0))
"A HeadLastUnsortedTest" should {
JobTest("com.twitter.scalding.HeadLastUnsortedJob")
.source(Tsv("input",('x,'y)), input)
.sink[(Int,Int,Int)](Tsv("output")) { outBuf =>
"Correctly do head/last" in {
outBuf.toList must be_==(List((1,10,30),(2,0,0)))
}
}
.run
.finish
}
}
class MkStringToListJob(args : Args) extends Job(args) {
Tsv("input", ('x,'y)).groupBy('x) {
_.sortBy('y)
.mkString('y -> 'ystring,",")
.toList[Int]('y -> 'ylist)
}.write(Tsv("output"))
}
class MkStringToListTest extends Specification with FieldConversions {
noDetailedDiffs()
val input = List((1,30),(1,10),(1,20),(2,0))
"A IterableSourceJob" should {
JobTest("com.twitter.scalding.MkStringToListJob")
.source(Tsv("input",('x,'y)), input)
.sink[(Int,String,List[Int])](Tsv("output")) { outBuf =>
"Correctly do mkString/toList" in {
outBuf.toSet must be_==(Set((1,"10,20,30",List(10,20,30)),(2,"0",List(0))))
}
}
.run
// This can't be run in Hadoop mode because we can't serialize the list to Tsv
.finish
}
}
class InsertJob(args : Args) extends Job(args) {
Tsv("input", ('x, 'y)).insert(('z, 'w), (1,2)).write(Tsv("output"))
}
class InsertJobTest extends Specification {
import Dsl._
noDetailedDiffs()
val input = List((2,2), (3,3))
"An InsertJob" should {
JobTest(new com.twitter.scalding.InsertJob(_))
.source(Tsv("input", ('x, 'y)), input)
.sink[(Int, Int, Int, Int)](Tsv("output")) { outBuf =>
"Correctly insert a constant" in {
outBuf.toSet must be_==(Set((2,2,1,2), (3,3,1,2)))
}
}
.run
.finish
}
}
class FoldJob(args : Args) extends Job(args) {
import scala.collection.mutable.{Set => MSet}
Tsv("input", ('x,'y)).groupBy('x) {
// DON'T USE MUTABLE, IT IS UNCOOL AND DANGEROUS! But we test, just in case.
_.foldLeft('y -> 'yset)(MSet[Int]()){(ms : MSet[Int], y : Int) =>
ms += y
ms
}
}.write(Tsv("output"))
}
class FoldJobTest extends Specification {
import Dsl._
import scala.collection.mutable.{Set => MSet}
noDetailedDiffs()
val input = List((1,30),(1,10),(1,20),(2,0))
"A FoldTestJob" should {
JobTest("com.twitter.scalding.FoldJob")
.source(Tsv("input",('x,'y)), input)
.sink[(Int,MSet[Int])](Tsv("output")) { outBuf =>
"Correctly do a fold with MutableSet" in {
outBuf.toSet must be_==(Set((1,MSet(10,20,30)),(2,MSet(0))))
}
}
.run
// This can't be run in Hadoop mode because we can't serialize the list to Tsv
.finish
}
}
// TODO make a Product serializer that cleans $outer parameters
case class V(v : Int)
class InnerCaseJob(args : Args) extends Job(args) {
val res = TypedTsv[Int]("input")
.mapTo(('xx, 'vx)) { x => (x*x, V(x)) }
.groupBy('xx) { _.head('vx) }
.map('vx -> 'x) { v : V => v.v }
.project('x, 'xx)
.write(Tsv("output"))
}
class InnerCaseTest extends Specification {
import Dsl._
noDetailedDiffs()
val input = List(Tuple1(1),Tuple1(2),Tuple1(2),Tuple1(4))
"An InnerCaseJob" should {
JobTest(new com.twitter.scalding.InnerCaseJob(_))
.source(TypedTsv[Int]("input"), input)
.sink[(Int,Int)](Tsv("output")) { outBuf =>
"Correctly handle inner case classes" in {
outBuf.toSet must be_==(Set((1,1),(2,4),(4,16)))
}
}
.runHadoop
.finish
}
}
class NormalizeJob(args : Args) extends Job(args) {
Tsv("in")
.read
.mapTo((0,1) -> ('x,'y)) { tup : (Double, Int) => tup }
.normalize('x)
.project('x, 'y)
.write(Tsv("out"))
}
class NormalizeTest extends Specification {
noDetailedDiffs()
"A NormalizeJob" should {
JobTest("com.twitter.scalding.NormalizeJob")
.source(Tsv("in"), List(("0.3", "1"), ("0.3", "1"), ("0.3",
"1"), ("0.3", "1")))
.sink[(Double, Int)](Tsv("out")) { outBuf =>
"must be normalized" in {
outBuf.size must_== 4
outBuf.toSet must_==(Set((0.25,1),(0.25,1),(0.25,1),(0.25,1)))
}
}
.run
.finish
}
}
class ApproxUniqJob(args : Args) extends Job(args) {
Tsv("in",('x,'y))
.read
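// approxUniques produces an estimated distinct count (HyperLogLog-style),
// hence the tolerance band in the assertions below.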
.groupBy('x) { _.approxUniques('y -> 'ycnt) }
.write(Tsv("out"))
}
class ApproxUniqTest extends Specification {
import Dsl._
noDetailedDiffs()
"A ApproxUniqJob" should {
val input = (1 to 1000).flatMap { i => List(("x0", i), ("x1", i)) }.toList
JobTest("com.twitter.scalding.ApproxUniqJob")
.source(Tsv("in",('x,'y)), input)
.sink[(String, Double)](Tsv("out")) { outBuf =>
"must approximately count" in {
outBuf.size must_== 2
val kvresult = outBuf.groupBy { _._1 }.mapValues { _.head._2 }
kvresult("x0") must beCloseTo(1000.0, 30.0) //We should be 1%, but this is on average, so
kvresult("x1") must beCloseTo(1000.0, 30.0) //We should be 1%, but this is on average, so
}
}
.run
.finish
}
}
class ForceToDiskJob(args : Args) extends Job(args) {
val x = Tsv("in", ('x,'y))
.read
.filter('x) { x : Int => x > 0 }
.rename('x -> 'x1)
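// forceToDisk materializes the filtered pipe as a checkpoint before it is
// used as the tiny side of the join below.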
Tsv("in",('x,'y))
.read
.joinWithTiny('y -> 'y, x.forceToDisk)
.project('x,'x1,'y)
.write(Tsv("out"))
}
class ForceToDiskTest extends Specification {
import Dsl._
noDetailedDiffs()
"A ForceToDiskJob" should {
val input = (1 to 1000).flatMap { i => List((-1, i), (1, i)) }.toList
JobTest(new ForceToDiskJob(_))
.source(Tsv("in",('x,'y)), input)
.sink[(Int,Int,Int)](Tsv("out")) { outBuf =>
"run correctly when combined with joinWithTiny" in {
outBuf.size must_== 2000
val correct = (1 to 1000).flatMap { y => List((1,1,y),(-1,1,y)) }.sorted
outBuf.toList.sorted must_== correct
}
}
.run
.runHadoop
.finish
}
}
class ThrowsErrorsJob(args : Args) extends Job(args) {
Tsv("input",('letter, 'x))
.read
.addTrap(Tsv("trapped"))
.map(('letter, 'x) -> 'yPrime){ fields : Product =>
val x = fields.productElement(1).asInstanceOf[Int]
if (x == 1) throw new Exception("Erroneous Ones") else x }
.write(Tsv("output"))
}
class ItsATrapTest extends Specification {
import Dsl._
noDetailedDiffs() //Fixes an issue with scala 2.9
"An AddTrap" should {
val input = List(("a", 1),("b", 2), ("c", 3), ("d", 1), ("e", 2))
JobTest(new ThrowsErrorsJob(_))
.source(Tsv("input",('letter,'x)), input)
.sink[(String, Int)](Tsv("output")) { outBuf =>
"must contain all numbers in input except for 1" in {
outBuf.toList.sorted must be_==(List(("b", 2), ("c", 3), ("e", 2)))
}
}
.sink[(String, Int)](Tsv("trapped")) { outBuf =>
"must contain all 1s and fields in input" in {
outBuf.toList.sorted must be_==(List(("a", 1), ("d", 1)))
}
}
.run
.finish
}
}
class GroupAllToListTestJob(args: Args) extends Job(args) {
TypedTsv[(Long, String, Double)]("input")
.mapTo('a, 'b) { case(id, k, v) => (id, Map(k -> v)) }
.groupBy('a) { _.sum[Map[String, Double]]('b) }
.groupAll {
_.toList[(Long, Map[String, Double])](('a, 'b) -> 'abList)
}
.map('abList -> 'abMap) {
list : List[(Long, Map[String, Double])] => list.toMap
}
.project('abMap)
.map('abMap -> 'abMap) { x: AnyRef => x.toString }
.write(Tsv("output"))
}
class GroupAllToListTest extends Specification {
import Dsl._
noDetailedDiffs()
"A GroupAllToListTestJob" should {
val input = List((1L, "a", 1.0), (1L, "b", 2.0), (2L, "a", 1.0), (2L, "b", 2.0))
val output = Map(2L -> Map("a" -> 1.0, "b" -> 2.0), 1L -> Map("a" -> 1.0, "b" -> 2.0))
JobTest(new GroupAllToListTestJob(_))
.source(TypedTsv[(Long, String, Double)]("input"), input)
.sink[String](Tsv("output")) { outBuf =>
"must properly aggregate stuff into a single map" in {
outBuf.size must_== 1
outBuf(0) must be_==(output.toString)
}
}
.runHadoop
.finish
}
}
class ToListGroupAllToListTestJob(args: Args) extends Job(args) {
TypedTsv[(Long, String)]("input")
.mapTo('b, 'c) { case(k, v) => (k, v) }
.groupBy('c) { _.toList[Long]('b -> 'bList) }
.groupAll {
_.toList[(String, List[Long])](('c, 'bList) -> 'cbList)
}
.project('cbList)
.write(Tsv("output"))
}
class ToListGroupAllToListSpec extends Specification {
import Dsl._
noDetailedDiffs()
val expected = List(("us", List(1)), ("jp", List(3, 2)), ("gb", List(3, 1)))
"A ToListGroupAllToListTestJob" should {
JobTest(new ToListGroupAllToListTestJob(_))
.source(TypedTsv[(Long, String)]("input"), List((1L, "us"), (1L, "gb"), (2L, "jp"), (3L, "jp"), (3L, "gb")))
.sink[String](Tsv("output")) { outBuf =>
"must properly aggregate stuff in hadoop mode" in {
outBuf.size must_== 1
outBuf.head must_== expected.toString
println(outBuf.head)
}
}
.runHadoop
.finish
JobTest(new ToListGroupAllToListTestJob(_))
.source(TypedTsv[(Long, String)]("input"), List((1L, "us"), (1L, "gb"), (2L, "jp"), (3L, "jp"), (3L, "gb")))
.sink[List[(String, List[Long])]](Tsv("output")) { outBuf =>
"must properly aggregate stuff in local model" in {
outBuf.size must_== 1
outBuf.head must_== expected
println(outBuf.head)
}
}
.run
.finish
}
}
// TODO: HangingTest is very flaky now that multi-threaded testing is enabled. It needs to be fixed later.
/*
class HangingJob(args : Args) extends Job(args) {
val x = Tsv("in", ('x,'y))
.read
.filter('x, 'y) { t: (Int, Int) =>
val (x, y) = t
timeout(Millisecs(2)) {
if (y % 2 == 1) Thread.sleep(1000)
x > 0
} getOrElse false
}
.write(Tsv("out"))
}
class HangingTest extends Specification {
import Dsl._
noDetailedDiffs()
"A HangingJob" should {
val input = (1 to 100).flatMap { i => List((-1, i), (1, i)) }.toList
JobTest(new HangingJob(_))
.source(Tsv("in",('x,'y)), input)
.sink[(Int,Int)](Tsv("out")) { outBuf =>
"run correctly when task times out" in {
//outBuf.size must_== 100
//val correct = (1 to 100).map { i => (1, i) }
outBuf.size must_== 50
val correct = (1 to 50).map { i => (1, i*2) }
outBuf.toList.sorted must_== correct
}
}
.run
.runHadoop
.finish
}
}
*/
class Function2Job(args : Args) extends Job(args) {
import FunctionImplicits._
Tsv("in", ('x,'y)).mapTo(('x, 'y) -> 'xy) { (x: String, y: String) => x + y }.write(Tsv("output"))
}
class Function2Test extends Specification {
import Dsl._
noDetailedDiffs() //Fixes an issue with scala 2.9
"A Function2Job" should {
val input = List(("a", "b"))
JobTest("com.twitter.scalding.Function2Job")
.source(Tsv("in",('x,'y)), input)
.sink[String](Tsv("output")) { outBuf =>
"convert a function2 to tupled function1" in {
outBuf must be_==(List("ab"))
}
}
.run
.finish
}
}
class SampleWithReplacementJob(args : Args) extends Job(args) {
val input = Tsv("in").read
.sampleWithReplacement(1.0, 0)
.write(Tsv("output"))
}
class SampleWithReplacementTest extends Specification {
import com.twitter.scalding.mathematics.Poisson
val p = new Poisson(1.0, 0)
val simulated = (1 to 100).map{
i => i -> p.nextInt
}.filterNot(_._2 == 0).toSet
noDetailedDiffs()
"A SampleWithReplacementJob" should {
JobTest("com.twitter.scalding.SampleWithReplacementJob")
.source(Tsv("in"), (1 to 100).map(i => i) )
.sink[Int](Tsv("output")) { outBuf => ()
"sampleWithReplacement must sample items according to a poisson distribution" in {
outBuf.toList.groupBy(i => i)
.map(p => p._1 -> p._2.size)
.filterNot(_._2 == 0).toSet must_== simulated
}
}
.run
.finish
}
}
class VerifyTypesJob(args: Args) extends Job(args) {
Tsv("input", new Fields("age", "weight"))
.addTrap(Tsv("trap"))
.verifyTypes[(Int, Int)]('age -> 'weight)
.verifyTypes[Int]('weight)
.write(Tsv("output"))
}
class VerifyTypesJobTest extends Specification {
"Verify types operation" should {
"put bad records in a trap" in {
val input = List((3, "aaa"),(23,154),(15,"123"),(53,143),(7,85),(19,195),
(42,187),(35,165),(68,121),(13,"34"),(17,173),(2,13),(2,"break"))
JobTest(new com.twitter.scalding.VerifyTypesJob(_))
.source(Tsv("input", new Fields("age", "weight")), input)
.sink[(Int, Int)](Tsv("output")) { outBuf =>
outBuf.toList.size must_== input.size - 2
}
.sink[(Any, Any)](Tsv("trap")) { outBuf =>
outBuf.toList.size must_== 2
}
.run
.finish
}
}
}
class SortingJob(args : Args) extends Job(args) {
Tsv("in", ('x, 'y, 'z))
.read
.groupAll(_.sortBy('y))
.write(Tsv("output"))
}
class SortingJobTest extends Specification {
import Dsl._
noDetailedDiffs()
"A SortingJob" should {
JobTest(new SortingJob(_))
.source(Tsv("in", ('x, 'y, 'z)), (1 to 100).map(i => (i, i*i % 5, i*i*i)) )
.sink[(Int,Int,Int)](Tsv("output")) { outBuf =>
"keep all the columns" in {
val correct = (1 to 100).map(i => (i, i*i % 5, i*i*i)).toList.sortBy(_._2)
outBuf.toList must_==(correct)
}
}
.run
.finish
}
}
| vidyar/twitterscalding | scalding-core/src/test/scala/com/twitter/scalding/CoreTest.scala | Scala | apache-2.0 | 51,773 |
package geotrellis.vector.op.data
import geotrellis.process._
import geotrellis.geometry._
import geotrellis._
/**
* Split multipolygon into polygons.
*/
case class SplitMultiPolygon(m:Op[MultiPolygon]) extends Op1(m)({
mp => Result(mp.polygons)
})
| Tjoene/thesis | Case_Programs/geotrellis-0.7.0/src/main/scala/geotrellis/vector/op/data/SplitMultiPolygon.scala | Scala | gpl-2.0 | 275 |
/*
* Copyright (c) 2013-2014 TelefΓ³nica InvestigaciΓ³n y Desarrollo S.A.U.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package es.tid.cosmos.infinity
import org.scalatest.FlatSpec
import org.scalatest.matchers.MustMatchers
import es.tid.cosmos.infinity.common.fs.RootPath
class UserPathMapperTest extends FlatSpec with MustMatchers {
"User paths" must "map to a subpath of /user" in {
UserPathMapper.absolutePath(RootPath) must be (RootPath / "user")
UserPathMapper.absolutePath(RootPath / "foo") must be (RootPath / "user" / "foo")
}
it must "be mapped back from absolute paths" in {
UserPathMapper.userPath(RootPath / "user") must be (RootPath)
UserPathMapper.userPath(RootPath / "user" / "foo") must be (RootPath / "foo")
}
it must "throw when transforming back is not possible" in {
evaluating {
UserPathMapper.userPath(RootPath / "other_dir")
} must produce [IllegalArgumentException]
}
}
| telefonicaid/fiware-cosmos-platform | infinity/driver/src/test/scala/es/tid/cosmos/infinity/UserPathMapperTest.scala | Scala | apache-2.0 | 1,463 |
/*
* Copyright 2009-2017. DigitalGlobe, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and limitations under the License.
*/
package org.mrgeo.utils
import java.awt.image.DataBuffer
import java.io._
import java.net.URI
import java.nio._
import java.nio.file.Files
import java.util.zip.GZIPOutputStream
import javax.xml.bind.DatatypeConverter
import edu.umd.cs.findbugs.annotations.SuppressFBWarnings
import org.apache.commons.io.IOUtils
import org.apache.commons.lang3.ArrayUtils
import org.apache.hadoop.fs.{FSDataInputStream, Path}
import org.gdal.gdal.{Band, Dataset, Driver, gdal}
import org.gdal.gdalconst.gdalconstConstants
import org.gdal.ogr.ogr
import org.gdal.osr.{CoordinateTransformation, SpatialReference, osr, osrConstants}
import org.mrgeo.core.{MrGeoConstants, MrGeoProperties}
import org.mrgeo.hdfs.utils.HadoopFileUtils
import org.mrgeo.utils.MrGeoImplicits._
import org.mrgeo.utils.tms.{Bounds, TMSUtils}
import scala.collection.JavaConversions._
class GDALException extends IOException {
private var origException:Exception = _
def this(e:Exception) {
this()
origException = e
}
def this(msg:String) {
this()
origException = new Exception(msg)
}
def this(msg:String, e:Exception) {
this()
origException = new Exception(msg, e)
}
override def printStackTrace() {
this.origException.printStackTrace()
}
}
@SuppressFBWarnings(value = Array("RCN_REDUNDANT_NULLCHECK_OF_NONNULL_VALUE"), justification = "Scala generated code")
@SuppressFBWarnings(value = Array("PZLA_PREFER_ZERO_LENGTH_ARRAYS"), justification = "api")
@SuppressFBWarnings(value = Array("PATH_TRAVERSAL_IN"), justification = "delete() Only deletes a valid GDAL file the user has permission to load and delete")
object GDALUtils extends Logging {
val EPSG4326:String = osrConstants.SRS_WKT_WGS84
private val VSI_PREFIX:String = "/vsimem/"
private val GDAL_PAM_ENABLED:String = "GDAL_PAM_ENABLED"
initializeGDAL()
// empty method to force static initializer
def register():Unit = {}
def isValidDataset(imagename:String):Boolean = {
try {
val image:Dataset = GDALUtils.open(imagename)
GDALUtils.close(image)
return true
}
catch {
case ignored:IOException =>
}
false
}
def open(stream:InputStream):Dataset = {
open(IOUtils.toByteArray(stream))
}
def open(bytes:Array[Byte]):Dataset = {
val imagename:String = "stream" + HadoopUtils.createRandomString(5)
gdal.FileFromMemBuffer(VSI_PREFIX + imagename, bytes)
val image = gdal.Open(VSI_PREFIX + imagename)
if (image != null) {
logDebug(" Image loaded successfully: " + imagename)
image
}
else {
logInfo(
"Image not loaded, but unfortunately no exceptions were thrown, look for a logged explanation somewhere above")
null
}
}
def createEmptyMemoryRaster(src:Dataset, width:Int, height:Int):Dataset = {
val bands:Int = src.getRasterCount
var datatype:Int = -1
val nodatas = Array.newBuilder[Double]
val nodata = Array.ofDim[java.lang.Double](1)
var i:Int = 1
while (i <= src.GetRasterCount()) {
val band:Band = src.GetRasterBand(i)
if (datatype < 0) {
datatype = band.getDataType
}
band.GetNoDataValue(nodata)
nodatas += (if (nodata(0) == null) {
java.lang.Double.NaN
}
else {
nodata(0)
})
i += 1
}
createEmptyMemoryRaster(width, height, bands, datatype, nodatas.result())
}
def createEmptyMemoryRaster(width:Int, height:Int, bands:Int, datatype:Int,
nodatas:Array[Double] = null):Dataset = {
val driver:Driver = gdal.GetDriverByName("MEM")
val dataset = driver.Create("InMem", width, height, bands, datatype)
if (dataset != null) {
if (nodatas != null) {
var i:Int = 1
while (i <= dataset.getRasterCount) {
val nodata:Double = if (i < nodatas.length) {
nodatas(i - 1)
}
else {
nodatas(nodatas.length - 1)
}
val band:Band = dataset.GetRasterBand(i)
band.Fill(nodata)
band.SetNoDataValue(nodata)
i += 1
}
}
return dataset
}
null
}
def createEmptyDiskBasedRaster(src:Dataset, width:Int, height:Int):Dataset = {
val bands:Int = src.getRasterCount
var datatype:Int = -1
val nodatas = Array.newBuilder[Double]
val nodata = Array.ofDim[java.lang.Double](1)
var i:Int = 1
while (i <= src.GetRasterCount()) {
val band:Band = src.GetRasterBand(i)
if (datatype < 0) {
datatype = band.getDataType
}
band.GetNoDataValue(nodata)
nodatas += (if (nodata(0) == null) {
java.lang.Double.NaN
}
else {
nodata(0)
})
i += 1
}
createEmptyDiskBasedRaster(width, height, bands, datatype, nodatas.result())
}
def createEmptyDiskBasedRaster(width:Int, height:Int, bands:Int, datatype:Int,
nodatas:Array[Double] = null):Dataset = {
val driver:Driver = gdal.GetDriverByName("GTiff")
val f = File.createTempFile("gdal-tmp-", ".tif")
val filename = f.getCanonicalPath
val dataset = driver.Create(filename, width, height, bands, datatype)
if (dataset != null) {
if (nodatas != null) {
var i:Int = 1
while (i <= dataset.getRasterCount) {
val nodata:Double = if (i < nodatas.length) {
nodatas(i - 1)
}
else {
nodatas(nodatas.length - 1)
}
val band:Band = dataset.GetRasterBand(i)
band.Fill(nodata)
band.SetNoDataValue(nodata)
i += 1
}
}
return dataset
}
null
}
def createUnfilledMemoryRaster(src:Dataset, width:Int, height:Int):Dataset = {
val bands:Int = src.getRasterCount
val datatype:Int = src.GetRasterBand(1).getDataType
createUnfilledMemoryRaster(width, height, bands, datatype)
}
def createUnfilledMemoryRaster(width:Int, height:Int, bands:Int, datatype:Int):Dataset = {
val driver:Driver = gdal.GetDriverByName("MEM")
driver.Create("InMem", width, height, bands, datatype)
}
def createUnfilledDiskBasedRaster(src:Dataset, width:Int, height:Int):Dataset = {
val bands:Int = src.getRasterCount
val datatype:Int = src.GetRasterBand(1).getDataType
createUnfilledDiskBasedRaster(width, height, bands, datatype)
}
def createUnfilledDiskBasedRaster(width:Int, height:Int, bands:Int, datatype:Int):Dataset = {
val driver:Driver = gdal.GetDriverByName("GTiff")
val f = File.createTempFile("gdal-tmp-", ".tif")
val filename = f.getCanonicalPath
driver.Create(filename, width, height, bands, datatype)
}
def toGDALDataType(rasterType:Int):Int = {
rasterType match {
case DataBuffer.TYPE_BYTE => gdalconstConstants.GDT_Byte
case DataBuffer.TYPE_SHORT => gdalconstConstants.GDT_Int16
case DataBuffer.TYPE_USHORT => gdalconstConstants.GDT_UInt16
case DataBuffer.TYPE_INT => gdalconstConstants.GDT_Int32
case DataBuffer.TYPE_FLOAT => gdalconstConstants.GDT_Float32
case DataBuffer.TYPE_DOUBLE => gdalconstConstants.GDT_Float64
case _ => gdalconstConstants.GDT_Unknown
}
}
def toRasterDataBufferType(gdaldatatype:Int):Int = {
gdaldatatype match {
case gdalconstConstants.GDT_Byte => DataBuffer.TYPE_BYTE
case gdalconstConstants.GDT_UInt16 => DataBuffer.TYPE_USHORT
case gdalconstConstants.GDT_Int16 => DataBuffer.TYPE_SHORT
case gdalconstConstants.GDT_UInt32 => DataBuffer.TYPE_INT
case gdalconstConstants.GDT_Int32 => DataBuffer.TYPE_INT
case gdalconstConstants.GDT_Float32 => DataBuffer.TYPE_FLOAT
case gdalconstConstants.GDT_Float64 => DataBuffer.TYPE_DOUBLE
case _ => DataBuffer.TYPE_UNDEFINED
}
}
def swapBytes(bytes:Array[Byte], gdaldatatype:Int):Unit = {
var tmp:Byte = 0
var i:Int = 0
gdaldatatype match {
// Since it's byte data, there is nothing to swap - do nothing
case gdalconstConstants.GDT_Byte =>
// 2 byte value... swap byte 1 with 2
case gdalconstConstants.GDT_UInt16 | gdalconstConstants.GDT_Int16 =>
while (i + 1 < bytes.length) {
tmp = bytes(i)
bytes(i) = bytes(i + 1)
bytes(i + 1) = tmp
i += 2
}
// 4 byte value... swap bytes 1 & 4, 2 & 3
case gdalconstConstants.GDT_UInt32 | gdalconstConstants.GDT_Int32 | gdalconstConstants.GDT_Float32 =>
while (i + 3 < bytes.length) {
// swap 0 & 3
tmp = bytes(i)
bytes(i) = bytes(i + 3)
bytes(i + 3) = tmp
// swap 1 & 2
tmp = bytes(i + 1)
bytes(i + 1) = bytes(i + 2)
bytes(i + 2) = tmp
i += 4
}
// 8 byte value... swap bytes 1 & 8, 2 & 7, 3 & 6, 4 & 5
case gdalconstConstants.GDT_Float64 =>
while (i + 7 < bytes.length) {
// swap 0 & 7
tmp = bytes(i)
bytes(i) = bytes(i + 7)
bytes(i + 7) = tmp
// swap 1 & 6
tmp = bytes(i + 1)
bytes(i + 1) = bytes(i + 6)
bytes(i + 6) = tmp
// swap 2 & 5
tmp = bytes(i + 2)
bytes(i + 2) = bytes(i + 5)
bytes(i + 5) = tmp
// swap 3 & 4
tmp = bytes(i + 3)
bytes(i + 3) = bytes(i + 4)
bytes(i + 4) = tmp
i += 8
}
}
}
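  // Hedged self-check sketch, not part of the original API: swapping any
  // fixed-width type twice must be the identity, which makes a cheap sanity
  // test for the endianness handling above.
  private[utils] def swapBytesRoundTrip(): Boolean = {
    val original = Array[Byte](1, 2, 3, 4, 5, 6, 7, 8)
    val copy = original.clone()
    swapBytes(copy, gdalconstConstants.GDT_Float32)
    swapBytes(copy, gdalconstConstants.GDT_Float32)
    java.util.Arrays.equals(original, copy)
  }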
def getnodatas(imagename:String):Array[Number] = {
val ds = open(imagename)
if (ds != null) {
try {
return getnodatas(ds)
}
finally {
close(ds)
}
}
throw new GDALException("Error opening image: " + imagename)
}
def getnodatas(image:Dataset):Array[Number] = {
val bands = image.GetRasterCount
val nodatas = Array.fill[Double](bands)(Double.NaN)
val v = new Array[java.lang.Double](1)
for (i <- 1 to bands) {
val band:Band = image.GetRasterBand(i)
band.GetNoDataValue(v)
nodatas(i - 1) =
if (v(0) != null) {
v(0)
}
else {
band.getDataType match {
case gdalconstConstants.GDT_Byte |
gdalconstConstants.GDT_UInt16 | gdalconstConstants.GDT_Int16 |
gdalconstConstants.GDT_UInt32 | gdalconstConstants.GDT_Int32 => 0
case gdalconstConstants.GDT_Float32 => Float.NaN
case gdalconstConstants.GDT_Float64 => Double.NaN
}
}
}
nodatas
}
@SuppressFBWarnings(value = Array("REC_CATCH_EXCEPTION"), justification = "GDAL may have throw exceptions enabled")
@SuppressFBWarnings(value = Array("PATH_TRAVERSAL_IN"), justification = "GDAL only reads image files")
def open(imagename:String):Dataset = {
var is:FSDataInputStream = null
try {
val uri:URI = new URI(imagename)
logDebug("Loading image with GDAL: " + imagename)
val file:File = new File(uri.getPath)
if (file.exists) {
val image = gdal.Open(file.getCanonicalPath)
if (image != null) {
logDebug(" Image loaded successfully: " + imagename)
return image
}
}
val p = new Path(uri)
val fs = HadoopFileUtils.getFileSystem(p)
is = fs.open(p)
val bytes = IOUtils.toByteArray(is)
val vsiname = VSI_PREFIX + imagename
gdal.FileFromMemBuffer(vsiname, bytes)
val image = gdal.Open(vsiname)
if (image != null) {
logDebug(" Image loaded successfully: " + imagename)
return image
}
logInfo(
"Image not loaded, but unfortunately no exceptions were thrown, look for a logged explanation somewhere above")
}
catch {
case e:Exception => throw new GDALException("Error opening image file: " + imagename, e)
}
finally {
if (is != null) {
is.close()
}
}
null
}
def close(image:Dataset) {
val files = image.GetFileList
val driver = image.GetDriver()
// if ("MEM".equals(driver.getShortName)) {
// image.delete()
// }
image.delete()
// unlink the file from memory if is has been streamed
for (f <- files) {
f match {
case file:String =>
if (file.startsWith(VSI_PREFIX)) {
gdal.Unlink(file)
}
case _ =>
}
}
}
def delete(image:Dataset, deleteNow: Boolean = true): Unit = {
val files = image.GetFileList
image.delete()
for (f <- files) {
f match {
case file:String =>
val f = new File(file)
if (deleteNow) {
f.delete()
}
else {
f.deleteOnExit()
}
case _ =>
}
}
}
def calculateZoom(imagename:String, tilesize:Int):Int = {
try {
val image = GDALUtils.open(imagename)
if (image != null) {
val b = getBounds(image)
val px = b.width() / image.GetRasterXSize
val py = b.height() / image.GetRasterYSize
val zx = TMSUtils.zoomForPixelSize(Math.abs(px), tilesize)
val zy = TMSUtils.zoomForPixelSize(Math.abs(py), tilesize)
GDALUtils.close(image)
if (zx > zy) {
return zx
}
return zy
}
}
catch {
case ignored:IOException =>
}
-1
}
def getBounds(image:Dataset):Bounds = {
val xform = image.GetGeoTransform
val srs = new SpatialReference(image.GetProjection)
val dst = new SpatialReference(EPSG4326)
val tx = new CoordinateTransformation(srs, dst)
val w = image.GetRasterXSize
val h = image.GetRasterYSize
var c1:Array[Double] = null
var c2:Array[Double] = null
var c3:Array[Double] = null
var c4:Array[Double] = null
if (tx != null) {
c1 = tx.TransformPoint(xform(0), xform(3))
c2 = tx.TransformPoint(xform(0) + xform(1) * w, xform(3) + xform(5) * h)
c3 = tx.TransformPoint(xform(0) + xform(1) * w, xform(3))
c4 = tx.TransformPoint(xform(0), xform(3) + xform(5) * h)
}
else {
c1 = Array[Double](xform(0), xform(3))
c2 = Array[Double](xform(0) + xform(1) * w, xform(3) + xform(5) * h)
c3 = Array[Double](xform(0) + xform(1) * w, xform(3))
c4 = Array[Double](xform(0), xform(3) + xform(5) * h)
}
new Bounds(Math.min(Math.min(c1(0), c2(0)), Math.min(c3(0), c4(0))),
Math.min(Math.min(c1(1), c2(1)), Math.min(c3(1), c4(1))),
Math.max(Math.max(c1(0), c2(0)), Math.max(c3(0), c4(0))),
Math.max(Math.max(c1(1), c2(1)), Math.max(c3(1), c4(1))))
}
@SuppressFBWarnings(value = Array("PATH_TRAVERSAL_IN"), justification = "Temp file used for writing to OutputStream")
def saveRaster(raster:Dataset, output:Either[String, OutputStream],
bounds:Bounds = null, nodata:Double = Double.NegativeInfinity,
format:String = "GTiff", options:Array[String] = Array.empty[String]):Unit = {
val filename = output match {
case Left(f) => f
case Right(_) => File.createTempFile("tmp-file", "").getCanonicalPath
}
saveRaster(raster, filename, format, bounds, options)
output match {
case Right(stream) =>
Files.copy(new File(filename).toPath, stream)
stream.flush()
if (!new File(filename).delete()) {
throw new IOException("Error deleting temporary file: " + filename)
}
case _ =>
}
}
def saveRasterTile(raster:Dataset, output:Either[String, OutputStream],
tx:Long, ty:Long, zoom:Int, nodata:Double = Double.NegativeInfinity,
format:String = "GTiff", options:Array[String] = Array.empty[String]):Unit = {
val bounds = TMSUtils.tileBounds(tx, ty, zoom, raster.getRasterXSize)
saveRaster(raster, output, bounds, nodata, format, options)
}
def getRasterDataAsString(ds:Dataset, band:Int, x:Int, y:Int, width:Int, height:Int):String = {
getRasterDataAsString(ds.GetRasterBand(band), x, y, width, height)
}
def getRasterDataAsString(band:Band, x:Int, y:Int, width:Int, height:Int):String = {
new String(getRasterData(band, x, y, width, height))
}
def getRasterDataAsBase64(ds:Dataset, band:Int, x:Int, y:Int, width:Int, height:Int):String = {
getRasterDataAsBase64(ds.GetRasterBand(band), x, y, width, height)
}
def getRasterDataAsBase64(band:Band, x:Int, y:Int, width:Int, height:Int):String = {
val data = getRasterBuffer(band, x, y, width, height)
val rastersize:Int = getRasterBytes(band, width, height)
    val chunksize = 3072 // This _must_ be a multiple of 3: every 3 bytes encode to exactly 4 base64 chars, so chunks concatenate without padding
val builder = StringBuilder.newBuilder
val chunk = Array.ofDim[Byte](chunksize)
var dataremaining = rastersize
while (dataremaining > chunksize) {
data.get(chunk)
builder ++= DatatypeConverter.printBase64Binary(chunk)
dataremaining -= chunksize
}
if (dataremaining > 0) {
val smallchunk = Array.ofDim[Byte](dataremaining)
data.get(smallchunk)
builder ++= DatatypeConverter.printBase64Binary(smallchunk)
}
builder.result()
//DatatypeConverter.printBase64Binary(getRasterData(band, x, y, width, height))
}
def getRasterDataAsCompressedBase64(ds:Dataset, band:Int, x:Int, y:Int, width:Int, height:Int):String = {
getRasterDataAsCompressedBase64(ds.GetRasterBand(band), x, y, width, height)
}
def getRasterDataAsCompressedBase64(band:Band, x:Int, y:Int, width:Int, height:Int):String = {
val data = getRasterDataCompressed(band, x, y, width, height)
val base64 = DatatypeConverter.printBase64Binary(data)
logInfo("raster data: base64: " + base64.length)
base64
}
def getRasterDataAsCompressedString(ds:Dataset, band:Int, x:Int, y:Int, width:Int, height:Int):String = {
getRasterDataAsCompressedString(ds.GetRasterBand(band), x, y, width, height)
}
def getRasterDataAsCompressedString(band:Band, x:Int, y:Int, width:Int, height:Int):String = {
val data = getRasterDataCompressed(band, x, y, width, height)
new String(data, "UTF-8")
}
def getRasterDataCompressed(ds:Dataset, band:Int, x:Int, y:Int, width:Int, height:Int):Array[Byte] = {
getRasterDataCompressed(ds.GetRasterBand(band), x, y, width, height)
}
def getRasterDataCompressed(band:Band, x:Int, y:Int, width:Int, height:Int):Array[Byte] = {
val data = getRasterBuffer(band, x, y, width, height)
val rastersize:Int = getRasterBytes(band, width, height)
logInfo("raster data: original: " + rastersize)
// var base64Str = DatatypeConverter.printBase64Binary(data)
// println("Base64: " + base64Str.length)
// data = null
//
// var base64 = base64Str.getBytes
// println("Base64 bytes: " + base64.length)
// base64Str = null
//
// val outputStream = new ByteArrayOutputStream(base64.length)
val outputStream = new ByteArrayOutputStream(rastersize)
val zipper = new GZIPOutputStream(outputStream)
val chunksize = 4096
val chunk = Array.ofDim[Byte](chunksize)
var dataremaining = rastersize
while (dataremaining > chunksize) {
data.get(chunk)
zipper.write(chunk)
dataremaining -= chunksize
}
if (dataremaining > 0) {
val smallchunk = Array.ofDim[Byte](dataremaining)
data.get(smallchunk)
zipper.write(smallchunk)
}
//zipper.write(base64)
//base64 = null
zipper.close()
outputStream.close()
val output = outputStream.toByteArray
logInfo("raster data: compressed: " + output.length)
output
}
def getRasterData(ds:Dataset, band:Int, x:Int, y:Int, width:Int, height:Int):Array[Byte] = {
getRasterData(ds.GetRasterBand(band), x, y, width, height)
}
def getRasterData(band:Band, x:Int, y:Int, width:Int, height:Int):Array[Byte] = {
val rastersize:Int = getRasterBytes(band, width, height)
val data = getRasterBuffer(band, x, y, width, height)
val bytes = Array.ofDim[Byte](rastersize)
data.get(bytes)
logInfo("read (" + bytes.length + " bytes (I think)")
bytes
}
def getRasterBytes(band:Band):Int = {
getRasterBytes(band, band.GetXSize(), band.GetYSize())
}
def getRasterBytes(ds:Dataset, band:Int):Int = {
val b = ds.GetRasterBand(band)
getRasterBytes(b, b.GetXSize(), b.GetYSize())
}
private def initializeGDAL() = {
    // Monkeypatch the system library path to use the gdal paths (for loading the gdal libraries)
MrGeoProperties.getInstance().getProperty(MrGeoConstants.GDAL_PATH, "").
split(File.pathSeparator).reverse.foreach(path => {
ClassLoaderUtil.addLibraryPath(path)
})
osr.UseExceptions()
if (gdal.GetDriverCount == 0) {
gdal.AllRegister()
}
if (ogr.GetDriverCount == 0) {
ogr.RegisterAll()
}
val drivers:Int = gdal.GetDriverCount
if (drivers == 0) {
log.error("GDAL libraries were not loaded! This probibly an error.")
}
log.info(gdal.VersionInfo("--version"))
println(gdal.VersionInfo("--version"))
val klass = classOf[gdal]
val location = klass.getResource('/' + klass.getName.replace('.', '/') + ".class")
osr.UseExceptions()
log.info("GDAL jar location: " + location)
println("GDAL jar location: " + location)
println("Java library path: " + System.getProperty("java.library.path"))
if (log.isDebugEnabled) {
log.debug("GDAL Drivers supported:")
for (i <- 0 until drivers) {
val driver:Driver = gdal.GetDriver(i)
logDebug(" " + driver.getLongName + "(" + driver.getShortName + ")")
}
log.debug("GDAL Projections supported:")
for (o <- osr.GetProjectionMethods) {
logDebug(" " + o)
}
}
}
@SuppressFBWarnings(value = Array("RV_RETURN_VALUE_IGNORED_NO_SIDE_EFFECT"), justification = "Scala generated code")
private def saveRaster(ds:Dataset, file:String, format:String, bounds:Bounds, options:Array[String]):Unit = {
val fmt = mapType(format)
val driver = gdal.GetDriverByName(fmt)
val pamEnabled = gdal.GetConfigOption(GDAL_PAM_ENABLED)
gdal.SetConfigOption(GDAL_PAM_ENABLED, "NO")
val moreoptions = fmt.toLowerCase match {
case "gtiff" =>
var moreoptions = ArrayUtils.add(options, "INTERLEAVE=BAND")
moreoptions = ArrayUtils.add(moreoptions, "COMPRESS=DEFLATE")
moreoptions = ArrayUtils.add(moreoptions, "PREDICTOR=1")
moreoptions = ArrayUtils.add(moreoptions, "ZLEVEL=6")
moreoptions = ArrayUtils.add(moreoptions, "TILES=YES")
moreoptions = ArrayUtils
.add(moreoptions, "BLOCKXSIZE=" + (if (ds.getRasterXSize < 2048) {
ds.getRasterXSize
}
else {
2048
}))
moreoptions = ArrayUtils
.add(moreoptions, "BLOCKYSIZE=" + (if (ds.getRasterYSize < 2048) {
ds.getRasterYSize
}
else {
2048
}))
moreoptions
case _ => options
}
val copy:Dataset = driver.CreateCopy(file, ds, 1, moreoptions)
if (copy == null) {
val msg = "Unable to create raster " + file + ". Error message from GDAL is: " + gdal.GetLastErrorMsg()
log.error(msg)
throw new IOException(msg)
}
// add the bounds, if sent in. Reproject if needed
val xform:Array[Double] = new Array[Double](6)
if (bounds != null) {
val proj = ds.GetProjection()
if (proj.length > 0) {
val dst = new SpatialReference(proj)
val src = new SpatialReference(EPSG4326)
val tx = new CoordinateTransformation(src, dst)
var c1:Array[Double] = null
var c2:Array[Double] = null
var c3:Array[Double] = null
var c4:Array[Double] = null
if (tx != null) {
c1 = tx.TransformPoint(bounds.w, bounds.n)
c2 = tx.TransformPoint(bounds.e, bounds.s)
c3 = tx.TransformPoint(bounds.e, bounds.n)
c4 = tx.TransformPoint(bounds.w, bounds.s)
}
val xformed = new Bounds(Math.min(Math.min(c1(0), c2(0)), Math.min(c3(0), c4(0))),
Math.min(Math.min(c1(1), c2(1)), Math.min(c3(1), c4(1))),
Math.max(Math.max(c1(0), c2(0)), Math.max(c3(0), c4(0))),
Math.max(Math.max(c1(1), c2(1)), Math.max(c3(1), c4(1))))
xform(0) = xformed.w
xform(1) = xformed.width / copy.GetRasterXSize()
xform(2) = 0
xform(3) = xformed.n
xform(4) = 0
xform(5) = -xformed.height / copy.GetRasterYSize()
copy.SetProjection(proj)
}
else {
xform(0) = bounds.w
xform(1) = bounds.width / copy.GetRasterXSize()
xform(2) = 0
xform(3) = bounds.n
xform(4) = 0
xform(5) = -bounds.height / copy.GetRasterYSize()
copy.SetProjection(EPSG4326)
}
copy.SetGeoTransform(xform)
}
if (pamEnabled != null) {
gdal.SetConfigOption(GDAL_PAM_ENABLED, pamEnabled)
}
if (copy == null) {
val errno:Int = gdal.GetLastErrorNo
val error:Int = gdal.GetLastErrorType
val msg:String = gdal.GetLastErrorMsg
throw new GDALException("Error saving raster: " + file + "(" + errno + ": " + error + ": " + msg + ")")
}
copy.delete()
}
private def mapType(format:String):String = {
format.toLowerCase match {
case "jpg" => "jpeg"
case "tiff" |
"tif" |
"geotiff" |
"geotif" |
"gtif" => "GTiff"
case _ => format
}
}
private def getRasterBytes(band:Band, width:Int, height:Int):Int = {
val datatype = band.getDataType
val pixelsize = gdal.GetDataTypeSize(datatype) / 8
val linesize = pixelsize * width
val rastersize = linesize * height
rastersize
}
private def getRasterBuffer(band:Band, x:Int, y:Int, width:Int, height:Int):ByteBuffer = {
band.ReadRaster_Direct(x, y, width, height, band.getDataType).order(ByteOrder.nativeOrder())
}
}
| ngageoint/mrgeo | mrgeo-core/src/main/scala/org/mrgeo/utils/GDALUtils.scala | Scala | apache-2.0 | 26,637 |
package org.jetbrains.plugins.scala.config.ui
import com.intellij.openapi.project.Project
import org.jetbrains.plugins.scala.config._
/**
* Pavel.Fatin, 04.08.2010
*/
object LibraryDescriptor {
def compilersFor(project: Project): Array[LibraryDescriptor] = {
def list(level: LibraryLevel) = Libraries.findBy(level, project).map { library =>
LibraryDescriptor(LibraryId(library.getName, level), Some(new CompilerLibraryData(library)))
}
val all = list(LibraryLevel.Global) ++ list(LibraryLevel.Project)
val (suitable, remaining) = all.partition(_.data.get.problem.isEmpty)
suitable.sortBy(_.data.get.version.getOrElse("Unknown")).reverse ++ remaining.sortBy(_.id.name.toLowerCase)
}
def createFor(id: LibraryId) = LibraryDescriptor(id, None)
}
case class LibraryDescriptor(id: LibraryId, data: Option[LibraryData]) | consulo/consulo-scala | src/org/jetbrains/plugins/scala/config/ui/LibraryDescriptor.scala | Scala | apache-2.0 | 854 |
package net.itsky.sortsearch.fsort
object StandardMetrics {
  // Packs up to the first four UTF-16 code units into one order-preserving Long:
  // bases 65537 (2^16 + 1) and 16385 (2^14 + 1) reserve 0 for "absent character",
  // hence the +1 offsets; the 4th char contributes only its top 14 bits.
  val utf16 = (x : String) => {
x.length match {
case 0 => 0L
case 1 => (x.charAt(0) + 1L) * 65537 * 65537 * 16385
case 2 => ((x.charAt(0) + 1L) * 65537 + x.charAt(1) + 1) * 65537 * 16385
case 3 => (((x.charAt(0) + 1L) * 65537 + x.charAt(1) + 1) * 65537 + x.charAt(2) + 1) * 16385
case _ => (((x.charAt(0) + 1L) * 65537 + x.charAt(1) + 1) * 65537 + x.charAt(2) + 1) * 16385 + (x.charAt(3) >> 2) + 1
}
}
  // Maps one character to (code, continue): ISO 646 IRV (ASCII) chars become
  // c + 1, anything else the sentinel 129, which stops the scan; absent positions are 0.
  def iso646irvC(x : String, idx : Int) : (Int, Boolean) = {
if (idx >= x.length) {
(0, false)
} else {
val c : Char = x.charAt(idx)
if (c < 128) {
(c + 1, true)
} else {
(129, false)
}
}
}
  // Packs the first 8 characters base-130 using the mapping above.
  val iso646irv = (x : String) => {
var result : Long = 0L
var cont : Boolean = true
for (idx <- (0 until 8)) {
result *= 130L
if (cont) {
val (cm, contx) = iso646irvC(x, idx)
cont = contx
result += cm
}
}
result
}
  // Same idea for ISO 8859-1: chars below 256 become c + 1, others the sentinel 257.
  def iso88591C(x : String, idx : Int) : (Int, Boolean) = {
if (idx >= x.length) {
(0, false)
} else {
val c : Char = x.charAt(idx)
if (c < 256) {
(c + 1, true)
} else {
(257, false)
}
}
}
  // Packs the first 7 characters base-258 using the mapping above.
  val iso88591 = (x : String) => {
var result : Long = 0L
var cont : Boolean = true
for (idx <- (0 until 7)) {
result *= 258
if (cont) {
val (cm, contx) = iso88591C(x, idx)
cont = contx
result += cm
}
}
result
}
  // ASCII maps to 1..0x80 and the Cyrillic block U+0400..U+04FF to 0x82..0x181;
  // anything else becomes a sentinel (0x81 or 0x182) and stops the scan.
  def cyrC(x : String, idx : Int) : (Int, Boolean) = {
if (idx >= x.length) {
(0, false)
} else {
val c = x.charAt(idx)
if (c < 0x0080) {
(c + 1, true) // 1..0x0080
} else if (c < 0x0400) {
(0x0081, false)
} else if (c < 0x0500) {
(c - 0x0400 + 0x0082, true) // 0x0082..0x0181
} else {
(0x0182, false) // 386
}
}
}
  // Packs the first 7 characters base-387 using the mapping above.
  val cyr = (x : String) => {
var result : Long = 0
var cont : Boolean = true
for (idx <- (0 until 7)) {
result *= 387
if (cont) {
val (cm, contx) = cyrC(x, idx)
cont = contx
result += cm
}
}
result
}
}
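// Hedged usage sketch, not part of the original file: each metric is an
// order-preserving key function on its supported alphabet, so sorting by the
// Long keys agrees with lexicographic order for plain ASCII input.
object StandardMetricsDemo extends App {
  val keys = List("abc", "abd", "b").map(StandardMetrics.iso646irv)
  assert(keys == keys.sorted) // monotone on ISO 646 (ASCII) strings
}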
| bk1/fsort-scala | src/main/scala/net/itsky/sortsearch/fsort/StandardMetrics.scala | Scala | lgpl-2.1 | 2,238 |
package wandou.math.random
import java.util.Random
import java.util.concurrent.locks.ReentrantLock
/**
* <p>Random number generator based on the
* <a href="http://www.math.sci.hiroshima-u.ac.jp/~m-mat/MT/emt.html" target="_top">Mersenne
* Twister</a> algorithm developed by Makoto Matsumoto and Takuji Nishimura.</p>
*
* <p>This is a very fast random number generator with good statistical
* properties (it passes the full DIEHARD suite). This is the best RNG
* for most experiments. If a non-linear generator is required, use
* the slower {@link AESCounterRNG} RNG.</p>
*
* <p>This PRNG is deterministic, which can be advantageous for testing purposes
* since the output is repeatable. If multiple instances of this class are created
* with the same seed they will all have identical output.</p>
*
* <p>This code is translated from the original C version and assumes that we
* will always seed from an array of bytes. I don't pretend to know the
* meanings of the magic numbers or how it works, it just does.</p>
*
* @author Makoto Matsumoto and Takuji Nishimura (original C version)
* @author Daniel Dyer (Java port)
* @author Caoyuan Deng (Scala port)
*/
class MersenneTwisterRNG private (seed: Array[Byte], mt: Array[Int]) extends Random with RepeatableRNG {
import MersenneTwisterRNG._
// Lock to prevent concurrent modification of the RNG's internal state.
private val lock = new ReentrantLock
private var mtIndex = N // Index into state vector.
def getSeed = seed.clone
override final protected def next(bits: Int): Int = {
var y = 0
try {
lock.lock
if (mtIndex >= N) { // Generate N ints at a time.
var kk = 0
while (kk < N - M) {
y = (mt(kk) & UPPER_MASK) | (mt(kk + 1) & LOWER_MASK)
mt(kk) = mt(kk + M) ^ (y >>> 1) ^ MAG01(y & 0x1)
kk += 1
}
while (kk < N - 1) {
y = (mt(kk) & UPPER_MASK) | (mt(kk + 1) & LOWER_MASK)
mt(kk) = mt(kk + (M - N)) ^ (y >>> 1) ^ MAG01(y & 0x1)
kk += 1
}
y = (mt(N - 1) & UPPER_MASK) | (mt(0) & LOWER_MASK)
mt(N - 1) = mt(M - 1) ^ (y >>> 1) ^ MAG01(y & 0x1)
mtIndex = 0
}
y = mt(mtIndex)
mtIndex += 1
} finally {
lock.unlock
}
// Tempering
y ^= (y >>> 11)
y ^= (y << 7) & GENERATE_MASK1
y ^= (y << 15) & GENERATE_MASK2
y ^= (y >>> 18)
y >>> (32 - bits)
}
}
object MersenneTwisterRNG {
// The actual seed size isn't that important, but it should be a multiple of 4.
private val SEED_SIZE_BYTES = 16
// Magic numbers from original C version.
private val N = 624
private val M = 397
private val MAG01 = Array(0, 0x9908b0df)
private val UPPER_MASK = 0x80000000
private val LOWER_MASK = 0x7fffffff
private val BOOTSTRAP_SEED = 19650218
private val BOOTSTRAP_FACTOR = 1812433253
private val SEED_FACTOR1 = 1664525
private val SEED_FACTOR2 = 1566083941
private val GENERATE_MASK1 = 0x9d2c5680
private val GENERATE_MASK2 = 0xefc60000
/**
* Creates a new RNG and seeds it using the default seeding strategy.
*/
def apply(): MersenneTwisterRNG = {
apply(SeedGenerator.generateSeed(SEED_SIZE_BYTES))
}
/**
* Seed the RNG using the provided seed generation strategy.
* @param seedGenerator The seed generation strategy that will provide
* the seed value for this RNG.
* @throws SeedException If there is a problem generating a seed.
*/
@throws(classOf[SeedException])
def apply(seedGenerator: SeedGenerator): MersenneTwisterRNG = {
apply(seedGenerator.generateSeed(SEED_SIZE_BYTES))
}
/**
* Creates an RNG and seeds it with the specified seed data.
* @param seed The seed data used to initialise the RNG.
*/
def apply(seed: Array[Byte]): MersenneTwisterRNG = {
if (seed == null || seed.length != SEED_SIZE_BYTES) {
throw new IllegalArgumentException("Mersenne Twister RNG requires a 128-bit (16-byte) seed.")
}
val seedInts = BinaryUtils.convertBytesToInts(seed)
val mt = new Array[Int](N)
mt(0) = BOOTSTRAP_SEED
var mtIndex = 1
while (mtIndex < N) {
mt(mtIndex) = (BOOTSTRAP_FACTOR * (mt(mtIndex - 1) ^ (mt(mtIndex - 1) >>> 30)) + mtIndex)
mtIndex += 1
}
var i = 1
var j = 0
var k = math.max(N, seedInts.length)
while (k > 0) {
mt(i) = (mt(i) ^ ((mt(i - 1) ^ (mt(i - 1) >>> 30)) * SEED_FACTOR1)) + seedInts(j) + j
i += 1
j += 1
if (i >= N) {
mt(0) = mt(N - 1)
i = 1
}
if (j >= seedInts.length) {
j = 0
}
k -= 1
}
k = N - 1
while (k > 0) {
mt(i) = (mt(i) ^ ((mt(i - 1) ^ (mt(i - 1) >>> 30)) * SEED_FACTOR2)) - i
i += 1
if (i >= N) {
mt(0) = mt(N - 1)
i = 1
}
k -= 1
}
mt(0) = UPPER_MASK // Most significant bit is 1 - guarantees non-zero initial array.
new MersenneTwisterRNG(seed.clone, mt)
}
} | wandoulabs/wandou-math | wandou-math/src/main/scala/wandou/math/random/MersenneTwisterRNG.scala | Scala | apache-2.0 | 4,972 |
package xyz.hyperreal.mos6502
abstract class Know[+A] {
def isEmpty: Boolean
def get: A
def isDefined = !isEmpty
def getOrElse[B >: A]( default: => B ) = if (isEmpty) default else get
def map[B]( f: A => B ) = if (isEmpty) this.asInstanceOf[Know[Nothing]] else Known( f(get) )
}
case class Known[+A]( x: A ) extends Know[A] {
def isEmpty = false
def get = x
}
case object Unknown extends Know[Nothing] {
def isEmpty = true
def get = throw new NoSuchElementException( "Unknown.get" )
}
case object Knowable extends Know[Nothing] {
def isEmpty = true
def get = throw new NoSuchElementException( "Knowable.get" )
} | edadma/mos6502 | src/main/scala/Know.scala | Scala | isc | 629 |
package com.typesafe.slick.testkit.tests
import org.junit.Assert._
import com.typesafe.slick.testkit.util.{TestkitTest, TestDB}
class PagingTest(val tdb: TestDB) extends TestkitTest {
import tdb.profile.simple._
object IDs extends Table[Int]("ids") {
def id = column[Int]("id", O.PrimaryKey)
def * = id
}
def test {
IDs.ddl.create;
IDs.insertAll((1 to 10):_*)
val q1 = Query(IDs).sortBy(_.id)
println("q1: "+q1.selectStatement)
println(" "+q1.list)
assertEquals((1 to 10).toList, q1.list)
val q2 = q1 take 5
println("q2: "+q2.selectStatement)
println(" "+q2.list)
assertEquals((1 to 5).toList, q2.list)
ifCap(scap.pagingDrop) {
val q3 = q1 drop 5
println("q3: "+q3.selectStatement)
println(" "+q3.list)
assertEquals((6 to 10).toList, q3.list)
val q4 = q1 drop 5 take 3
println("q4: "+q4.selectStatement)
println(" "+q4.list)
assertEquals((6 to 8).toList, q4.list)
val q5 = q1 take 5 drop 3
println("q5: "+q5.selectStatement)
println(" "+q5.list)
assertEquals((4 to 5).toList, q5.list)
}
val q6 = q1 take 0
println("q6: "+q6.selectStatement)
println(" "+q6.list)
assertEquals(List(), q6.list)
}
}
| boldradius/slick | slick-testkit/src/main/scala/com/typesafe/slick/testkit/tests/PagingTest.scala | Scala | bsd-2-clause | 1,279 |
package org.jetbrains.plugins.scala
package codeInspection
package typeChecking
import com.intellij.codeInsight.PsiEquivalenceUtil
import com.intellij.codeInspection.{ProblemHighlightType, ProblemsHolder}
import com.intellij.openapi.application.ApplicationManager
import com.intellij.openapi.project.Project
import com.intellij.psi.PsiElement
import com.intellij.psi.util.PsiTreeUtil
import org.jetbrains.annotations.Nls
import org.jetbrains.plugins.scala.extensions._
import org.jetbrains.plugins.scala.lang.lexer.ScalaTokenTypes
import org.jetbrains.plugins.scala.lang.psi.ScalaPsiUtil
import org.jetbrains.plugins.scala.lang.psi.api.base.patterns.{ScBindingPattern, ScPattern}
import org.jetbrains.plugins.scala.lang.psi.api.base.types.{ScExistentialClause, ScTypeElementExt}
import org.jetbrains.plugins.scala.lang.psi.api.expr._
import org.jetbrains.plugins.scala.lang.psi.api.statements.ScPatternDefinition
import org.jetbrains.plugins.scala.lang.psi.api.statements.params.ScParameter
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.ScNamedElement
import org.jetbrains.plugins.scala.lang.psi.api.{ScalaPsiElement, ScalaRecursiveElementVisitor}
import org.jetbrains.plugins.scala.lang.psi.impl.ScalaPsiElementFactory.createExpressionFromText
import org.jetbrains.plugins.scala.lang.psi.impl.toplevel.synthetic.SyntheticNamedElement
import org.jetbrains.plugins.scala.lang.psi.types.ScTypeExt
import org.jetbrains.plugins.scala.lang.refactoring.namesSuggester.NameSuggester
import org.jetbrains.plugins.scala.lang.refactoring.util.{InplaceRenameHelper, ScalaVariableValidator}
import scala.annotation.{nowarn, tailrec}
import scala.collection.mutable
/**
* Nikolay.Tropin
* 5/6/13
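 *
 * Illustrative sketch (not from the original comment) of the rewrite this
 * inspection offers:
 * {{{
 *   if (x.isInstanceOf[String]) x.asInstanceOf[String].length else 0
 *   // becomes
 *   x match {
 *     case s: String => s.length
 *     case _ => 0
 *   }
 * }}}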
*/
@nowarn("msg=" + AbstractInspection.DeprecationText)
final class TypeCheckCanBeMatchInspection extends AbstractInspection(TypeCheckCanBeMatchInspection.inspectionName) {
import TypeCheckCanBeMatchInspection._
override def actionFor(implicit holder: ProblemsHolder, isOnTheFly: Boolean): PartialFunction[PsiElement, Any] = {
case IsInstanceOfCall(call) =>
for {
ifStmt <- Option(PsiTreeUtil.getParentOfType(call, classOf[ScIf]))
condition <- ifStmt.condition
if findIsInstanceOfCalls(condition, onlyFirst = true).contains(call)
if typeCheckIsUsedEnough(ifStmt, call)
} {
val fix = new TypeCheckCanBeMatchQuickFix(call, ifStmt)
holder.registerProblem(call, inspectionName, ProblemHighlightType.GENERIC_ERROR_OR_WARNING, fix)
}
}
private def typeCheckIsUsedEnough(ifStmt: ScIf, isInstOf: ScGenericCall): Boolean = {
val chainSize = listOfIfAndIsInstOf(ifStmt, isInstOf, onlyFirst = true).size
val typeCastsNumber = findAsInstanceOfCalls(ifStmt.condition, isInstOf).size + findAsInstanceOfCalls(ifStmt.thenExpression, isInstOf).size
chainSize > 1 || typeCastsNumber > 0
}
private class TypeCheckCanBeMatchQuickFix(isInstOfUnderFix: ScGenericCall, ifStmt: ScIf)
extends AbstractFixOnTwoPsiElements(inspectionName, isInstOfUnderFix, ifStmt) {
override protected def doApplyFix(isInstOf: ScGenericCall, ifSt: ScIf)
(implicit project: Project): Unit = {
val (matchStmtOption, renameData) = buildMatchStmt(ifSt, isInstOf, onlyFirst = true)
for (matchStmt <- matchStmtOption) {
val newMatch = inWriteAction {
ifSt.replaceExpression(matchStmt, removeParenthesis = true).asInstanceOf[ScMatch]
}
if (!ApplicationManager.getApplication.isUnitTestMode) {
val renameHelper = new InplaceRenameHelper(newMatch)
setElementsForRename(newMatch, renameHelper, renameData)
renameHelper.startRenaming()
}
}
}
}
}
object TypeCheckCanBeMatchInspection {
val inspectionId = "TypeCheckCanBeMatch"
@Nls
val inspectionName: String = ScalaInspectionBundle.message("type.check.can.be.replaced.by.pattern.matching")
private type RenameData = mutable.ArrayBuffer[(Int, Seq[String])]
def buildMatchStmt(ifStmt: ScIf, isInstOfUnderFix: ScGenericCall, onlyFirst: Boolean)
(implicit project: Project): (Option[ScMatch], RenameData) =
baseExpr(isInstOfUnderFix) match {
case Some(expr: ScExpression) =>
val matchedExprText = expr.getText
val (caseClausesText, renameData) = buildCaseClausesText(ifStmt, isInstOfUnderFix, onlyFirst)
val matchStmtText = s"$matchedExprText match { \n " + caseClausesText + "}"
val matchStmt = createExpressionFromText(matchStmtText).asInstanceOf[ScMatch]
(Some(matchStmt), renameData)
case _ => (None, null)
}
private def buildCaseClauseText(ifStmt: ScIf, isInstOfCall: ScGenericCall, caseClauseIndex: Int, renameData: RenameData): Option[String] = {
var definedName: Option[String] = None
var definition: Option[ScPatternDefinition] = None
//method for finding and saving named type cast
def checkAndStoreNameAndDef(asInstOfCall: ScGenericCall): Boolean = {
ScalaPsiUtil.getContextOfType(asInstOfCall, strict = true, classOf[ScPatternDefinition]) match {
case patternDef: ScPatternDefinition =>
val bindings = patternDef.bindings
          //pattern consists of a single binding whose initializer is the asInstanceOf call
if (bindings.size == 1 && patternDef.expr.get == asInstOfCall) {
definition match {
              //store the first occurrence of the pattern definition and name
case Some(oldDef) if oldDef.getTextOffset < patternDef.getTextOffset => true
case _ =>
definedName = Some(bindings.head.getName)
definition = Some(patternDef)
true
}
} else false
case null => false
}
}
val typeArgs = isInstOfCall.typeArgs.typeArgs
for {
condition <- ifStmt.condition
if typeArgs.size == 1
} yield {
val typeElem = typeArgs.head
val typeName0 = typeElem.getText
val typeName = PsiTreeUtil.getChildOfType(typeElem, classOf[ScExistentialClause]) match {
case null => typeName0
case _ => "(" + typeName0 + ")"
}
val asInstOfInBody = findAsInstanceOfCalls(ifStmt.thenExpression, isInstOfCall)
val guardCondition = findGuardCondition(condition)(equiv(_, isInstOfCall))
val asInstOfInGuard = findAsInstanceOfCalls(guardCondition, isInstOfCall)
val asInstOfEverywhere = asInstOfInBody ++ asInstOfInGuard
implicit val project: Project = ifStmt.getProject
val name = if (asInstOfInBody.count(checkAndStoreNameAndDef) == 0) {
//no usage of asInstanceOf
asInstOfEverywhere.toSeq match {
case Seq() => "_"
case _ =>
//no named usage
val validator = new ScalaVariableValidator(
ifStmt,
false,
ifStmt.getParent,
ifStmt.getParent
)
val suggestedNames = NameSuggester.suggestNames(asInstOfEverywhere.head, validator)
val text = suggestedNames.head
for {
expression <- asInstOfEverywhere
newExpr = createExpressionFromText(text)
} inWriteAction {
expression.replaceExpression(newExpr, removeParenthesis = true)
}
renameData.addOne((caseClauseIndex, suggestedNames))
text
}
} else {
inWriteAction(definition.get.delete())
val text = definedName.get
val newExpr = createExpressionFromText(text)
inWriteAction {
for {
expression <- asInstOfEverywhere
} expression.replaceExpression(newExpr, removeParenthesis = true)
}
text
}
buildCaseClauseText(name + " : " + typeName, guardCondition, ifStmt.thenExpression)
}
}
private def buildCaseClauseText(patternText: String,
guardCondition: Option[ScExpression],
body: Option[ScExpression])
(implicit project: Project): String = {
val builder = new mutable.StringBuilder()
.append("case ")
.append(patternText)
guardCondition
.map(_.getText)
.foreach(text => builder.append(" if ").append(text))
builder.append(" ")
.append(ScalaPsiUtil.functionArrow)
val elements = body.toList.flatMap {
case block: ScBlock =>
for {
element <- block.children.toList
elementType = element.getNode.getElementType
if elementType != ScalaTokenTypes.tLBRACE &&
elementType != ScalaTokenTypes.tRBRACE
} yield element
case expression => expression :: Nil
}
elements
.map(_.getText)
.foreach(builder.append)
if (!builder.last.isWhitespace) builder.append("\n")
builder.toString
}
def listOfIfAndIsInstOf(currentIfStmt: ScIf, currentCall: ScGenericCall, onlyFirst: Boolean): List[(ScIf, ScGenericCall)] = {
for (currentBase <- baseExpr(currentCall)) {
currentIfStmt.elseExpression match {
case Some(nextIfStmt: ScIf) =>
for {
nextCondition <- nextIfStmt.condition
nextCall <- findIsInstanceOfCalls(nextCondition, onlyFirst)
nextBase <- baseExpr(nextCall)
if equiv(currentBase, nextBase)
} {
return (currentIfStmt, currentCall) :: listOfIfAndIsInstOf(nextIfStmt, nextCall, onlyFirst)
}
return (currentIfStmt, currentCall) :: Nil
case _ => return (currentIfStmt, currentCall) :: Nil
}
}
Nil
}
private def buildCaseClausesText(ifStmt: ScIf, isInstOfUnderFix: ScGenericCall, onlyFirst: Boolean): (String, RenameData) = {
val builder = new mutable.StringBuilder()
val (ifStmts, isInstOf) = listOfIfAndIsInstOf(ifStmt, isInstOfUnderFix, onlyFirst).unzip
val renameData = new RenameData()
for {
index <- ifStmts.indices
text <- buildCaseClauseText(ifStmts(index), isInstOf(index), index, renameData)
} builder.append(text)
if (ifStmts != Nil) {
builder ++= buildCaseClauseText("_", None, ifStmts.last.elseExpression)(ifStmt.getProject)
}
(builder.toString(), renameData)
}
@tailrec
def findIsInstanceOfCalls(condition: ScExpression, onlyFirst: Boolean = false): List[ScGenericCall] = condition match {
case _ if !onlyFirst =>
separateConditions(condition).collect {
case IsInstanceOfCall(call) => call
}
case IsInstanceOfCall(call) => call :: Nil
case IsConjunction(left, _) => findIsInstanceOfCalls(left, onlyFirst)
case ScParenthesisedExpr(expression) => findIsInstanceOfCalls(expression, onlyFirst)
case _ => Nil
}
private def findAsInstanceOfCalls(maybeBody: Option[ScExpression],
isInstOfCall: ScGenericCall): Iterable[ScGenericCall] = maybeBody match {
case Some(body) =>
def baseAndType(call: ScGenericCall) = for {
base <- baseExpr(call)
typeElements = call.typeArgs.typeArgs
if typeElements.size == 1
} yield (base, typeElements.head.calcType)
val result = mutable.ArrayBuffer.empty[ScGenericCall]
val visitor = new ScalaRecursiveElementVisitor {
override def visitGenericCallExpression(call: ScGenericCall): Unit = {
val asInstanceOfCall = call.referencedExpr match {
case ref: ScReferenceExpression if ref.refName == "asInstanceOf" => Option(ref.resolve())
case _ => None
}
if (asInstanceOfCall.exists(_.is[SyntheticNamedElement])) {
for {
(base1, type1) <- baseAndType(isInstOfCall)
(base2, type2) <- baseAndType(call)
if type1.equiv(type2) && equiv(base1, base2)
} result += call
}
super.visitGenericCallExpression(call)
}
}
body.accept(visitor)
result
case _ => Seq.empty
}
def setElementsForRename(matchStmt: ScMatch, renameHelper: InplaceRenameHelper, renameData: RenameData): Unit = {
val caseClauses = matchStmt.clauses.toList
for {
(index, suggestedNames) <- renameData
caseClause = caseClauses(index)
name = suggestedNames.head
} {
val primary = mutable.ArrayBuffer.empty[ScNamedElement]
val dependents = mutable.SortedSet.empty[ScalaPsiElement](Ordering.by(_.getTextOffset))
val patternVisitor = new ScalaRecursiveElementVisitor() {
override def visitPattern(pat: ScPattern): Unit = {
pat match {
case bp: ScBindingPattern if bp.name == name =>
primary += bp
case _ =>
}
super.visitPattern(pat)
}
}
val referenceVisitor = new ScalaRecursiveElementVisitor() {
override def visitReferenceExpression(ref: ScReferenceExpression): Unit = {
for (prim <- primary) {
if (ref.refName == name && ref.resolve() == prim)
dependents += ref
}
super.visitReferenceExpression(ref)
}
}
caseClause.accept(patternVisitor)
caseClause.accept(referenceVisitor)
for (prim <- primary) renameHelper.addGroup(prim, dependents.toSeq, suggestedNames)
}
}
private def baseExpr(call: ScGenericCall) = call.referencedExpr match {
case ref: ScReferenceExpression => ref.qualifier
case _ => None
}
private def findGuardCondition(condition: ScExpression)
(predicate: ScExpression => Boolean): Option[ScExpression] =
separateConditions(condition)
.filterNot(predicate)
.map(_.getText)
.mkString(" && ") match {
case text if text.isEmpty => None
case text => Some(createExpressionFromText(text, condition))
}
private def equiv =
PsiEquivalenceUtil.areElementsEquivalent(
_: PsiElement,
_: PsiElement, {
case (left: PsiElement, right: PsiElement) if left == right => 0
case (left: ScParameter, right: ScParameter) =>
left.name match {
case null => 1
case leftName =>
right.name match {
case null => 1
case rightName => leftName.compareTo(rightName)
}
}
case _ => 1
}: java.util.Comparator[PsiElement],
false
)
private def separateConditions(expression: ScExpression) = {
@tailrec
def separateConditions(expressions: List[ScExpression],
accumulator: List[ScExpression]): List[ScExpression] = expressions match {
case Nil => accumulator
case head :: tail =>
val (newExpressions, newAccumulator) = head match {
case IsConjunction(left, right) => (left :: right :: tail, accumulator)
case ScParenthesisedExpr(infixExpression: ScInfixExpr) => (infixExpression :: tail, accumulator)
case ScParenthesisedExpr(call: ScGenericCall) => (tail, call :: accumulator)
case _ => (tail, head :: accumulator)
}
separateConditions(newExpressions, newAccumulator)
}
separateConditions(List(expression), Nil)
}
}
| JetBrains/intellij-scala | scala/scala-impl/src/org/jetbrains/plugins/scala/codeInspection/typeChecking/TypeCheckCanBeMatchInspection.scala | Scala | apache-2.0 | 15,225 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.samza.storage.kv
import org.apache.samza.metrics.{MetricsHelper, MetricsRegistry, MetricsRegistryMap}
class KeyValueStoreMetrics(
val storeName: String = "unknown",
val registry: MetricsRegistry = new MetricsRegistryMap) extends MetricsHelper {
val gets = newCounter("gets")
val getAlls = newCounter("getAlls")
val puts = newCounter("puts")
val putAlls = newCounter("putAlls")
val deletes = newCounter("deletes")
val deleteAlls = newCounter("deleteAlls")
val alls = newCounter("alls")
val ranges = newCounter("ranges")
val flushes = newCounter("flushes")
val bytesWritten = newCounter("bytes-written")
val bytesRead = newCounter("bytes-read")
override def getPrefix = storeName + "-"
}
| prateekm/samza | samza-kv/src/main/scala/org/apache/samza/storage/kv/KeyValueStoreMetrics.scala | Scala | apache-2.0 | 1,546 |
package com.arcusys.valamis.lesson.scorm.service.sequencing
import com.arcusys.valamis.lesson.scorm.model.sequencing.{ NavigationRequestType, NavigationResponseInvalid, NavigationResponseWithoutTermination }
@org.junit.runner.RunWith(classOf[org.scalatest.junit.JUnitRunner])
class StartNavigationRequestTest extends NavigationRequestServiceTestBase(NavigationRequestType.Start) {
"Start navigation request" should "succeed for no current activity (1.1.1)" in {
expectResult(NavigationResponseWithoutTermination, rootOnlyTree())
}
it should "fail for defined current activity (1.2.1)" in {
expectResult(NavigationResponseInvalid, rootOnlyTree(hasCurrent = true))
}
} | ViLPy/Valamis | valamis-scorm-lesson/src/test/scala/com/arcusys/valamis/lesson/scorm/service/sequencing/StartNavigationRequestTest.scala | Scala | lgpl-3.0 | 685 |
package com.yannick_cw.elastic_indexer4s
import akka.NotUsed
import akka.actor.ActorSystem
import akka.stream.Supervision.Decider
import akka.stream.scaladsl.Source
import akka.stream.{ActorMaterializer, ActorMaterializerSettings}
import com.yannick_cw.elastic_indexer4s.elasticsearch.elasic_config.ElasticWriteConfig
import com.sksamuel.elastic4s.Indexable
import cats.instances.future.catsStdInstancesForFuture
import com.sksamuel.elastic4s.streams.RequestBuilder
import com.yannick_cw.elastic_indexer4s.elasticsearch.{ElasticseachInterpreter, EsAccess}
import com.yannick_cw.elastic_indexer4s.indexing_logic.{IndexOps, IndexableStream, IndexingWithEs}
import com.sksamuel.elastic4s.http.ElasticDsl._
import scala.concurrent.{ExecutionContext, Future}
class ElasticIndexer4s(esConf: ElasticWriteConfig)(implicit system: ActorSystem,
materializer: ActorMaterializer,
ex: ExecutionContext) {
/**
   * Creates an IndexableStream from a Source of elements and an Indexable for the element type
*/
def from[Entity: Indexable](source: Source[Entity, NotUsed]): IndexableStream[Entity, Future] = {
implicit val defaultBuilder: RequestBuilder[Entity] =
(entity: Entity) => indexInto(esConf.indexName / esConf.docType) source entity
implicit val esAccess: EsAccess[Future] = new ElasticseachInterpreter(esConf)
implicit val indexOps: IndexOps[Future] = new IndexingWithEs[Future]()
new IndexableStream[Entity, Future](source, () => Future(esConf.client.close()))
}
/**
   * Creates an IndexableStream from a Source of elements and a RequestBuilder for the element type.
   * This can be used for additional configuration of how elements are indexed.
*/
def fromBuilder[Entity: RequestBuilder](source: Source[Entity, NotUsed]): IndexableStream[Entity, Future] = {
implicit val esAccess: EsAccess[Future] = new ElasticseachInterpreter(esConf)
implicit val indexOps: IndexOps[Future] = new IndexingWithEs[Future]()
new IndexableStream[Entity, Future](source, () => Future(esConf.client.close()))
}
def withDecider(decider: Decider): ElasticIndexer4s = {
val materializer = ActorMaterializer(ActorMaterializerSettings(system).withSupervisionStrategy(decider))
new ElasticIndexer4s(esConf)(system, materializer, ex)
}
}
object ElasticIndexer4s {
def apply(esConf: ElasticWriteConfig,
system: ActorSystem,
materializer: ActorMaterializer,
ex: ExecutionContext): ElasticIndexer4s =
new ElasticIndexer4s(esConf)(system, materializer, ex)
def apply(esConf: ElasticWriteConfig): ElasticIndexer4s = {
implicit val system = ActorSystem()
implicit val ex = system.dispatcher
implicit val materializer = ActorMaterializer()
new ElasticIndexer4s(esConf)
}
}
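// Illustrative sketch, not part of the original file: indexing a small in-memory
// source. `esConf` is assumed to be an ElasticWriteConfig built elsewhere, and the
// Indexable instance below is a toy JSON encoder.
//
//   implicit val stringIndexable: Indexable[String] =
//     (s: String) => s"""{"value":"$s"}"""
//   ElasticIndexer4s(esConf)
//     .from(Source(List("a", "b", "c")))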
| yannick-cw/elastic-indexer4s | src/main/scala/com/yannick_cw/elastic_indexer4s/ElasticIndexer4s.scala | Scala | mit | 2,888 |
/*
* Copyright 2016 Coral realtime streaming analytics (http://coral-streaming.github.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.coral.lib
import java.util.Properties
import kafka.consumer._
import kafka.message.MessageAndMetadata
import org.json4s.JsonAST.{JNothing, JValue}
import org.json4s.jackson.JsonMethods._
import org.mockito.Mockito._
import org.scalatest.mock.MockitoSugar
import org.scalatest.{Matchers, WordSpec}
class KafkaJsonConsumerSpec extends WordSpec with Matchers with MockitoSugar {
"KafkaJsonConsumer" should {
"provide a stream" in {
val consumer = KafkaJsonConsumer()
intercept[IllegalArgumentException] {
consumer.stream("abc", new Properties())
}
}
}
"KafkaJsonStream" should {
val fakeConnection = mock[ConsumerConnector]
doNothing.when(fakeConnection).commitOffsets
val fakeMessage = mock[MessageAndMetadata[Array[Byte], JValue]]
when(fakeMessage.key()).thenReturn("TestKey".getBytes)
when(fakeMessage.message()).thenReturn(parse( """{ "json": "test" }"""))
val fakeIterator = mock[ConsumerIterator[Array[Byte], JValue]]
when(fakeIterator.hasNext()).thenReturn(true).thenReturn(false)
when(fakeIterator.next()).thenReturn(fakeMessage)
val fakeStream = mock[KafkaStream[Array[Byte], JValue]]
when(fakeStream.iterator()).thenReturn(fakeIterator)
"provide a next value" in {
val kjs = new KafkaJsonStream(fakeConnection, fakeStream)
kjs.hasNextInTime shouldBe true
kjs.next shouldBe parse( """{ "json": "test" }""")
}
}
"JsonDecoder" should {
"convert bytes to Json object" in {
val jsonString = """{ "hello": "json" }"""
val bytes = jsonString.getBytes
val jsonValue = parse(jsonString)
JsonDecoder.fromBytes(bytes) shouldBe jsonValue
}
"return JNothing for invalid JSon" in {
val jsonString = """hello"""
val bytes = jsonString.getBytes
JsonDecoder.fromBytes(bytes) shouldBe JNothing
}
}
}
| coral-streaming/coral | src/test/scala/io/coral/lib/KafkaJsonConsumerSpec.scala | Scala | apache-2.0 | 2,457 |
/*
* Copyright 2008-present MongoDB, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.mongodb.scala
import com.mongodb.Block
import scala.collection.JavaConverters._
import org.bson.BsonDocument
import com.mongodb.async.client.{MongoClient => JMongoClient}
import com.mongodb.connection.ClusterSettings
import org.scalamock.scalatest.proxy.MockFactory
import org.scalatest.{FlatSpec, Matchers}
class MongoClientSpec extends FlatSpec with Matchers with MockFactory {
val wrapped = mock[JMongoClient]
val clientSession = mock[ClientSession]
val mongoClient = new MongoClient(wrapped)
def observer[T] = new Observer[T]() {
override def onError(throwable: Throwable): Unit = {}
override def onSubscribe(subscription: Subscription): Unit = subscription.request(Long.MaxValue)
override def onComplete(): Unit = {}
override def onNext(doc: T): Unit = {}
}
"MongoClient" should "have the same methods as the wrapped MongoClient" in {
val wrapped = classOf[JMongoClient].getMethods.map(_.getName)
val local = classOf[MongoClient].getMethods.map(_.getName)
wrapped.foreach((name: String) => {
val cleanedName = name.stripPrefix("get")
assert(local.contains(name) | local.contains(cleanedName.head.toLower + cleanedName.tail))
})
}
it should "default to localhost:27107" in {
val serverAddress = new ServerAddress("localhost", 27017)
val mongoClient = MongoClient()
mongoClient.settings.getClusterSettings.getHosts.asScala.head shouldBe serverAddress
}
it should "apply read preference from connection string to settings" in {
MongoClient("mongodb://localhost/").settings.getReadPreference should equal(ReadPreference.primary())
MongoClient("mongodb://localhost/?readPreference=secondaryPreferred").settings.getReadPreference should equal(ReadPreference.secondaryPreferred())
}
it should "apply read concern from connection string to settings" in {
MongoClient("mongodb://localhost/").settings.getReadConcern should equal(ReadConcern.DEFAULT)
MongoClient("mongodb://localhost/?readConcernLevel=local").settings.getReadConcern should equal(ReadConcern.LOCAL)
}
it should "apply write concern from connection string to settings" in {
MongoClient("mongodb://localhost/").settings.getWriteConcern should equal(WriteConcern.ACKNOWLEDGED)
MongoClient("mongodb://localhost/?w=majority").settings.getWriteConcern should equal(WriteConcern.MAJORITY)
}
it should "accept MongoClientSettings" in {
val serverAddress = new ServerAddress("localhost", 27020)
val clusterSettings = ClusterSettings.builder().hosts(List(serverAddress).asJava).build()
val mongoClient = MongoClient(MongoClientSettings.builder()
.applyToClusterSettings(new Block[ClusterSettings.Builder] {
override def apply(b: ClusterSettings.Builder): Unit = b.applySettings(clusterSettings)
}).build())
mongoClient.settings.getClusterSettings.getHosts.get(0) shouldBe serverAddress
}
it should "accept MongoDriverInformation" in {
val driverInformation = MongoDriverInformation.builder().driverName("test").driverVersion("1.2.0").build()
MongoClient("mongodb://localhost", Some(driverInformation))
}
it should "call the underlying getSettings" in {
wrapped.expects(Symbol("getSettings"))().once()
mongoClient.settings
}
it should "call the underlying getDatabase" in {
wrapped.expects(Symbol("getDatabase"))("dbName").once()
mongoClient.getDatabase("dbName")
}
it should "call the underlying close" in {
wrapped.expects(Symbol("close"))().once()
mongoClient.close()
}
it should "call the underlying startSession" in {
val clientSessionOptions = ClientSessionOptions.builder.build()
wrapped.expects(Symbol("startSession"))(clientSessionOptions, *).once()
mongoClient.startSession(clientSessionOptions).subscribe(observer[ClientSession])
}
it should "call the underlying listDatabases[T]" in {
wrapped.expects(Symbol("listDatabases"))(classOf[Document]).once()
wrapped.expects(Symbol("listDatabases"))(clientSession, classOf[Document]).once()
wrapped.expects(Symbol("listDatabases"))(classOf[BsonDocument]).once()
wrapped.expects(Symbol("listDatabases"))(clientSession, classOf[BsonDocument]).once()
mongoClient.listDatabases()
mongoClient.listDatabases(clientSession)
mongoClient.listDatabases[BsonDocument]()
mongoClient.listDatabases[BsonDocument](clientSession)
}
it should "call the underlying listDatabaseNames" in {
wrapped.expects(Symbol("listDatabaseNames"))().once()
wrapped.expects(Symbol("listDatabaseNames"))(clientSession).once()
mongoClient.listDatabaseNames()
mongoClient.listDatabaseNames(clientSession)
}
it should "call the underlying watch" in {
val pipeline = List(Document("$match" -> 1))
wrapped.expects(Symbol("watch"))(classOf[Document]).once()
wrapped.expects(Symbol("watch"))(pipeline.asJava, classOf[Document]).once()
wrapped.expects(Symbol("watch"))(pipeline.asJava, classOf[BsonDocument]).once()
wrapped.expects(Symbol("watch"))(clientSession, pipeline.asJava, classOf[Document]).once()
wrapped.expects(Symbol("watch"))(clientSession, pipeline.asJava, classOf[BsonDocument]).once()
mongoClient.watch() shouldBe a[ChangeStreamObservable[_]]
mongoClient.watch(pipeline) shouldBe a[ChangeStreamObservable[_]]
mongoClient.watch[BsonDocument](pipeline) shouldBe a[ChangeStreamObservable[_]]
mongoClient.watch(clientSession, pipeline) shouldBe a[ChangeStreamObservable[_]]
mongoClient.watch[BsonDocument](clientSession, pipeline) shouldBe a[ChangeStreamObservable[_]]
}
}
| rozza/mongo-scala-driver | driver/src/test/scala/org/mongodb/scala/MongoClientSpec.scala | Scala | apache-2.0 | 6,214 |
// See LICENSE for license details.
package sifive.blocks.devices.gpio
import freechips.rocketchip.config.Field
import freechips.rocketchip.diplomacy._
import freechips.rocketchip.subsystem.BaseSubsystem
case object PeripheryGPIOKey extends Field[Seq[GPIOParams]](Nil)
trait HasPeripheryGPIO { this: BaseSubsystem =>
val (gpioNodes, iofNodes) = p(PeripheryGPIOKey).map { ps =>
val gpio = GPIOAttachParams(ps).attachTo(this)
(gpio.ioNode.makeSink(), gpio.iofNode.map { _.makeSink() })
}.unzip
}
trait HasPeripheryGPIOBundle {
val gpio: Seq[GPIOPortIO]
val iof: Seq[Option[IOFPortIO]]
}
trait HasPeripheryGPIOModuleImp extends LazyModuleImp with HasPeripheryGPIOBundle {
val outer: HasPeripheryGPIO
val gpio = outer.gpioNodes.zipWithIndex.map { case(n,i) => n.makeIO()(ValName(s"gpio_$i")) }
val iof = outer.iofNodes.zipWithIndex.map { case(o,i) => o.map { n => n.makeIO()(ValName(s"iof_$i")) } }
}
| sifive/sifive-blocks | src/main/scala/devices/gpio/GPIOPeriphery.scala | Scala | apache-2.0 | 924 |
package com.gu.mobile.notifications.football.lib
import okhttp3._
import scala.concurrent.{ExecutionContext, Future}
class NotificationHttpProvider(implicit ec: ExecutionContext) extends HttpProvider {
val httpClient = new OkHttpClient
override def post(uri: String, apiKey: String, contentType: ContentType, body: Array[Byte]): Future[HttpResponse] = {
val authHeader = s"Bearer $apiKey"
val mediaType = MediaType.parse(s"${contentType.mediaType}; charset=${contentType.charset}")
val requestBody = RequestBody.create(mediaType, body)
val httpRequest = new Request.Builder()
.url(uri)
.header("Authorization", authHeader)
.post(requestBody)
.build()
val httpResponse = httpClient.newCall(httpRequest).execute()
Future.successful(extract(httpResponse))
}
override def get(uri: String): Future[HttpResponse] = {
val httpRequest = new Request.Builder().url(uri).build
val httpResponse = httpClient.newCall(httpRequest).execute
Future.successful(extract(httpResponse))
}
private def extract(response: Response) = {
if (response.code >= 200 && response.code < 300)
HttpOk(response.code, response.body.string)
else
HttpError(response.code, response.body.string)
}
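  // Illustrative usage sketch, not part of the original file (the URL is
  // hypothetical, and an ExecutionContext is assumed to be in scope):
  //
  //   val provider = new NotificationHttpProvider()
  //   provider.get("https://example.com/health").foreach {
  //     case HttpOk(status, body)    => println(s"$status: $body")
  //     case HttpError(status, body) => println(s"request failed with $status")
  //   }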
}
| guardian/mobile-n10n | football/src/main/scala/com/gu/mobile/notifications/football/lib/NotificationHttpProvider.scala | Scala | apache-2.0 | 1,261 |
package skadi.container.processing
import org.junit.Assert._
import org.junit.Test
import skadi.beans.Bean
import skadi.container.Container
class EagerLoaderTest {
@Test
def testProcess {
val beans = List(
new Bean named 'abstract
implementedWith classOf[AbstractBean],
new Bean named 'singleton
implementedWith classOf[SingletonBean]
scopedAsSingleton,
new Bean named 'prototype
implementedWith classOf[PrototypeBean]
scopedAsPrototype,
new Bean named 'eager
implementedWith classOf[EagerBean],
new Bean named 'lazy
implementedWith classOf[LazyBean]
loadLazily
)
val container = new Container(beans)
assertFalse(Context.abstractLoaded)
assertTrue(Context.singletonLoaded)
assertFalse(Context.prototypeLoaded)
assertTrue(Context.eagerLoaded)
assertFalse(Context.lazyLoaded)
}
}
object Context {
var abstractLoaded = false
var singletonLoaded = false
var prototypeLoaded = false
var eagerLoaded = false
var lazyLoaded = false
}
abstract class AbstractBean {
Context.abstractLoaded = true
}
class SingletonBean {
Context.singletonLoaded = true
}
class PrototypeBean {
Context.prototypeLoaded = true
}
class EagerBean {
Context.eagerLoaded = true
}
class LazyBean {
Context.lazyLoaded = true
}
| nmilinkovic/Skadi | src/test/scala/skadi/container/processing/EagerLoaderTest.scala | Scala | bsd-3-clause | 1,354 |
package rml.args.run
import java.io.File
import scala.reflect.io.Path.string2path
import com.typesafe.scalalogging.LazyLogging
import rml.args.arg.{/ => /}
import rml.args.arg.Func
import rml.args.config.reader.ConfReader
import rml.args.help.DefaultHelpSetup
import rml.args.help.HelpFunctions
import rml.args.jline.DefaultJlineSetup
import rml.args.logging.DefaultLogSetup
import rml.args.arg.function.FunctionOrigin
import rml.args.register.@@
object DefaultSetup extends LazyLogging {
implicit val origin = FunctionOrigin("DefaultSetup")
def apply(prefix: String, systemPrefix: String = "@") = {
DefaultLogSetup(prefix, systemPrefix)
logger.debug("Default setup with prefix '{}'", prefix)
DefaultJlineSetup(prefix, systemPrefix)
DefaultHelpSetup(prefix, systemPrefix)
@@(systemPrefix + "conf", "Show configuration settings") -->
Func{
val fullConfig = ConfReader(Array[String](), prefix, "conf")
HelpFunctions().printFullConf(fullConfig)
}
@@(systemPrefix + "conf files", "Show configuration files") -->
Func{
println("Default config files:\nFound File")
ConfReader.defaultConfFilePaths(prefix).foreach(p => println("[" + (if(new File(p).exists) "X" else " ") + "] " + p))
}
@@(systemPrefix + "version", "Show scala_args version") -->
Func{
val scalaArgsPackage = getClass.getPackage
println(scalaArgsPackage.getImplementationTitle + ": " + scalaArgsPackage.getImplementationVersion)
}
}
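  // Illustrative usage sketch, not part of the original file: a one-line setup for
  // a tool whose config files use the prefix "mytool".
  //
  //   DefaultSetup("mytool")          // registers logging, jline, help, @conf, @version
  //   DefaultSetup("mytool", "sys-")  // same, with a custom system-command prefix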
}
| rml/scala_args | src/main/scala/rml/args/run/DefaultSetup.scala | Scala | gpl-3.0 | 1,557 |
/*******************************************************************************
Copyright (c) 2013, KAIST, S-Core.
All rights reserved.
Use is subject to license terms.
This distribution may include materials developed by third parties.
******************************************************************************/
package kr.ac.kaist.jsaf.analysis.typing.models.DOMHtml5
import scala.collection.mutable.{Map=>MMap, HashMap=>MHashMap}
import kr.ac.kaist.jsaf.analysis.typing.domain._
import kr.ac.kaist.jsaf.analysis.typing.domain.{BoolFalse => F, BoolTrue => T}
import kr.ac.kaist.jsaf.analysis.typing.models._
import kr.ac.kaist.jsaf.analysis.cfg.{CFG, CFGExpr}
import kr.ac.kaist.jsaf.analysis.typing.{ControlPoint, Helper, PreHelper, Semantics}
import kr.ac.kaist.jsaf.analysis.typing.domain.Heap
import kr.ac.kaist.jsaf.analysis.typing.domain.Context
import kr.ac.kaist.jsaf.analysis.typing.models.AbsBuiltinFunc
import kr.ac.kaist.jsaf.analysis.typing.models.AbsConstValue
import scala.Some
import kr.ac.kaist.jsaf.analysis.typing.AddressManager._
// Modeled based on WHATWG HTML Living Standard
// Section 6.5.2 The History Interface.
object History extends DOM {
private val name = "History"
/* predefined locations */
val loc_ins = newSystemRecentLoc(name + "Ins")
val loc_proto = newSystemRecentLoc(name + "Proto")
/* instant object */
private val prop_ins: List[(String, AbsProperty)] = List(
("@class", AbsConstValue(PropValue(AbsString.alpha("Object")))),
("@proto", AbsConstValue(PropValue(ObjectValue(Value(loc_proto), F, F, F)))),
("@extensible", AbsConstValue(PropValue(BoolTrue))),
// property
("length", AbsConstValue(PropValue(ObjectValue(Value(UInt), F, T, T))))
//TODO : 'state'
)
  /* prototype */
private val prop_proto: List[(String, AbsProperty)] = List(
("@class", AbsConstValue(PropValue(AbsString.alpha("Object")))),
("@proto", AbsConstValue(PropValue(ObjectValue(ObjProtoLoc, F, F, F)))),
("@extensible", AbsConstValue(PropValue(BoolTrue))),
// API
("go", AbsBuiltinFunc("History.go", 1)),
("back", AbsBuiltinFunc("History.back", 0)),
("forward", AbsBuiltinFunc("History.forward", 0)),
("pushState", AbsBuiltinFunc("History.pushState", 3)),
("replaceState", AbsBuiltinFunc("History.replaceState", 3))
)
/* no constructor */
/* initial property list */
def getInitList(): List[(Loc, List[(String, AbsProperty)])] = List(
(loc_ins, prop_ins), (loc_proto, prop_proto)
)
def getSemanticMap(): Map[String, SemanticFun] = {
Map(
("History.go" -> (
(sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
((Helper.ReturnStore(h, Value(UndefTop)), ctx), (he, ctxe))
})),
("History.back" -> (
(sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
((Helper.ReturnStore(h, Value(UndefTop)), ctx), (he, ctxe))
})),
("History.forward" -> (
(sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
((Helper.ReturnStore(h, Value(UndefTop)), ctx), (he, ctxe))
}))
// TODO : "History.pushState", "History.replaceState"
)
}
def getPreSemanticMap(): Map[String, SemanticFun] = {
Map(
)
}
def getDefMap(): Map[String, AccessFun] = {
Map(
)
}
def getUseMap(): Map[String, AccessFun] = {
Map(
)
}
/* instance */
def getInstance(): Option[Loc] = Some (loc_ins)
}
| daejunpark/jsaf | src/kr/ac/kaist/jsaf/analysis/typing/models/DOMHtml5/History.scala | Scala | bsd-3-clause | 3,669 |
// filter: (\\s*)at(.*)
import scala.tools.partest._
object Test extends DirectTest {
override def extraSettings: String = "-usejavacp -Vdebug-type-error"
def code: String = ""
def noSuchType: String = """
object Example
{
val a: org.dummy.Dummy = ???
}
"""
def show(): Unit = {
val global = newCompiler()
def run(code: String): Unit =
compileString(global)(code.trim)
run(noSuchType)
}
}
| scala/scala | test/files/run/debug-type-error.scala | Scala | apache-2.0 | 427 |
package provingground.fol
/** Logical terms */
trait Term extends Expression {
/** Formula giving equality */
def eqls(that: Term) = Eq(this, that)
  /** Substitutes variables by terms */
def subs(xt: Var => Term): Term
/** Single variable substituted by a term */
def subs(x: Var, t: Term): Term = {
val xt: (Var => Term) = (y: Var) => if (y == x) t else y
subs(xt)
}
/** Formal + operation */
def +(that: Term) = BinOp("+")(this, that)
/** Formal -(binary) operation */
def -(that: Term) = BinOp("-")(this, that)
/** Formal * operation */
def *(that: Term) = BinOp("*")(this, that)
/** Formal / operation */
def /(that: Term) = BinOp("/")(this, that)
/** Formal ** operation */
def **(that: Term) = BinOp("**")(this, that)
/** Formal | operation */
def |(that: Term) = BinOp("|")(this, that)
/** Formal unary - operation */
def unary_- = UnOp("-")(this)
/** Formal < relation */
def <(that: Term): Formula = BinRel("<")(this, that)
/** Formal > relation */
def >(that: Term): Formula = BinRel(">")(this, that)
/** Formal < relation */
def <<(that: Term): Formula = BinRel("<")(this, that)
/** Formal > relation */
def >>(that: Term): Formula = BinRel(">")(this, that)
/** Formal = relation */
def =:=(that: Term): Formula = BinRel("=")(this, that)
/** Formal <= relation */
def <=(that: Term): Formula = BinRel("<=")(this, that)
/** Formal >= relation */
def >=(that: Term): Formula = BinRel(">=")(this, that)
/** Formal ~ relation */
def ~(that: Term): Formula = BinRel("~")(this, that)
def apply(b: BinRel) = (that: Term) => b(this, that)
def is(u: UnRel) = u(this)
}
/** Logical Variable */
class Var extends Term {
val freeVars = Set(this)
def subs(xt: Var => Term): Term = xt(this)
}
object Var {
  /** Stream of Variables starting with Var("a") */
val varstream: LazyList[Var] =
(LazyList.from(0)) map ((x: Int) => VarSym((x + 'a').toChar.toString))
}
/** Logical Variable determined by name */
case class VarSym(name: String) extends Var {
override def toString = name
}
/** Logical constants */
trait Const extends Term with LanguageParam {
override val freeVars: Set[Var] = Set()
override def subs(xt: Var => Term) = this
}
/** Constants given by name */
case class ConstSym(name: String) extends Const
/** Integer constant */
case class IntConst(value: Long) extends Const {
override def toString = value.toString
}
/** Unparsed term formally wrapped */
case class TermFmla(name: String) extends Term {
override def toString = name
val freeVars: Set[Var] = Set()
def subs(xt: Var => Term): Term = this
}
/** Recursive term */
case class RecTerm(f: Func, params: List[Term]) extends Term {
def this(f: Func, t: Term) = this(f, List(t))
override def toString = f match {
    case FuncSym(name, _) => name + params.mkString("(", ", ", ")")
case BinOp(name) => params.head.toString + name + params.last.toString
case _ => super.toString
}
val freeVars: Set[Var] = (params map (_.freeVars)) reduce (_ union _)
def subs(xt: Var => Term): Term = RecTerm(f, params map (_.subs(xt)))
}
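/**
 * Illustrative sketch, not part of the original file: building terms and formulas
 * with the operator DSL above. `BinOp` comes from elsewhere in this package.
 */
object TermExamples {
  val x = VarSym("x")
  val y = VarSym("y")
  // (x + 1) <= y, a Formula built from the formal operators
  val fmla = (x + IntConst(1)) <= y
  // x + 2 with x substituted by y, yielding y + 2
  val shifted = RecTerm(BinOp("+"), List(x, IntConst(2))).subs(x, y)
}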
| siddhartha-gadgil/ProvingGround | fol/src/main/scala/provingground/fol/Term.scala | Scala | mit | 3,220 |
package colossus.testkit
import java.awt.datatransfer.UnsupportedFlavorException
import colossus.service.{UnmappedCallback, Callback}
import scala.concurrent.duration._
import scala.util.Try
class CallbackMatchersSpec extends ColossusSpec {
import CallbackMatchers._
val duration = 1.second
implicit val cbe = FakeIOSystem.testExecutor
"A CallbackMatcher" should {
"fail to match if a Callback never executes" in {
def cbFunc(f: Try[Int] => Unit) {}
val unmapped = UnmappedCallback(cbFunc)
var execd = false
val result = new CallbackEvaluateTo[Int](duration, a => execd = true).apply(unmapped)
result.matches must equal(false)
execd must equal(false)
}
"fail to match if the 'evaluate' function throws" in {
var execd = false
val cb = Callback.successful("success!")
val eval = (a: String) => {
execd = true
a must equal("awesome!")
}
val result = new CallbackEvaluateTo[String](duration, eval).apply(cb)
result.matches must equal(false)
execd must equal(true)
}
"fail to match if a Callback reports a failure" in {
val cb = Callback.failed(new Exception("bam"))
var execd = false
val result = new CallbackEvaluateTo[Any](duration, a => execd = true).apply(cb)
result.matches must equal(false)
execd must equal(false)
}
"success if the callback successfully executes the evaluate function" in {
var execd = false
val cb = Callback.successful("success!")
val eval = (a: String) => {
execd = true
a must equal("success!")
}
val result = new CallbackEvaluateTo[String](duration, eval).apply(cb)
result.matches must equal(true)
execd must equal(true)
}
"report the error for a failed callback" in {
val cb = Callback.successful("YAY").map { t =>
throw new Exception("NAY")
}
var execd = false
val eval = (a: String) => {
execd = true
a must equal("YAY")
}
val result = new CallbackEvaluateTo[String](duration, eval).apply(cb)
result.matches must equal(false)
execd must equal(false)
result.failureMessage.contains("NAY") must equal(true)
}
}
"A CallbackFailTo matcher" should {
"fail to match if the 'evaluate' function throws" in {
var execd = false
val cb = Callback.failed(new Exception("D'OH!"))
val eval = (a: Throwable) => {
execd = true
a must equal("awesome!")
}
val result = new CallbackFailTo[String, Throwable](duration, eval).apply(cb)
result.matches must equal(false)
execd must equal(true)
}
"fail to match if the Callback executes successfully" in {
var execd = false
val cb = Callback.successful("success!")
val eval = (a: Throwable) => {
execd = true
a must not be null
}
val result = new CallbackFailTo[String, Throwable](duration, eval).apply(cb)
result.matches must equal(false)
execd must equal(false)
}
"fail to match if a Callback throws the wrong exception" in {
var execd = false
val cb = Callback.failed(new IllegalArgumentException("D'OH!"))
val eval = (a: UnsupportedFlavorException) => {
execd = true
}
val result = new CallbackFailTo[String, UnsupportedFlavorException](duration, eval).apply(cb)
result.matches must equal(false)
execd must equal(false)
}
"match if a Callback fails" in {
var execd = false
val cb = Callback.failed(new Exception("D'OH!"))
val eval = (a: Throwable) => {
execd = true
}
val result = new CallbackFailTo[String, Throwable](duration, eval).apply(cb)
result.matches must equal(true)
execd must equal(true)
}
}
}
| tumblr/colossus | colossus-testkit/src/test/scala/colossus.testkit/CallbackMatchersSpec.scala | Scala | apache-2.0 | 3,876 |
/*
* The MIT License (MIT)
*
* Copyright (c) 2017 Γngel Cervera Claudio
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*
*/
package com.acervera.osm4scala
import java.io.{FileInputStream, InputStream}
import org.scalatest.WordSpec
class BlobTupleIteratorSpec extends WordSpec {
"The BlobTupleIterator should" should {
"Read three pairs" in {
val testFile = "core/src/test/resources/com/acervera/osm4scala/fileblock/three_blocks.osm.pbf"
var counter = 0
var pbfIS: InputStream = null
try {
pbfIS = new FileInputStream(testFile)
BlobTupleIterator fromPbf(pbfIS) foreach(x => counter += 1)
assert(counter == 3, "There are 3 blocks!")
} finally {
if (pbfIS != null) pbfIS.close()
}
}
"Read ten pairs" in {
val testFile = "core/src/test/resources/com/acervera/osm4scala/fileblock/ten_blocks.osm.pbf"
var counter = 0
var pbfIS: InputStream = null
try {
pbfIS = new FileInputStream(testFile)
BlobTupleIterator fromPbf(pbfIS) foreach(x => counter += 1)
assert(counter == 10, "There are 10 blocks!")
} finally {
if (pbfIS != null) pbfIS.close()
}
}
}
}
| angelcervera/pbf4scala | core/src/test/scala/com/acervera/osm4scala/BlobTupleIteratorSpec.scala | Scala | mit | 2,240 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.plan.rules.datastream
import org.apache.calcite.plan.{RelOptRule, RelOptRuleCall, RelTraitSet}
import org.apache.calcite.rel.RelNode
import org.apache.calcite.rel.convert.ConverterRule
import org.apache.calcite.rel.logical.LogicalAggregate
import org.apache.flink.table.api.TableException
import org.apache.flink.table.plan.nodes.FlinkConventions
import org.apache.flink.table.plan.nodes.datastream.DataStreamGroupAggregate
import org.apache.flink.table.plan.nodes.logical.FlinkLogicalAggregate
import org.apache.flink.table.plan.schema.RowSchema
import scala.collection.JavaConversions._
/**
* Rule to convert a [[LogicalAggregate]] into a [[DataStreamGroupAggregate]].
*/
class DataStreamGroupAggregateRule
extends ConverterRule(
classOf[FlinkLogicalAggregate],
FlinkConventions.LOGICAL,
FlinkConventions.DATASTREAM,
"DataStreamGroupAggregateRule") {
override def matches(call: RelOptRuleCall): Boolean = {
val agg: FlinkLogicalAggregate = call.rel(0).asInstanceOf[FlinkLogicalAggregate]
// check if we have grouping sets
val groupSets = agg.getGroupSets.size() != 1 || agg.getGroupSets.get(0) != agg.getGroupSet
if (groupSets || agg.indicator) {
throw new TableException("GROUPING SETS are currently not supported.")
}
!groupSets && !agg.indicator
}
override def convert(rel: RelNode): RelNode = {
val agg: FlinkLogicalAggregate = rel.asInstanceOf[FlinkLogicalAggregate]
val traitSet: RelTraitSet = rel.getTraitSet.replace(FlinkConventions.DATASTREAM)
val convInput: RelNode = RelOptRule.convert(agg.getInput, FlinkConventions.DATASTREAM)
new DataStreamGroupAggregate(
rel.getCluster,
traitSet,
convInput,
agg.getNamedAggCalls,
new RowSchema(rel.getRowType),
new RowSchema(agg.getInput.getRowType),
agg.getGroupSet.toArray)
}
}
object DataStreamGroupAggregateRule {
val INSTANCE: RelOptRule = new DataStreamGroupAggregateRule
}
| hequn8128/flink | flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/plan/rules/datastream/DataStreamGroupAggregateRule.scala | Scala | apache-2.0 | 2,799 |
import java.time.LocalDateTime
import java.time.format.DateTimeFormatter
import com.spotify.docker.client.DefaultDockerClient
import sbt.Keys._
import sbt.Tests.Group
import sbt._
import scala.util.control.NonFatal
// Separate projects for integration tests because of IDEA: https://youtrack.jetbrains.com/issue/SCL-14363#focus=streamItem-27-3061842.0-0
object IntegrationTestsPlugin extends AutoPlugin {
object autoImport extends ItKeys
import autoImport._
override def projectSettings: Seq[Def.Setting[_]] =
inConfig(Test)(
Seq(
logDirectory := {
val runId = Option(System.getenv("RUN_ID")).getOrElse {
val formatter = DateTimeFormatter.ofPattern("MM-dd--HH_mm_ss")
formatter.format(LocalDateTime.now()) // git branch?
}
val r = target.value / "logs" / runId
IO.createDirectory(r)
r
},
// Example: SCALATEST_EXCLUDE_TAGS="package1.Tag1 package2.Tag2 package3.Tag3"
testOptions += {
val excludeTags = sys.env.get("SCALATEST_EXCLUDE_TAGS").fold(Seq.empty[String])(Seq("-l", _))
/* http://www.scalatest.org/user_guide/using_the_runner
* f - select the file reporter with output directory
* F - show full stack traces
* W - without color
*/
val args = Seq("-fFW", (logDirectory.value / "summary.log").toString) ++ excludeTags
Tests.Argument(TestFrameworks.ScalaTest, args: _*)
},
parallelExecution := true,
testGrouping := {
// ffs, sbt!
// https://github.com/sbt/sbt/issues/3266
val javaHomeValue = javaHome.value
val logDirectoryValue = logDirectory.value
val envVarsValue = envVars.value
val javaOptionsValue = javaOptions.value
for {
group <- testGrouping.value
suite <- group.tests
} yield Group(
suite.name,
Seq(suite),
Tests.SubProcess(
ForkOptions(
javaHome = javaHomeValue,
outputStrategy = outputStrategy.value,
bootJars = Vector.empty[java.io.File],
workingDirectory = Option(baseDirectory.value),
runJVMOptions = Vector(
"-Dwaves.it.logging.appender=FILE",
s"-Dwaves.it.logging.dir=${logDirectoryValue / suite.name.replaceAll("""(\\w)\\w*\\.""", "$1.")}" // foo.bar.Baz -> f.b.Baz
) ++ javaOptionsValue,
connectInput = false,
envVars = envVarsValue
)
)
)
}
)
) ++ inScope(Global)(
Seq(
maxParallelSuites := Option(Integer.getInteger("waves.it.max-parallel-suites"))
.getOrElse[Integer] {
try {
val docker = DefaultDockerClient.fromEnv().build()
try {
val dockerCpu: Int = docker.info().cpus()
sLog.value.info(s"Docker CPU count: $dockerCpu")
dockerCpu * 2
} finally docker.close()
} catch {
case NonFatal(_) =>
sLog.value.warn(s"Could not connect to Docker, is the daemon running?")
sLog.value.info(s"System CPU count: ${EvaluateTask.SystemProcessors}")
EvaluateTask.SystemProcessors
}
},
concurrentRestrictions := Seq(
Tags.limit(Tags.ForkedTestGroup, maxParallelSuites.value)
)
)
)
}
trait ItKeys {
val logDirectory = taskKey[File]("The directory where logs of integration tests are written")
val maxParallelSuites = settingKey[Int]("Number of test suites to run in parallel")
}
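// Illustrative sketch, not part of the original file: enabling the plugin for a
// dedicated integration-test subproject in build.sbt (project names are made up).
//
//   lazy val `node-it` = project
//     .enablePlugins(IntegrationTestsPlugin)
//     .dependsOn(node)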
| wavesplatform/Waves | project/IntegrationTestsPlugin.scala | Scala | mit | 3,775 |
package com.twitter.finagle.redis
import com.twitter.finagle.{Service, ClientConnection, ServiceFactory, ServiceProxy}
import com.twitter.finagle.redis.exp.{RedisPool, SubscribeCommands}
import com.twitter.finagle.redis.protocol._
import com.twitter.finagle.util.DefaultTimer
import com.twitter.io.Buf
import com.twitter.util.{Closable, Future, Time, Timer}
object Client {
/**
* Construct a client from a single host.
* @param host a String of host:port combination.
*/
def apply(host: String): Client =
new Client(com.twitter.finagle.Redis.client.newClient(host))
/**
* Construct a client from a single Service.
*/
def apply(raw: ServiceFactory[Command, Reply]): Client =
new Client(raw)
}
class Client(
override val factory: ServiceFactory[Command, Reply],
private[redis] val timer: Timer = DefaultTimer)
extends BaseClient(factory)
with NormalCommands
with SubscribeCommands
with Transactions
trait NormalCommands
extends KeyCommands
with StringCommands
with HashCommands
with SortedSetCommands
with ListCommands
with SetCommands
with BtreeSortedSetCommands
with TopologyCommands
with HyperLogLogCommands
with PubSubCommands
with ServerCommands
with ScriptCommands
with ConnectionCommands { self: BaseClient =>
}
trait Transactions { self: Client =>
private[this] def singletonFactory(): ServiceFactory[Command, Reply] =
new ServiceFactory[Command, Reply] {
val svc: Future[Service[Command, Reply]] = RedisPool.forTransaction(factory)
// Because the `singleton` is used in the context of a `FactoryToService` we override
// `Service#close` to ensure that we can control the checkout lifetime of the `Service`.
val proxiedService: Future[ServiceProxy[Command, Reply]] =
svc.map { underlying =>
new ServiceProxy(underlying) {
override def close(deadline: Time) = Future.Done
}
}
def apply(conn: ClientConnection) = proxiedService
def close(deadline: Time): Future[Unit] = svc.map(_.close(deadline))
}
def transaction[T](cmds: Seq[Command]): Future[Seq[Reply]] =
transactionSupport(_.transaction(cmds))
def transaction[T](f: NormalCommands => Future[_]): Future[Seq[Reply]] =
transactionSupport(_.transaction(f))
def transactionSupport[T](f: TransactionalClient => Future[T]): Future[T] = {
val singleton = singletonFactory()
val client = new TransactionalClient(singleton)
f(client).ensure {
client.reset().ensure(singleton.close())
}
}
}
/**
* Connects to a single Redis host
* @param factory: Finagle service factory object built with the Redis codec
*/
abstract class BaseClient(
protected val factory: ServiceFactory[Command, Reply])
extends Closable {
/**
* Releases underlying service factory object
*/
def close(deadline: Time): Future[Unit] = factory.close(deadline)
/**
* Helper function for passing a command to the service
*/
private[redis] def doRequest[T](cmd: Command)(handler: PartialFunction[Reply, Future[T]]): Future[T] = {
factory.toService.apply(cmd).flatMap (handler orElse {
case ErrorReply(message) => Future.exception(new ServerError(message))
case StatusReply("QUEUED") => Future.Done.asInstanceOf[Future[Nothing]]
case _ => Future.exception(new IllegalStateException)
})}
/**
* Helper function to convert a Redis multi-bulk reply into a map of pairs
*/
private[redis] def returnPairs[A](messages: Seq[A]) = {
assert(messages.length % 2 == 0, "Odd number of items in response")
messages.grouped(2).toSeq.flatMap {
case Seq(a, b) => Some((a, b))
case _ => None
}
}
}
object TransactionalClient {
/**
* Construct a client from a single host with transaction commands
* @param host a String of host:port combination.
*/
def apply(host: String): TransactionalClient =
new TransactionalClient(com.twitter.finagle.Redis.client.newClient(host))
/**
* Construct a client from a service factory
* @param raw ServiceFactory
*/
def apply(raw: ServiceFactory[Command, Reply]): TransactionalClient =
new TransactionalClient(raw)
}
/**
* Client connected over a single connection to a
* single redis instance, supporting transactions
*/
class TransactionalClient(factory: ServiceFactory[Command, Reply])
extends BaseClient(factory) with NormalCommands {
private[this] var _multi = false
private[this] var _watch = false
/**
* Flushes all previously watched keys for a transaction
*/
def unwatch(): Future[Unit] =
doRequest(UnWatch) {
case StatusReply(message) =>
_watch = false
Future.Unit
}
/**
* Marks given keys to be watched for conditional execution of a transaction
* @param keys to watch
*/
def watches(keys: Seq[Buf]): Future[Unit] =
doRequest(Watch(keys)) {
case StatusReply(message) =>
_watch = true
Future.Unit
}
def multi(): Future[Unit] =
doRequest(Multi) {
case StatusReply(message) =>
_multi = true
Future.Unit
}
def exec(): Future[Seq[Reply]] =
doRequest(Exec) {
case MBulkReply(messages) =>
_watch = false
_multi = false
Future.value(messages)
case EmptyMBulkReply =>
_watch = false
_multi = false
Future.Nil
case NilMBulkReply =>
_watch = false
_multi = false
Future.exception(new ServerError("One or more keys were modified before transaction"))
}
def discard(): Future[Unit] =
    doRequest(Discard) {
case StatusReply(message) =>
_multi = false
_watch = false
Future.Unit
}
def transaction[T](cmds: Seq[Command]): Future[Seq[Reply]] = {
transaction {
cmds.iterator
.map(cmd => factory().flatMap(_(cmd)).unit)
.reduce(_ before _)
}
}
def transaction[T](f: => Future[_]): Future[Seq[Reply]] = {
multi() before {
f.unit before exec()
} ensure {
reset()
}
}
private[redis] def transaction[T](f: NormalCommands => Future[_]): Future[Seq[Reply]] = {
transaction(f(this))
}
private[redis] def reset(): Future[Unit] = {
if (_multi) discard()
else if (_watch) unwatch()
else Future.Done
}
}
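// Illustrative sketch, not part of the original file: running two commands as one
// atomic transaction. The Set/Get command constructors are assumed from the
// protocol package, and the host is hypothetical.
//
//   val client = TransactionalClient("localhost:6379")
//   val replies: Future[Seq[Reply]] =
//     client.transaction(Seq(Set(Buf.Utf8("k"), Buf.Utf8("v")), Get(Buf.Utf8("k"))))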
| koshelev/finagle | finagle-redis/src/main/scala/com/twitter/finagle/redis/Client.scala | Scala | apache-2.0 | 6,338 |
package com.wordtrellis.scala
import scala.collection.immutable.List
import scala.collection.mutable
import scala.collection.mutable.ListBuffer
import scala.math.Ordered
/**
*
*
* @author Todd Cook
*
*/
class KeyCount[T](val key: T, val count: Int) extends Ordered[KeyCount[T]] {
  // Note: compare, equals and hashCode should all be similar in their tests
def compare(that: KeyCount[T]): Int = {
if (count > that.count) 1
else if (count < that.count) -1
else 0
}
override def equals(other: Any): Boolean = other match {
case that: KeyCount[T] => this.key == that.key
case _ => false
}
override def hashCode: Int = key.hashCode
override def toString: String = s"$key $count"
}
class FrequencyMap[T](val items: List[T]) {
private var keyMap = new mutable.HashMap[T, Int]
/**
* Sample size is useful for limiting the size of frequency map entries by
* effectively truncating rare entries, if the FrequencyMap is growing to
* large.
* Usually this value is only necessary when generating a large key space
* e.g. random letter combos number four or greater.
*/
private var samplingSize = 0
def this() = this(List[T]()) // auxiliary constructor
def setSampleFiltration(sample: Int): Unit = {
samplingSize = sample
}
// convenience method; sometimes the number of occurrences is known; e.g. caching
def add(item: T, occurrences: Int): Unit = {
if (samplingSize != 0 && keyMap.size > samplingSize)
trimSamples()
if (!keyMap.contains(item))
keyMap += (item -> occurrences)
else
keyMap += (item -> (keyMap(item) + occurrences))
}
def trimSamples(): Unit = {
// TODO decide if filtration floor is helpful
// samplingSize= sample; filtrationFloor
val newKClist = getKeyCountList.slice(0, samplingSize)
val toRemove = getKeyCountList diff newKClist
toRemove.foreach(x => removeKey(x.key))
}
def removeKey(key: T): Option[Int] = keyMap.remove(key)
// returns a sorted list of KeyCounts
def getKeyCountList(): List[KeyCount[T]] = {
val cm = keyMap
val kcList = new ListBuffer[KeyCount[T]]()
for ((key, count: Int) <- cm)
kcList.append(new KeyCount(key, count))
kcList.toList.sortWith(_ > _)
}
def addAll(items: List[T]): Unit = {
items.foreach(x => add(x))
}
def add(item: T): Unit = {
if (samplingSize != 0 && keyMap.size > samplingSize)
trimSamples()
if (!keyMap.contains(item))
keyMap += (item -> 1)
else
keyMap += (item -> (keyMap(item) + 1))
}
// returns list of keys, sorted by frequency
def getKeyList: List[T] =
for { kc <- getKeyCountList().sortWith(_.count > _.count); k = kc.key } yield k
// returns list of values, sorted by frequency
def getValueList: List[Int] = for { kc <- getKeyCountList(); k = kc.count } yield k
def getKeyListFloor(floor: Int): List[T] = for { kc <- floorList(floor); k = kc.key } yield k
// items having floorVal count and above
def floorList(floorVal: Int): List[KeyCount[T]] = getKeyCountList filter (_.count >= floorVal)
def getKeyMap: mutable.HashMap[T, Int] = keyMap
def size(): Int = keyMap.size
def slice(start: Int, end: Int): List[KeyCount[T]] = getKeyCountList.slice(start, end)
override def toString: String = keyMap.toString
def toList(): List[String] = {
val lines = new ListBuffer[String]()
keyMap.keysIterator.toList.foreach(x => lines.append(s"$x : ${keyMap(x)}"))
lines.toList
}
/**
* Often a frequency map most useful as a map with probability distribution values;
* so that the total sum of the map of probabilities equals one or nearly so.
*/
def toProbabilityMap(): Map[T, Double] = {
val total: Double = keyMap.values.toList.sum * 1.0D
val (keys, values) = keyMap.toList.unzip
val newValues = values.map(x => x / total)
keys.zip(newValues).toMap
}
}
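/**
 * Illustrative usage sketch, not part of the original file.
 */
object FrequencyMapExample extends App {
  val fm = new FrequencyMap[Char]()
  fm.addAll("mississippi".toList)
  println(fm.getKeyList)         // keys by descending count, e.g. List(i, s, p, m) modulo ties
  println(fm.floorList(2))       // entries seen at least twice
  println(fm.toProbabilityMap()) // distribution over characters; values sum to ~1.0
}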
| todd-cook/Frequency-Analyzer | src/main/scala/com/wordtrellis/scala/FrequencyMap.scala | Scala | mit | 3,945 |
package com.mwronski.pathdependenttypes
/**
* Movie
* @param name name of the movie
*/
class Movie(name: String) {
/**
* Character that belongs to chosen movie
* @param name character name
*/
case class Character(name: String)
/**
* Create team from characters
* @param character1 some character
* @param character2 other character
* @return non-nullable
*/
def team(character1: Character, character2: Character): (Character, Character) = (character1, character2)
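  // Illustrative sketch, not part of the original file: because Character is a
  // path-dependent type, each Movie instance owns its own Character type, so the
  // second call below would not compile.
  //
  //   val matrix = new Movie("Matrix")
  //   val rocky  = new Movie("Rocky")
  //   matrix.team(matrix.Character("Neo"), matrix.Character("Trinity")) // OK
  //   matrix.team(matrix.Character("Neo"), rocky.Character("Balboa"))   // type mismatch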
}
| m-wrona/scala-samples | src/main/scala/com/mwronski/pathdependenttypes/Movie.scala | Scala | mit | 515 |
import collection.mutable.HashMap
trait CoderTrait {
val words: List[String]
(2 -> "ABC", new ArrowAssoc('3') -> "DEF")
private val mnemonics = Map(
'2' -> "ABC", '3' -> "DEF", '4' -> "GHI", '5' -> "JKL",
'6' -> "MNO", '7' -> "PQRS", '8' -> "TUV", '9' -> "WXYZ")
('1', "1") match {
case (digit, str) => true
case _ => false
}
/** Invert the mnemonics map to give a map from chars 'A' ... 'Z' to '2' ... '9' */
private val charCode0: Map[Char, Char] = mnemonics withFilter {
case (digit, str) => true
case _ => false
} flatMap { x$1 =>
x$1 match {
case (digit, str) => str map (ltr => ltr -> digit)
}
}
private val charCode: Map[Char, Char] =
for ((digit, str) <- mnemonics; ltr <- str) yield ltr -> digit
/** Maps a word to the digit string it can represent */
private def wordCode(word: String): String = word map charCode
/** A map from digit strings to the words that represent them */
private val wordsForNum: Map[String, List[String]] =
words groupBy wordCode withDefaultValue Nil
/** All ways to encode a number as a list of words */
def encode(number: String): Set[List[String]] =
if (number.isEmpty) Set(Nil)
else {
for {
splitPoint <- 1 to number.length
word <- wordsForNum(number take splitPoint)
rest <- encode(number drop splitPoint)
} yield word :: rest
}.toSet
/** Maps a number to a list of all word phrases that can represent it */
def translate(number: String): Set[String] = encode(number) map (_ mkString " ")
}
object Coder {
def main(args : Array[String]) : Unit = {
val coder = new CoderTrait {
val words = List("Scala", "sobls", "Python", "Ruby", "C", "A", "rocks", "sucks", "works", "Racka")
}
// println(coder.wordsForNum)
println(coder.translate("7225276257"))
}
}
| yusuke2255/dotty | tests/pos/CoderTrait.scala | Scala | bsd-3-clause | 1,867 |
package chee
import chee.conf.CheeConf
class LoggingProperty extends ch.qos.logback.core.PropertyDefinerBase {
private val config = CheeConf.defaultConfig
@scala.beans.BeanProperty
var name: String = ""
def getPropertyValue(): String = name match {
case "logfile" => config.getString("chee.logFile")
case "loglevel" => config.getString("chee.logLevel")
case _ => ""
}
}
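// Illustrative sketch, not part of the original file: how this PropertyDefiner is
// typically wired into logback.xml via logback's <define> tag (file name assumed):
//
//   <define name="logfile" class="chee.LoggingProperty">
//     <name>logfile</name>
//   </define>
//   <file>${logfile}</file>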
| eikek/chee | src/main/scala/chee/LoggingProperty.scala | Scala | gpl-3.0 | 397 |
/**
* Copyright 2015 Thomson Reuters
*
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
 * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cmwell.ctrl.service
import akka.actor.Actor
import akka.actor.Actor.Receive
import akka.pattern.{ask, pipe}
import akka.util.Timeout
import cmwell.ctrl.hc.HealthActor
import scala.concurrent.ExecutionContext.Implicits.global
import scala.language.postfixOps
import scala.concurrent.duration._
/**
* Created by michael on 2/1/16.
*/
class ClusterServiceActor extends Actor {
implicit val timeout = Timeout(3 seconds)
override def receive: Receive = {
case msg => (HealthActor.ref ? msg).pipeTo(sender)
}
}
| hochgi/CM-Well | server/cmwell-controller/src/main/scala/cmwell/ctrl/service/ClusterServiceActor.scala | Scala | apache-2.0 | 1,145 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.optimizer
import org.apache.spark.sql.ProjectForUpdate
import org.apache.spark.sql.catalyst.expressions.{NamedExpression, PredicateHelper}
import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, Project}
import org.apache.spark.sql.catalyst.rules.Rule
import org.apache.spark.sql.execution.command.mutation.CarbonProjectForUpdateCommand
import org.apache.carbondata.core.constants.CarbonCommonConstants
/**
* Rule specific for IUD operations
*/
class CarbonIUDRule extends Rule[LogicalPlan] with PredicateHelper {
override def apply(plan: LogicalPlan): LogicalPlan = {
processPlan(plan)
}
private def processPlan(plan: LogicalPlan): LogicalPlan = {
plan transform {
case ProjectForUpdate(table, cols, Seq(updatePlan)) =>
var isTransformed = false
val newPlan = updatePlan transform {
case Project(pList, child) if !isTransformed =>
var (dest: Seq[NamedExpression], source: Seq[NamedExpression]) = pList
.splitAt(pList.size - cols.size)
// check complex column
cols.foreach { col =>
            val complexExists = "\"name\":\"" + col + "\""
if (dest.exists(m => m.dataType.json.contains(complexExists))) {
throw new UnsupportedOperationException(
"Unsupported operation on Complex data type")
}
}
// check updated columns exists in table
val diff = cols.diff(dest.map(_.name.toLowerCase))
if (diff.nonEmpty) {
sys.error(s"Unknown column(s) ${ diff.mkString(",") } in table ${ table.tableName }")
}
// modify plan for updated column *in place*
isTransformed = true
source.foreach { col =>
val colName = col.name.substring(0,
col.name.lastIndexOf(CarbonCommonConstants.UPDATED_COL_EXTENSION))
val updateIdx = dest.indexWhere(_.name.equalsIgnoreCase(colName))
dest = dest.updated(updateIdx, col)
}
Project(dest, child)
}
CarbonProjectForUpdateCommand(
newPlan, table.tableIdentifier.database, table.tableIdentifier.table, cols)
}
}
}
| jackylk/incubator-carbondata | integration/spark/src/main/scala/org/apache/spark/sql/optimizer/CarbonIUDRule.scala | Scala | apache-2.0 | 3,063 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.deploy.k8s.features
import scala.collection.JavaConverters._
import io.fabric8.kubernetes.api.model.{ContainerBuilder, EnvVarBuilder}
import org.apache.spark.deploy.k8s._
import org.apache.spark.deploy.k8s.Config._
import org.apache.spark.deploy.k8s.Constants._
import org.apache.spark.deploy.k8s.submit._
import org.apache.spark.internal.Logging
import org.apache.spark.internal.config.{PYSPARK_DRIVER_PYTHON, PYSPARK_PYTHON}
import org.apache.spark.launcher.SparkLauncher
/**
* Creates the driver command for running the user app, and propagates needed configuration so
* executors can also find the app code.
*/
private[spark] class DriverCommandFeatureStep(conf: KubernetesDriverConf)
extends KubernetesFeatureConfigStep with Logging {
override def configurePod(pod: SparkPod): SparkPod = {
conf.mainAppResource match {
case JavaMainAppResource(res) =>
configureForJava(pod, res.getOrElse(SparkLauncher.NO_RESOURCE))
case PythonMainAppResource(res) =>
configureForPython(pod, res)
case RMainAppResource(res) =>
configureForR(pod, res)
}
}
override def getAdditionalPodSystemProperties(): Map[String, String] = {
val appType = conf.mainAppResource match {
case JavaMainAppResource(_) =>
APP_RESOURCE_TYPE_JAVA
case PythonMainAppResource(_) =>
APP_RESOURCE_TYPE_PYTHON
case RMainAppResource(_) =>
APP_RESOURCE_TYPE_R
}
Map(APP_RESOURCE_TYPE.key -> appType)
}
private def configureForJava(pod: SparkPod, res: String): SparkPod = {
// re-write primary resource, app jar is also added to spark.jars by default in SparkSubmit
// no uploading takes place here
val newResName = KubernetesUtils
.renameMainAppResource(resource = res, shouldUploadLocal = false)
val driverContainer = baseDriverContainer(pod, newResName).build()
SparkPod(pod.pod, driverContainer)
}
// Exposed for testing purpose.
private[spark] def environmentVariables: Map[String, String] = sys.env
private def configureForPython(pod: SparkPod, res: String): SparkPod = {
if (conf.get(PYSPARK_MAJOR_PYTHON_VERSION).isDefined) {
logWarning(
s"${PYSPARK_MAJOR_PYTHON_VERSION.key} was deprecated in Spark 3.1. " +
s"Please set '${PYSPARK_PYTHON.key}' and '${PYSPARK_DRIVER_PYTHON.key}' " +
s"configurations or $ENV_PYSPARK_PYTHON and $ENV_PYSPARK_DRIVER_PYTHON environment " +
"variables instead.")
}
val pythonEnvs =
Seq(
conf.get(PYSPARK_PYTHON)
.orElse(environmentVariables.get(ENV_PYSPARK_PYTHON)).map { value =>
new EnvVarBuilder()
.withName(ENV_PYSPARK_PYTHON)
.withValue(value)
.build()
},
conf.get(PYSPARK_DRIVER_PYTHON)
.orElse(conf.get(PYSPARK_PYTHON))
.orElse(environmentVariables.get(ENV_PYSPARK_DRIVER_PYTHON))
.orElse(environmentVariables.get(ENV_PYSPARK_PYTHON)).map { value =>
new EnvVarBuilder()
.withName(ENV_PYSPARK_DRIVER_PYTHON)
.withValue(value)
.build()
}
).flatten
// re-write primary resource to be the remote one and upload the related file
val newResName = KubernetesUtils
.renameMainAppResource(res, Option(conf.sparkConf), true)
val pythonContainer = baseDriverContainer(pod, newResName)
.addAllToEnv(pythonEnvs.asJava)
.build()
SparkPod(pod.pod, pythonContainer)
}
private def configureForR(pod: SparkPod, res: String): SparkPod = {
val rContainer = baseDriverContainer(pod, res).build()
SparkPod(pod.pod, rContainer)
}
private def baseDriverContainer(pod: SparkPod, resource: String): ContainerBuilder = {
// re-write primary resource, app jar is also added to spark.jars by default in SparkSubmit
val resolvedResource = if (conf.mainAppResource.isInstanceOf[JavaMainAppResource]) {
KubernetesUtils.renameMainAppResource(resource, Option(conf.sparkConf), false)
} else {
resource
}
var proxyUserArgs = Seq[String]()
if (!conf.proxyUser.isEmpty) {
proxyUserArgs = proxyUserArgs :+ "--proxy-user"
proxyUserArgs = proxyUserArgs :+ conf.proxyUser.get
}
new ContainerBuilder(pod.container)
.addToArgs("driver")
.addToArgs(proxyUserArgs: _*)
.addToArgs("--properties-file", SPARK_CONF_PATH)
.addToArgs("--class", conf.mainClass)
.addToArgs(resolvedResource)
.addToArgs(conf.appArgs: _*)
}
}
| maropu/spark | resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/features/DriverCommandFeatureStep.scala | Scala | apache-2.0 | 5,353 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.codegen.calls
import org.apache.calcite.avatica.util.DateTimeUtils.MILLIS_PER_DAY
import org.apache.calcite.avatica.util.{DateTimeUtils, TimeUnitRange}
import org.apache.calcite.util.BuiltInMethod
import org.apache.flink.api.common.typeinfo.BasicTypeInfo._
import org.apache.flink.api.common.typeinfo._
import org.apache.flink.api.java.typeutils.{MapTypeInfo, ObjectArrayTypeInfo}
import org.apache.flink.table.codegen.CodeGenUtils._
import org.apache.flink.table.codegen.{CodeGenException, CodeGenerator, GeneratedExpression}
import org.apache.flink.table.typeutils.TimeIntervalTypeInfo
import org.apache.flink.table.typeutils.TypeCheckUtils._
object ScalarOperators {
def generateStringConcatOperator(
nullCheck: Boolean,
left: GeneratedExpression,
right: GeneratedExpression)
: GeneratedExpression = {
generateOperatorIfNotNull(nullCheck, STRING_TYPE_INFO, left, right) {
(leftTerm, rightTerm) => s"$leftTerm + $rightTerm"
}
}
def generateArithmeticOperator(
operator: String,
nullCheck: Boolean,
resultType: TypeInformation[_],
left: GeneratedExpression,
right: GeneratedExpression)
: GeneratedExpression = {
val leftCasting = numericCasting(left.resultType, resultType)
val rightCasting = numericCasting(right.resultType, resultType)
val resultTypeTerm = primitiveTypeTermForTypeInfo(resultType)
generateOperatorIfNotNull(nullCheck, resultType, left, right) {
(leftTerm, rightTerm) =>
if (isDecimal(resultType)) {
s"${leftCasting(leftTerm)}.${arithOpToDecMethod(operator)}(${rightCasting(rightTerm)})"
} else {
s"($resultTypeTerm) (${leftCasting(leftTerm)} $operator ${rightCasting(rightTerm)})"
}
}
}
def generateUnaryArithmeticOperator(
operator: String,
nullCheck: Boolean,
resultType: TypeInformation[_],
operand: GeneratedExpression)
: GeneratedExpression = {
generateUnaryOperatorIfNotNull(nullCheck, resultType, operand) {
(operandTerm) =>
if (isDecimal(operand.resultType) && operator == "-") {
s"$operandTerm.negate()"
} else if (isDecimal(operand.resultType) && operator == "+") {
s"$operandTerm"
} else {
s"$operator($operandTerm)"
}
}
}
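  /**
    * Generates equality code. Numeric, temporal and comparable operands delegate to
    * [[generateComparison]]; arrays are compared via java.util.Arrays.equals and other
    * reference types fall back to equals().
    */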
def generateEquals(
nullCheck: Boolean,
left: GeneratedExpression,
right: GeneratedExpression)
: GeneratedExpression = {
// numeric types
if (isNumeric(left.resultType) && isNumeric(right.resultType)) {
generateComparison("==", nullCheck, left, right)
}
// temporal types
else if (isTemporal(left.resultType) && left.resultType == right.resultType) {
generateComparison("==", nullCheck, left, right)
}
// array types
else if (isArray(left.resultType) &&
left.resultType.getTypeClass == right.resultType.getTypeClass) {
generateOperatorIfNotNull(nullCheck, BOOLEAN_TYPE_INFO, left, right) {
(leftTerm, rightTerm) => s"java.util.Arrays.equals($leftTerm, $rightTerm)"
}
}
// comparable types of same type
else if (isComparable(left.resultType) && left.resultType == right.resultType) {
generateComparison("==", nullCheck, left, right)
}
    // non-comparable types
else {
generateOperatorIfNotNull(nullCheck, BOOLEAN_TYPE_INFO, left, right) {
if (isReference(left)) {
(leftTerm, rightTerm) => s"$leftTerm.equals($rightTerm)"
}
else if (isReference(right)) {
(leftTerm, rightTerm) => s"$rightTerm.equals($leftTerm)"
}
else {
throw new CodeGenException(s"Incomparable types: ${left.resultType} and " +
s"${right.resultType}")
}
}
}
}
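  /**
    * Generates inequality code; mirrors [[generateEquals]] with negated comparisons.
    */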
def generateNotEquals(
nullCheck: Boolean,
left: GeneratedExpression,
right: GeneratedExpression)
: GeneratedExpression = {
// numeric types
if (isNumeric(left.resultType) && isNumeric(right.resultType)) {
generateComparison("!=", nullCheck, left, right)
}
// temporal types
else if (isTemporal(left.resultType) && left.resultType == right.resultType) {
generateComparison("!=", nullCheck, left, right)
}
// array types
else if (isArray(left.resultType) &&
left.resultType.getTypeClass == right.resultType.getTypeClass) {
generateOperatorIfNotNull(nullCheck, BOOLEAN_TYPE_INFO, left, right) {
(leftTerm, rightTerm) => s"!java.util.Arrays.equals($leftTerm, $rightTerm)"
}
}
// comparable types
else if (isComparable(left.resultType) && left.resultType == right.resultType) {
generateComparison("!=", nullCheck, left, right)
}
// non-comparable types
else {
generateOperatorIfNotNull(nullCheck, BOOLEAN_TYPE_INFO, left, right) {
if (isReference(left)) {
(leftTerm, rightTerm) => s"!($leftTerm.equals($rightTerm))"
}
else if (isReference(right)) {
(leftTerm, rightTerm) => s"!($rightTerm.equals($leftTerm))"
}
else {
throw new CodeGenException(s"Incomparable types: ${left.resultType} and " +
s"${right.resultType}")
}
}
}
}
/**
* Generates comparison code for numeric types and comparable types of same type.
*/
def generateComparison(
operator: String,
nullCheck: Boolean,
left: GeneratedExpression,
right: GeneratedExpression)
: GeneratedExpression = {
generateOperatorIfNotNull(nullCheck, BOOLEAN_TYPE_INFO, left, right) {
// left is decimal or both sides are decimal
if (isDecimal(left.resultType) && isNumeric(right.resultType)) {
(leftTerm, rightTerm) => {
val operandCasting = numericCasting(right.resultType, left.resultType)
s"$leftTerm.compareTo(${operandCasting(rightTerm)}) $operator 0"
}
}
// right is decimal
else if (isNumeric(left.resultType) && isDecimal(right.resultType)) {
(leftTerm, rightTerm) => {
val operandCasting = numericCasting(left.resultType, right.resultType)
s"${operandCasting(leftTerm)}.compareTo($rightTerm) $operator 0"
}
}
// both sides are numeric
else if (isNumeric(left.resultType) && isNumeric(right.resultType)) {
(leftTerm, rightTerm) => s"$leftTerm $operator $rightTerm"
}
// both sides are temporal of same type
else if (isTemporal(left.resultType) && left.resultType == right.resultType) {
(leftTerm, rightTerm) => s"$leftTerm $operator $rightTerm"
}
// both sides are boolean
else if (isBoolean(left.resultType) && left.resultType == right.resultType) {
operator match {
case "==" | "!=" => (leftTerm, rightTerm) => s"$leftTerm $operator $rightTerm"
case _ => throw new CodeGenException(s"Unsupported boolean comparison '$operator'.")
}
}
// both sides are same comparable type
else if (isComparable(left.resultType) && left.resultType == right.resultType) {
(leftTerm, rightTerm) => s"$leftTerm.compareTo($rightTerm) $operator 0"
}
else {
throw new CodeGenException(s"Incomparable types: ${left.resultType} and " +
s"${right.resultType}")
}
}
}
def generateIsNull(
nullCheck: Boolean,
operand: GeneratedExpression)
: GeneratedExpression = {
val resultTerm = newName("result")
val nullTerm = newName("isNull")
val operatorCode = if (nullCheck) {
s"""
|${operand.code}
|boolean $resultTerm = ${operand.nullTerm};
|boolean $nullTerm = false;
|""".stripMargin
}
else if (!nullCheck && isReference(operand)) {
s"""
|${operand.code}
|boolean $resultTerm = ${operand.resultTerm} == null;
|boolean $nullTerm = false;
|""".stripMargin
}
else {
s"""
|${operand.code}
|boolean $resultTerm = false;
|""".stripMargin
}
GeneratedExpression(resultTerm, nullTerm, operatorCode, BOOLEAN_TYPE_INFO)
}
def generateIsNotNull(
nullCheck: Boolean,
operand: GeneratedExpression)
: GeneratedExpression = {
val resultTerm = newName("result")
val nullTerm = newName("isNull")
val operatorCode = if (nullCheck) {
s"""
|${operand.code}
|boolean $resultTerm = !${operand.nullTerm};
|boolean $nullTerm = false;
|""".stripMargin
}
else if (!nullCheck && isReference(operand)) {
s"""
|${operand.code}
|boolean $resultTerm = ${operand.resultTerm} != null;
|boolean $nullTerm = false;
|""".stripMargin
}
else {
s"""
|${operand.code}
|boolean $resultTerm = true;
|""".stripMargin
}
GeneratedExpression(resultTerm, nullTerm, operatorCode, BOOLEAN_TYPE_INFO)
}
def generateAnd(
nullCheck: Boolean,
left: GeneratedExpression,
right: GeneratedExpression)
: GeneratedExpression = {
val resultTerm = newName("result")
val nullTerm = newName("isNull")
val operatorCode = if (nullCheck) {
// Three-valued logic:
// no Unknown -> Two-valued logic
// True && Unknown -> Unknown
// False && Unknown -> False
// Unknown && True -> Unknown
// Unknown && False -> False
// Unknown && Unknown -> Unknown
s"""
|${left.code}
|
|boolean $resultTerm = false;
|boolean $nullTerm = false;
|if (!${left.nullTerm} && !${left.resultTerm}) {
| // left expr is false, skip right expr
|} else {
| ${right.code}
|
| if (!${left.nullTerm} && !${right.nullTerm}) {
| $resultTerm = ${left.resultTerm} && ${right.resultTerm};
| $nullTerm = false;
| }
| else if (!${left.nullTerm} && ${left.resultTerm} && ${right.nullTerm}) {
| $resultTerm = false;
| $nullTerm = true;
| }
| else if (!${left.nullTerm} && !${left.resultTerm} && ${right.nullTerm}) {
| $resultTerm = false;
| $nullTerm = false;
| }
| else if (${left.nullTerm} && !${right.nullTerm} && ${right.resultTerm}) {
| $resultTerm = false;
| $nullTerm = true;
| }
| else if (${left.nullTerm} && !${right.nullTerm} && !${right.resultTerm}) {
| $resultTerm = false;
| $nullTerm = false;
| }
| else {
| $resultTerm = false;
| $nullTerm = true;
| }
|}
""".stripMargin
}
else {
s"""
|${left.code}
|boolean $resultTerm = false;
|if (${left.resultTerm}) {
| ${right.code}
| $resultTerm = ${right.resultTerm};
|}
|""".stripMargin
}
GeneratedExpression(resultTerm, nullTerm, operatorCode, BOOLEAN_TYPE_INFO)
}
def generateOr(
nullCheck: Boolean,
left: GeneratedExpression,
right: GeneratedExpression)
: GeneratedExpression = {
val resultTerm = newName("result")
val nullTerm = newName("isNull")
val operatorCode = if (nullCheck) {
// Three-valued logic:
// no Unknown -> Two-valued logic
// True || Unknown -> True
// False || Unknown -> Unknown
// Unknown || True -> True
// Unknown || False -> Unknown
// Unknown || Unknown -> Unknown
s"""
|${left.code}
|
|boolean $resultTerm = true;
|boolean $nullTerm = false;
|if (!${left.nullTerm} && ${left.resultTerm}) {
| // left expr is true, skip right expr
|} else {
| ${right.code}
|
| if (!${left.nullTerm} && !${right.nullTerm}) {
| $resultTerm = ${left.resultTerm} || ${right.resultTerm};
| $nullTerm = false;
| }
| else if (!${left.nullTerm} && ${left.resultTerm} && ${right.nullTerm}) {
| $resultTerm = true;
| $nullTerm = false;
| }
| else if (!${left.nullTerm} && !${left.resultTerm} && ${right.nullTerm}) {
| $resultTerm = false;
| $nullTerm = true;
| }
| else if (${left.nullTerm} && !${right.nullTerm} && ${right.resultTerm}) {
| $resultTerm = true;
| $nullTerm = false;
| }
| else if (${left.nullTerm} && !${right.nullTerm} && !${right.resultTerm}) {
| $resultTerm = false;
| $nullTerm = true;
| }
| else {
| $resultTerm = false;
| $nullTerm = true;
| }
|}
|""".stripMargin
}
else {
s"""
|${left.code}
|boolean $resultTerm = true;
|if (!${left.resultTerm}) {
| ${right.code}
| $resultTerm = ${right.resultTerm};
|}
|""".stripMargin
}
GeneratedExpression(resultTerm, nullTerm, operatorCode, BOOLEAN_TYPE_INFO)
}
def generateNot(
nullCheck: Boolean,
operand: GeneratedExpression)
: GeneratedExpression = {
// Three-valued logic:
// no Unknown -> Two-valued logic
// Unknown -> Unknown
generateUnaryOperatorIfNotNull(nullCheck, BOOLEAN_TYPE_INFO, operand) {
(operandTerm) => s"!($operandTerm)"
}
}
def generateIsTrue(operand: GeneratedExpression): GeneratedExpression = {
GeneratedExpression(
operand.resultTerm, // unknown is always false by default
GeneratedExpression.NEVER_NULL,
operand.code,
BOOLEAN_TYPE_INFO)
}
def generateIsNotTrue(operand: GeneratedExpression): GeneratedExpression = {
GeneratedExpression(
s"(!${operand.resultTerm})", // unknown is always false by default
GeneratedExpression.NEVER_NULL,
operand.code,
BOOLEAN_TYPE_INFO)
}
def generateIsFalse(operand: GeneratedExpression): GeneratedExpression = {
GeneratedExpression(
s"(!${operand.resultTerm} && !${operand.nullTerm})",
GeneratedExpression.NEVER_NULL,
operand.code,
BOOLEAN_TYPE_INFO)
}
def generateIsNotFalse(operand: GeneratedExpression): GeneratedExpression = {
GeneratedExpression(
s"(${operand.resultTerm} || ${operand.nullTerm})",
GeneratedExpression.NEVER_NULL,
operand.code,
BOOLEAN_TYPE_INFO)
}
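  /**
    * Generates casting code between the supported SQL types. Identity casts return the
    * operand unchanged, temporal casts reinterpret the internal int/long representation,
    * and unsupported combinations throw a [[CodeGenException]].
    */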
def generateCast(
nullCheck: Boolean,
operand: GeneratedExpression,
targetType: TypeInformation[_])
: GeneratedExpression = (operand.resultType, targetType) match {
// identity casting
case (fromTp, toTp) if fromTp == toTp =>
operand
// array identity casting
// (e.g. for Integer[] that can be ObjectArrayTypeInfo or BasicArrayTypeInfo)
case (fromTp, toTp) if isArray(fromTp) && fromTp.getTypeClass == toTp.getTypeClass =>
operand
// Date/Time/Timestamp -> String
case (dtt: SqlTimeTypeInfo[_], STRING_TYPE_INFO) =>
generateUnaryOperatorIfNotNull(nullCheck, targetType, operand) {
(operandTerm) => s"${internalToTimePointCode(dtt, operandTerm)}.toString()"
}
// Interval Months -> String
case (TimeIntervalTypeInfo.INTERVAL_MONTHS, STRING_TYPE_INFO) =>
val method = qualifyMethod(BuiltInMethod.INTERVAL_YEAR_MONTH_TO_STRING.method)
val timeUnitRange = qualifyEnum(TimeUnitRange.YEAR_TO_MONTH)
generateUnaryOperatorIfNotNull(nullCheck, targetType, operand) {
(operandTerm) => s"$method($operandTerm, $timeUnitRange)"
}
// Interval Millis -> String
case (TimeIntervalTypeInfo.INTERVAL_MILLIS, STRING_TYPE_INFO) =>
val method = qualifyMethod(BuiltInMethod.INTERVAL_DAY_TIME_TO_STRING.method)
val timeUnitRange = qualifyEnum(TimeUnitRange.DAY_TO_SECOND)
generateUnaryOperatorIfNotNull(nullCheck, targetType, operand) {
(operandTerm) => s"$method($operandTerm, $timeUnitRange, 3)" // milli second precision
}
// Object array -> String
case (_: ObjectArrayTypeInfo[_, _] | _: BasicArrayTypeInfo[_, _], STRING_TYPE_INFO) =>
generateUnaryOperatorIfNotNull(nullCheck, targetType, operand) {
(operandTerm) => s"java.util.Arrays.deepToString($operandTerm)"
}
// Primitive array -> String
case (_: PrimitiveArrayTypeInfo[_], STRING_TYPE_INFO) =>
generateUnaryOperatorIfNotNull(nullCheck, targetType, operand) {
(operandTerm) => s"java.util.Arrays.toString($operandTerm)"
}
// * (not Date/Time/Timestamp) -> String
case (_, STRING_TYPE_INFO) =>
generateUnaryOperatorIfNotNull(nullCheck, targetType, operand) {
(operandTerm) => s""" "" + $operandTerm"""
}
// * -> Character
case (_, CHAR_TYPE_INFO) =>
throw new CodeGenException("Character type not supported.")
// String -> NUMERIC TYPE (not Character), Boolean
case (STRING_TYPE_INFO, _: NumericTypeInfo[_])
| (STRING_TYPE_INFO, BOOLEAN_TYPE_INFO) =>
val wrapperClass = targetType.getTypeClass.getCanonicalName
generateUnaryOperatorIfNotNull(nullCheck, targetType, operand) {
(operandTerm) => s"$wrapperClass.valueOf($operandTerm)"
}
// String -> BigDecimal
case (STRING_TYPE_INFO, BIG_DEC_TYPE_INFO) =>
val wrapperClass = targetType.getTypeClass.getCanonicalName
generateUnaryOperatorIfNotNull(nullCheck, targetType, operand) {
(operandTerm) => s"new $wrapperClass($operandTerm)"
}
// String -> Date
case (STRING_TYPE_INFO, SqlTimeTypeInfo.DATE) =>
generateUnaryOperatorIfNotNull(nullCheck, targetType, operand) {
(operandTerm) => s"${qualifyMethod(BuiltInMethod.STRING_TO_DATE.method)}($operandTerm)"
}
// String -> Time
case (STRING_TYPE_INFO, SqlTimeTypeInfo.TIME) =>
generateUnaryOperatorIfNotNull(nullCheck, targetType, operand) {
(operandTerm) => s"${qualifyMethod(BuiltInMethod.STRING_TO_TIME.method)}($operandTerm)"
}
// String -> Timestamp
case (STRING_TYPE_INFO, SqlTimeTypeInfo.TIMESTAMP) =>
generateUnaryOperatorIfNotNull(nullCheck, targetType, operand) {
(operandTerm) => s"${qualifyMethod(BuiltInMethod.STRING_TO_TIMESTAMP.method)}" +
s"($operandTerm)"
}
// Boolean -> NUMERIC TYPE
case (BOOLEAN_TYPE_INFO, nti: NumericTypeInfo[_]) =>
val targetTypeTerm = primitiveTypeTermForTypeInfo(nti)
generateUnaryOperatorIfNotNull(nullCheck, targetType, operand) {
(operandTerm) => s"($targetTypeTerm) ($operandTerm ? 1 : 0)"
}
// Boolean -> BigDecimal
case (BOOLEAN_TYPE_INFO, BIG_DEC_TYPE_INFO) =>
generateUnaryOperatorIfNotNull(nullCheck, targetType, operand) {
(operandTerm) => s"$operandTerm ? java.math.BigDecimal.ONE : java.math.BigDecimal.ZERO"
}
// NUMERIC TYPE -> Boolean
case (_: NumericTypeInfo[_], BOOLEAN_TYPE_INFO) =>
generateUnaryOperatorIfNotNull(nullCheck, targetType, operand) {
(operandTerm) => s"$operandTerm != 0"
}
// BigDecimal -> Boolean
case (BIG_DEC_TYPE_INFO, BOOLEAN_TYPE_INFO) =>
generateUnaryOperatorIfNotNull(nullCheck, targetType, operand) {
(operandTerm) => s"$operandTerm.compareTo(java.math.BigDecimal.ZERO) != 0"
}
// NUMERIC TYPE, BigDecimal -> NUMERIC TYPE, BigDecimal
case (_: NumericTypeInfo[_], _: NumericTypeInfo[_])
| (BIG_DEC_TYPE_INFO, _: NumericTypeInfo[_])
| (_: NumericTypeInfo[_], BIG_DEC_TYPE_INFO) =>
val operandCasting = numericCasting(operand.resultType, targetType)
generateUnaryOperatorIfNotNull(nullCheck, targetType, operand) {
(operandTerm) => s"${operandCasting(operandTerm)}"
}
// Date -> Timestamp
case (SqlTimeTypeInfo.DATE, SqlTimeTypeInfo.TIMESTAMP) =>
generateUnaryOperatorIfNotNull(nullCheck, targetType, operand) {
(operandTerm) =>
s"$operandTerm * ${classOf[DateTimeUtils].getCanonicalName}.MILLIS_PER_DAY"
}
// Timestamp -> Date
case (SqlTimeTypeInfo.TIMESTAMP, SqlTimeTypeInfo.DATE) =>
val targetTypeTerm = primitiveTypeTermForTypeInfo(targetType)
generateUnaryOperatorIfNotNull(nullCheck, targetType, operand) {
(operandTerm) =>
s"($targetTypeTerm) ($operandTerm / " +
s"${classOf[DateTimeUtils].getCanonicalName}.MILLIS_PER_DAY)"
}
// Time -> Timestamp
case (SqlTimeTypeInfo.TIME, SqlTimeTypeInfo.TIMESTAMP) =>
generateUnaryOperatorIfNotNull(nullCheck, targetType, operand) {
(operandTerm) => s"$operandTerm"
}
// Timestamp -> Time
case (SqlTimeTypeInfo.TIMESTAMP, SqlTimeTypeInfo.TIME) =>
val targetTypeTerm = primitiveTypeTermForTypeInfo(targetType)
generateUnaryOperatorIfNotNull(nullCheck, targetType, operand) {
(operandTerm) =>
s"($targetTypeTerm) ($operandTerm % " +
s"${classOf[DateTimeUtils].getCanonicalName}.MILLIS_PER_DAY)"
}
// internal temporal casting
// Date -> Integer
// Time -> Integer
// Timestamp -> Long
// Integer -> Date
// Integer -> Time
// Long -> Timestamp
// Integer -> Interval Months
// Long -> Interval Millis
// Interval Months -> Integer
// Interval Millis -> Long
case (SqlTimeTypeInfo.DATE, INT_TYPE_INFO) |
(SqlTimeTypeInfo.TIME, INT_TYPE_INFO) |
(SqlTimeTypeInfo.TIMESTAMP, LONG_TYPE_INFO) |
(INT_TYPE_INFO, SqlTimeTypeInfo.DATE) |
(INT_TYPE_INFO, SqlTimeTypeInfo.TIME) |
(LONG_TYPE_INFO, SqlTimeTypeInfo.TIMESTAMP) |
(INT_TYPE_INFO, TimeIntervalTypeInfo.INTERVAL_MONTHS) |
(LONG_TYPE_INFO, TimeIntervalTypeInfo.INTERVAL_MILLIS) |
(TimeIntervalTypeInfo.INTERVAL_MONTHS, INT_TYPE_INFO) |
(TimeIntervalTypeInfo.INTERVAL_MILLIS, LONG_TYPE_INFO) =>
internalExprCasting(operand, targetType)
// internal reinterpretation of temporal types
// Date, Time, Interval Months -> Long
case (SqlTimeTypeInfo.DATE, LONG_TYPE_INFO)
| (SqlTimeTypeInfo.TIME, LONG_TYPE_INFO)
| (TimeIntervalTypeInfo.INTERVAL_MONTHS, LONG_TYPE_INFO) =>
internalExprCasting(operand, targetType)
case (from, to) =>
throw new CodeGenException(s"Unsupported cast from '$from' to '$to'.")
}
def generateIfElse(
nullCheck: Boolean,
operands: Seq[GeneratedExpression],
resultType: TypeInformation[_],
i: Int = 0)
: GeneratedExpression = {
// else part
if (i == operands.size - 1) {
generateCast(nullCheck, operands(i), resultType)
}
else {
// check that the condition is boolean
// we do not check for null instead we use the default value
// thus null is false
requireBoolean(operands(i))
val condition = operands(i)
val trueAction = generateCast(nullCheck, operands(i + 1), resultType)
val falseAction = generateIfElse(nullCheck, operands, resultType, i + 2)
val resultTerm = newName("result")
val nullTerm = newName("isNull")
val resultTypeTerm = primitiveTypeTermForTypeInfo(resultType)
val operatorCode = if (nullCheck) {
s"""
|${condition.code}
|$resultTypeTerm $resultTerm;
|boolean $nullTerm;
|if (${condition.resultTerm}) {
| ${trueAction.code}
| $resultTerm = ${trueAction.resultTerm};
| $nullTerm = ${trueAction.nullTerm};
|}
|else {
| ${falseAction.code}
| $resultTerm = ${falseAction.resultTerm};
| $nullTerm = ${falseAction.nullTerm};
|}
|""".stripMargin
}
else {
s"""
|${condition.code}
|$resultTypeTerm $resultTerm;
|if (${condition.resultTerm}) {
| ${trueAction.code}
| $resultTerm = ${trueAction.resultTerm};
|}
|else {
| ${falseAction.code}
| $resultTerm = ${falseAction.resultTerm};
|}
|""".stripMargin
}
GeneratedExpression(resultTerm, nullTerm, operatorCode, resultType)
}
}
def generateTemporalPlusMinus(
plus: Boolean,
nullCheck: Boolean,
left: GeneratedExpression,
right: GeneratedExpression)
: GeneratedExpression = {
val op = if (plus) "+" else "-"
(left.resultType, right.resultType) match {
case (l: TimeIntervalTypeInfo[_], r: TimeIntervalTypeInfo[_]) if l == r =>
generateArithmeticOperator(op, nullCheck, l, left, right)
case (SqlTimeTypeInfo.DATE, TimeIntervalTypeInfo.INTERVAL_MILLIS) =>
generateOperatorIfNotNull(nullCheck, SqlTimeTypeInfo.DATE, left, right) {
(l, r) => s"$l $op ((int) ($r / ${MILLIS_PER_DAY}L))"
}
case (SqlTimeTypeInfo.DATE, TimeIntervalTypeInfo.INTERVAL_MONTHS) =>
generateOperatorIfNotNull(nullCheck, SqlTimeTypeInfo.DATE, left, right) {
(l, r) => s"${qualifyMethod(BuiltInMethod.ADD_MONTHS.method)}($l, $op($r))"
}
case (SqlTimeTypeInfo.TIME, TimeIntervalTypeInfo.INTERVAL_MILLIS) =>
generateOperatorIfNotNull(nullCheck, SqlTimeTypeInfo.TIME, left, right) {
(l, r) => s"$l $op ((int) ($r))"
}
case (SqlTimeTypeInfo.TIMESTAMP, TimeIntervalTypeInfo.INTERVAL_MILLIS) =>
generateOperatorIfNotNull(nullCheck, SqlTimeTypeInfo.TIMESTAMP, left, right) {
(l, r) => s"$l $op $r"
}
case (SqlTimeTypeInfo.TIMESTAMP, TimeIntervalTypeInfo.INTERVAL_MONTHS) =>
generateOperatorIfNotNull(nullCheck, SqlTimeTypeInfo.TIMESTAMP, left, right) {
(l, r) => s"${qualifyMethod(BuiltInMethod.ADD_MONTHS.method)}($l, $op($r))"
}
case _ =>
throw new CodeGenException("Unsupported temporal arithmetic.")
}
}
def generateUnaryIntervalPlusMinus(
plus: Boolean,
nullCheck: Boolean,
operand: GeneratedExpression)
: GeneratedExpression = {
val operator = if (plus) "+" else "-"
generateUnaryArithmeticOperator(operator, nullCheck, operand.resultType, operand)
}
def generateArray(
codeGenerator: CodeGenerator,
resultType: TypeInformation[_],
elements: Seq[GeneratedExpression])
: GeneratedExpression = {
val arrayTerm = codeGenerator.addReusableArray(resultType.getTypeClass, elements.size)
val boxedElements: Seq[GeneratedExpression] = resultType match {
case oati: ObjectArrayTypeInfo[_, _] =>
// we box the elements to also represent null values
val boxedTypeTerm = boxedTypeTermForTypeInfo(oati.getComponentInfo)
elements.map { e =>
val boxedExpr = codeGenerator.generateOutputFieldBoxing(e)
val exprOrNull: String = if (codeGenerator.nullCheck) {
s"${boxedExpr.nullTerm} ? null : ($boxedTypeTerm) ${boxedExpr.resultTerm}"
} else {
boxedExpr.resultTerm
}
boxedExpr.copy(resultTerm = exprOrNull)
}
// no boxing necessary
case _: PrimitiveArrayTypeInfo[_] => elements
}
val code = boxedElements
.zipWithIndex
.map { case (element, idx) =>
s"""
|${element.code}
|$arrayTerm[$idx] = ${element.resultTerm};
|""".stripMargin
}
.mkString("\n")
GeneratedExpression(arrayTerm, GeneratedExpression.NEVER_NULL, code, resultType)
}
def generateArrayElementAt(
codeGenerator: CodeGenerator,
array: GeneratedExpression,
index: GeneratedExpression)
: GeneratedExpression = {
val resultTerm = newName("result")
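    // SQL array indices are 1-based, hence the "- 1" when indexing into the Java array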
def unboxArrayElement(componentInfo: TypeInformation[_]): GeneratedExpression = {
// get boxed array element
val resultTypeTerm = boxedTypeTermForTypeInfo(componentInfo)
val arrayAccessCode = if (codeGenerator.nullCheck) {
s"""
|${array.code}
|${index.code}
|$resultTypeTerm $resultTerm = (${array.nullTerm} || ${index.nullTerm}) ?
| null : ${array.resultTerm}[${index.resultTerm} - 1];
|""".stripMargin
} else {
s"""
|${array.code}
|${index.code}
|$resultTypeTerm $resultTerm = ${array.resultTerm}[${index.resultTerm} - 1];
|""".stripMargin
}
// generate unbox code
val unboxing = codeGenerator.generateInputFieldUnboxing(componentInfo, resultTerm)
unboxing.copy(code =
s"""
|$arrayAccessCode
|${unboxing.code}
|""".stripMargin
)
}
array.resultType match {
// unbox object array types
case oati: ObjectArrayTypeInfo[_, _] =>
unboxArrayElement(oati.getComponentInfo)
// unbox basic array types
case bati: BasicArrayTypeInfo[_, _] =>
unboxArrayElement(bati.getComponentInfo)
// no unboxing necessary
case pati: PrimitiveArrayTypeInfo[_] =>
generateOperatorIfNotNull(codeGenerator.nullCheck, pati.getComponentType, array, index) {
(leftTerm, rightTerm) => s"$leftTerm[$rightTerm - 1]"
}
}
}
def generateArrayElement(
codeGenerator: CodeGenerator,
array: GeneratedExpression)
: GeneratedExpression = {
val nullTerm = newName("isNull")
val resultTerm = newName("result")
val resultType = array.resultType match {
case oati: ObjectArrayTypeInfo[_, _] => oati.getComponentInfo
case bati: BasicArrayTypeInfo[_, _] => bati.getComponentInfo
case pati: PrimitiveArrayTypeInfo[_] => pati.getComponentType
}
val resultTypeTerm = primitiveTypeTermForTypeInfo(resultType)
val defaultValue = primitiveDefaultValue(resultType)
val arrayLengthCode = if (codeGenerator.nullCheck) {
s"${array.nullTerm} ? 0 : ${array.resultTerm}.length"
} else {
s"${array.resultTerm}.length"
}
def unboxArrayElement(componentInfo: TypeInformation[_]): String = {
// generate unboxing code
val unboxing = codeGenerator.generateInputFieldUnboxing(
componentInfo,
s"${array.resultTerm}[0]")
s"""
|${array.code}
|${if (codeGenerator.nullCheck) s"boolean $nullTerm;" else "" }
|$resultTypeTerm $resultTerm;
|switch ($arrayLengthCode) {
| case 0:
| ${if (codeGenerator.nullCheck) s"$nullTerm = true;" else "" }
| $resultTerm = $defaultValue;
| break;
| case 1:
| ${unboxing.code}
| ${if (codeGenerator.nullCheck) s"$nullTerm = ${unboxing.nullTerm};" else "" }
| $resultTerm = ${unboxing.resultTerm};
| break;
| default:
| throw new RuntimeException("Array has more than one element.");
|}
|""".stripMargin
}
val arrayAccessCode = array.resultType match {
case oati: ObjectArrayTypeInfo[_, _] =>
unboxArrayElement(oati.getComponentInfo)
case bati: BasicArrayTypeInfo[_, _] =>
unboxArrayElement(bati.getComponentInfo)
case pati: PrimitiveArrayTypeInfo[_] =>
s"""
|${array.code}
|${if (codeGenerator.nullCheck) s"boolean $nullTerm;" else "" }
|$resultTypeTerm $resultTerm;
|switch ($arrayLengthCode) {
| case 0:
| ${if (codeGenerator.nullCheck) s"$nullTerm = true;" else "" }
| $resultTerm = $defaultValue;
| break;
| case 1:
| ${if (codeGenerator.nullCheck) s"$nullTerm = false;" else "" }
| $resultTerm = ${array.resultTerm}[0];
| break;
| default:
| throw new RuntimeException("Array has more than one element.");
|}
|""".stripMargin
}
GeneratedExpression(resultTerm, nullTerm, arrayAccessCode, resultType)
}
def generateArrayCardinality(
nullCheck: Boolean,
array: GeneratedExpression)
: GeneratedExpression = {
generateUnaryOperatorIfNotNull(nullCheck, INT_TYPE_INFO, array) {
(operandTerm) => s"${array.resultTerm}.length"
}
}
def generateMapGet(
codeGenerator: CodeGenerator,
map: GeneratedExpression,
key: GeneratedExpression)
: GeneratedExpression = {
val resultTerm = newName("result")
val nullTerm = newName("isNull")
val ty = map.resultType.asInstanceOf[MapTypeInfo[_,_]]
val resultType = ty.getValueTypeInfo
val resultTypeTerm = boxedTypeTermForTypeInfo(ty.getValueTypeInfo)
val accessCode = if (codeGenerator.nullCheck) {
s"""
|${map.code}
|${key.code}
|boolean $nullTerm = (${map.nullTerm} || ${key.nullTerm});
|$resultTypeTerm $resultTerm = $nullTerm ?
| null : ($resultTypeTerm) ${map.resultTerm}.get(${key.resultTerm});
|""".stripMargin
} else {
s"""
|${map.code}
|${key.code}
|$resultTypeTerm $resultTerm = ($resultTypeTerm)
| ${map.resultTerm}.get(${key.resultTerm});
|""".stripMargin
}
GeneratedExpression(resultTerm, nullTerm, accessCode, resultType)
}
// ----------------------------------------------------------------------------------------------
private def generateUnaryOperatorIfNotNull(
nullCheck: Boolean,
resultType: TypeInformation[_],
operand: GeneratedExpression)
(expr: (String) => String)
: GeneratedExpression = {
val resultTerm = newName("result")
val nullTerm = newName("isNull")
val resultTypeTerm = primitiveTypeTermForTypeInfo(resultType)
val defaultValue = primitiveDefaultValue(resultType)
val operatorCode = if (nullCheck) {
s"""
|${operand.code}
|$resultTypeTerm $resultTerm;
|boolean $nullTerm;
|if (!${operand.nullTerm}) {
| $resultTerm = ${expr(operand.resultTerm)};
| $nullTerm = false;
|}
|else {
| $resultTerm = $defaultValue;
| $nullTerm = true;
|}
|""".stripMargin
}
else {
s"""
|${operand.code}
|$resultTypeTerm $resultTerm = ${expr(operand.resultTerm)};
|""".stripMargin
}
GeneratedExpression(resultTerm, nullTerm, operatorCode, resultType)
}
private def generateOperatorIfNotNull(
nullCheck: Boolean,
resultType: TypeInformation[_],
left: GeneratedExpression,
right: GeneratedExpression)
(expr: (String, String) => String)
: GeneratedExpression = {
val resultTerm = newName("result")
val nullTerm = newName("isNull")
val resultTypeTerm = primitiveTypeTermForTypeInfo(resultType)
val defaultValue = primitiveDefaultValue(resultType)
val resultCode = if (nullCheck) {
s"""
|${left.code}
|${right.code}
|boolean $nullTerm = ${left.nullTerm} || ${right.nullTerm};
|$resultTypeTerm $resultTerm;
|if ($nullTerm) {
| $resultTerm = $defaultValue;
|}
|else {
| $resultTerm = ${expr(left.resultTerm, right.resultTerm)};
|}
|""".stripMargin
}
else {
s"""
|${left.code}
|${right.code}
|$resultTypeTerm $resultTerm = ${expr(left.resultTerm, right.resultTerm)};
|""".stripMargin
}
GeneratedExpression(resultTerm, nullTerm, resultCode, resultType)
}
private def internalExprCasting(
expr: GeneratedExpression,
typeInfo: TypeInformation[_])
: GeneratedExpression = {
GeneratedExpression(expr.resultTerm, expr.nullTerm, expr.code, typeInfo)
}
private def arithOpToDecMethod(operator: String): String = operator match {
case "+" => "add"
case "-" => "subtract"
case "*" => "multiply"
case "/" => "divide"
case "%" => "remainder"
case _ => throw new CodeGenException("Unsupported decimal arithmetic operator.")
}
private def numericCasting(
operandType: TypeInformation[_],
resultType: TypeInformation[_])
: (String) => String = {
def decToPrimMethod(targetType: TypeInformation[_]): String = targetType match {
case BYTE_TYPE_INFO => "byteValueExact"
case SHORT_TYPE_INFO => "shortValueExact"
case INT_TYPE_INFO => "intValueExact"
case LONG_TYPE_INFO => "longValueExact"
case FLOAT_TYPE_INFO => "floatValue"
case DOUBLE_TYPE_INFO => "doubleValue"
case _ => throw new CodeGenException("Unsupported decimal casting type.")
}
val resultTypeTerm = primitiveTypeTermForTypeInfo(resultType)
// no casting necessary
if (operandType == resultType) {
(operandTerm) => s"$operandTerm"
}
// result type is decimal but numeric operand is not
else if (isDecimal(resultType) && !isDecimal(operandType) && isNumeric(operandType)) {
(operandTerm) =>
s"java.math.BigDecimal.valueOf((${superPrimitive(operandType)}) $operandTerm)"
}
// numeric result type is not decimal but operand is
else if (isNumeric(resultType) && !isDecimal(resultType) && isDecimal(operandType) ) {
(operandTerm) => s"$operandTerm.${decToPrimMethod(resultType)}()"
}
// result type and operand type are numeric but not decimal
else if (isNumeric(operandType) && isNumeric(resultType)
&& !isDecimal(operandType) && !isDecimal(resultType)) {
(operandTerm) => s"(($resultTypeTerm) $operandTerm)"
}
else {
throw new CodeGenException(s"Unsupported casting from $operandType to $resultType.")
}
}
}
| Xpray/flink | flink-libraries/flink-table/src/main/scala/org/apache/flink/table/codegen/calls/ScalarOperators.scala | Scala | apache-2.0 | 38,397 |
package com.softwaremill.codebrag.auth
import org.scalatra.ScalatraBase
import org.scalatra.auth.ScentryStrategy
import com.softwaremill.codebrag.service.user.Authenticator
import javax.servlet.http.{HttpServletResponse, HttpServletRequest}
import com.softwaremill.codebrag.domain.User
class UserPasswordStrategy(protected val app: ScalatraBase, login: String, password: String, val authenticator: Authenticator) extends ScentryStrategy[User] {
override def name: String = UserPassword.name
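  // only attempt this strategy when both credentials were actually submitted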
override def isValid(implicit request: HttpServletRequest) = {
!login.isEmpty && !password.isEmpty
}
override def authenticate()(implicit request: HttpServletRequest, response: HttpServletResponse) = {
authenticator.authenticate(login, password)
}
}
object UserPassword {
val name = "UserPassword"
}
| softwaremill/codebrag | codebrag-rest/src/main/scala/com/softwaremill/codebrag/auth/UserPasswordStrategy.scala | Scala | agpl-3.0 | 820 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.zeppelin.flink
import java.util.Properties
import org.junit.Assert.assertEquals
import org.scalatest.FunSuite
class FlinkScalaInterpreterTest extends FunSuite {
test("testReplaceYarnAddress") {
val flinkScalaInterpreter = new FlinkScalaInterpreter(new Properties())
var targetURL = flinkScalaInterpreter.replaceYarnAddress("http://localhost:8081",
"http://my-server:9090/gateway")
assertEquals("http://my-server:9090/gateway", targetURL)
targetURL = flinkScalaInterpreter.replaceYarnAddress("https://localhost:8081/",
"https://my-server:9090/gateway")
assertEquals("https://my-server:9090/gateway/", targetURL)
targetURL = flinkScalaInterpreter.replaceYarnAddress("https://localhost:8081/proxy/app_1",
"https://my-server:9090/gateway")
assertEquals("https://my-server:9090/gateway/proxy/app_1", targetURL)
}
}
| joroKr21/incubator-zeppelin | flink/interpreter/src/test/scala/org/apache/zeppelin/flink/FlinkScalaInterpreterTest.scala | Scala | apache-2.0 | 1,688 |
package chat.client.gui
import java.awt._
import java.awt.event.{ActionEvent, ActionListener}
import javax.swing._
import javax.swing.border.LineBorder
import library.services.LibraryException
class LoginDialog(var ctrl: TerminalController) extends JDialog
{
private var tfUsername: JTextField = null
private var pfPassword: JPasswordField = null
private var lbUsername: JLabel = null
private var lbPassword: JLabel = null
private var btnLogin: JButton = null
private var btnCancel: JButton = null
val panel: JPanel = new JPanel(new GridBagLayout)
val cs: GridBagConstraints = new GridBagConstraints
val bp: JPanel = new JPanel
val self = this
cs.fill = GridBagConstraints.HORIZONTAL
lbUsername = new JLabel("Username: ")
cs.gridx = 0
cs.gridy = 0
cs.gridwidth = 1
panel.add(lbUsername, cs)
tfUsername = new JTextField(20)
cs.gridx = 1
cs.gridy = 0
cs.gridwidth = 2
panel.add(tfUsername, cs)
lbPassword = new JLabel("Password: ")
cs.gridx = 0
cs.gridy = 1
cs.gridwidth = 1
panel.add(lbPassword, cs)
pfPassword = new JPasswordField(20)
cs.gridx = 1
cs.gridy = 1
cs.gridwidth = 2
panel.add(pfPassword, cs)
panel.setBorder(new LineBorder(Color.GRAY))
btnLogin = new JButton("Login")
// https://stackoverflow.com/questions/3239106/scala-actionlistener-anonymous-function-type-mismatch
implicit def toActionListener(f: ActionEvent => Unit) = new ActionListener
{
def actionPerformed(e: ActionEvent)
{f(e)}
}
btnLogin.addActionListener(new ActionListener()
{
override def actionPerformed(e: ActionEvent)
{
println("Login button pressed.")
try
{
assert(tfUsername != null)
ctrl.login(getUsername, getPassword)
Terminal.run(ctrl)
dispose()
}
catch
{
case ex: LibraryException =>
Terminal.showErrorDialog(self, "Invalid username or password. Details: " + ex.getMessage)
ex.printStackTrace()
pfPassword.setText("")
}
}
})
btnCancel = new JButton("Cancel")
btnCancel.addActionListener
{ e: ActionEvent => dispose() }
bp.add(btnLogin)
bp.add(btnCancel)
getContentPane.add(panel, BorderLayout.CENTER)
getContentPane.add(bp, BorderLayout.PAGE_END)
    pack()
    setLocationRelativeTo(null)
    setVisible(true)
def getUsername: String =
{
tfUsername.getText.trim
}
def getPassword: String =
{
new String(pfPassword.getPassword)
}
}
| leyyin/university | systems-for-design-and-implementation/labs/lab3/LibraryClient/src/chat/client/gui/LoginDialog.scala | Scala | mit | 2,723 |
package org.aja.dhira.nnql
/**
* Created by mageswaran on 29/5/16.
*/
class PrettyPrinter {
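  // Example (hypothetical, assuming Int scopes in the AST constructors below):
  //   val pp = new PrettyPrinter
  //   pp(FunctionApply(Variable("f", 0), Number(1)))  // yields "f (1)"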
def apply(expr: Expression): String = expr match {
case CREATE(arg, body) => p"CREATE $arg $body"
case Number(i) => i.toString
case FunctionApply(fun, arg) => p"$fun $arg"
case Variable(name, scope) => s"$name"
}
implicit class PrettyPrinting(val sc: StringContext) {
def p(args: Expression*) = sc.s((args map parensIfNeeded): _*)
}
def parensIfNeeded(expr: Expression) = expr match {
case v: Variable => apply(v)
case _ => "(" + apply(expr) + ")"
}
}
| Mageswaran1989/aja | src/main/scala/org/aja/dhira/src/main/scala/org/dhira/core/nnql/PrettyPrinter.scala | Scala | apache-2.0 | 605 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.repl
import java.net.URL
import org.apache.spark.SparkEnv
import scala.tools.nsc.interpreter.AbstractFileClassLoader
import scala.tools.nsc.util.ScalaClassLoader.URLClassLoader
/**
 * Various utilities needed by the interpreters. This object is mainly used to register the repl classloader, which is
 * shared among all the interpreters.
 * The first time H2OIMain is initialized, it creates the repl classloader and stores it here. Other instances
 * of H2OIMain then obtain the classloader from here.
*/
object InterpreterUtils {
private var _replClassLoader: AbstractFileClassLoader = {
if (Main.interp != null) {
SparkEnv.get.serializer.setDefaultClassLoader(Main.interp.intp.classLoader)
SparkEnv.get.closureSerializer.setDefaultClassLoader(Main.interp.intp.classLoader)
Main.interp.intp.classLoader
} else {
null
}
}
private var _runtimeClassLoader: URLClassLoader with ExposeAddUrl = null // wrapper exposing addURL
def getClassOutputDir = {
if (Main.interp != null) {
Main.interp.intp.getClassOutputDirectory
} else {
ReplCLassServer.getClassOutputDirectory
}
}
def classServerUri = {
if (Main.interp != null) {
Main.interp.intp.classServerUri
} else {
if (!ReplCLassServer.isRunning) {
ReplCLassServer.start()
}
ReplCLassServer.classServerUri
}
}
def REPLCLassLoader = this.synchronized{
_replClassLoader
}
def ensureREPLClassLoader(classLoader: AbstractFileClassLoader) = this.synchronized{
if(_replClassLoader == null) {
_replClassLoader = classLoader
SparkEnv.get.serializer.setDefaultClassLoader(_replClassLoader)
SparkEnv.get.closureSerializer.setDefaultClassLoader(_replClassLoader)
}
}
def resetREPLCLassLoader() : Unit = this.synchronized{
_replClassLoader = null
}
def runtimeClassLoader = this.synchronized{
_runtimeClassLoader
}
def ensureRuntimeCLassLoader(classLoader: URLClassLoader with ExposeAddUrl) = this.synchronized{
if(_runtimeClassLoader == null){
_runtimeClassLoader = classLoader
}
}
def addUrlsToClasspath(urls: URL*): Unit = {
if (Main.interp != null) {
Main.interp.intp.addUrlsToClassPath(urls: _*)
} else {
urls.foreach(_runtimeClassLoader.addNewUrl)
}
}
}
private[repl] trait ExposeAddUrl extends URLClassLoader {
def addNewUrl(url: URL) = this.addURL(url)
}
| tromika/sparkling-water | core/src/main/scala/org/apache/spark/repl/InterpreterUtils.scala | Scala | apache-2.0 | 3244 |
import controllers.WebpackController
import org.scalatestplus.play._
import org.scalatestplus.play.guice.GuiceOneAppPerTest
import play.api.test.Helpers._
import play.api.test._
class ControllerSpec extends PlaySpec with GuiceOneAppPerTest {
"WebpackController GET" should {
"render a javascript method" in {
val controller = app.injector.instanceOf[WebpackController]
val home = controller.index().apply(FakeRequest())
status(home) mustBe OK
contentAsString(home) must include ("This is rendered in JS")
}
"render an async javascript method" in {
val controller = app.injector.instanceOf[WebpackController]
val home = controller.asyncRenderedJs().apply(FakeRequest())
status(home) mustBe OK
contentAsString(home) must include ("This is an async resolved String")
}
}
}
| BowlingX/play-webpack | src/play-module/src/test/scala/ControllerSpec.scala | Scala | mit | 844 |
/*
* Copyright (C) 2009-2018 Lightbend Inc. <https://www.lightbend.com>
*/
package play.libs
import java.io.File
import org.specs2.mutable.Specification
import org.xml.sax.SAXException
class XMLSpec extends Specification {
"The Java XML support" should {
def parse(xml: String) = {
XML.fromString(xml)
}
def writeStringToFile(file: File, text: String) = {
val out = java.nio.file.Files.newOutputStream(file.toPath)
try {
out.write(text.getBytes("utf-8"))
} finally {
out.close()
}
}
"parse XML bodies" in {
parse("<foo>bar</foo>").getChildNodes.item(0).getNodeName must_== "foo"
}
"parse XML bodies without loading in a related schema" in {
val f = File.createTempFile("xxe", ".txt")
writeStringToFile(f, "I shouldn't be there!")
f.deleteOnExit()
val xml = s"""<?xml version="1.0" encoding="ISO-8859-1"?>
| <!DOCTYPE foo [
| <!ELEMENT foo ANY >
| <!ENTITY xxe SYSTEM "${f.toURI}">]><foo>hello&xxe;</foo>""".stripMargin
parse(xml) must throwA[RuntimeException].like {
case re => re.getCause must beAnInstanceOf[SAXException]
}
}
"parse XML bodies without loading in a related schema from a parameter" in {
val externalParameterEntity = File.createTempFile("xep", ".dtd")
val externalGeneralEntity = File.createTempFile("xxe", ".txt")
writeStringToFile(
externalParameterEntity,
s"""
|<!ENTITY % xge SYSTEM "${externalGeneralEntity.toURI}">
|<!ENTITY % pe "<!ENTITY xxe '%xge;'>">
""".stripMargin)
writeStringToFile(externalGeneralEntity, "I shouldnt be there!")
externalGeneralEntity.deleteOnExit()
externalParameterEntity.deleteOnExit()
val xml = s"""<?xml version="1.0" encoding="ISO-8859-1"?>
| <!DOCTYPE foo [
| <!ENTITY % xpe SYSTEM "${externalParameterEntity.toURI}">
| %xpe;
| %pe;
| ]><foo>hello&xxe;</foo>""".stripMargin
parse(xml) must throwA[RuntimeException].like {
case re => re.getCause must beAnInstanceOf[SAXException]
}
}
"gracefully fail when there are too many nested entities" in {
      val nested = for (x <- 1 to 30) yield "<!ENTITY laugh" + x + " \"&laugh" + (x - 1) + ";&laugh" + (x - 1) + ";\">"
val xml = s"""<?xml version="1.0"?>
| <!DOCTYPE billion [
| <!ELEMENT billion (#PCDATA)>
| <!ENTITY laugh0 "ha">
                   |  ${nested.mkString("\n")}
| ]>
| <billion>&laugh30;</billion>""".stripMargin
parse(xml) must throwA[RuntimeException].like {
case re => re.getCause must beAnInstanceOf[SAXException]
}
}
"gracefully fail when an entity expands to be very large" in {
val as = "a" * 50000
val entities = "&a;" * 50000
val xml = s"""<?xml version="1.0"?>
| <!DOCTYPE kaboom [
| <!ENTITY a "$as">
| ]>
| <kaboom>$entities</kaboom>""".stripMargin
parse(xml) must throwA[RuntimeException].like {
case re => re.getCause must beAnInstanceOf[SAXException]
}
}
}
}
| Shenker93/playframework | framework/src/play/src/test/scala/play/libs/XMLSpec.scala | Scala | apache-2.0 | 3,353 |
package io.skysail.server.website.html
import play.twirl.api.Html
import html.main
import io.skysail.server.RepresentationModel
object PostBookmarkResource_Get extends _root_.play.twirl.api.BaseScalaTemplate[play.twirl.api.HtmlFormat.Appendable,_root_.play.twirl.api.Format[play.twirl.api.HtmlFormat.Appendable]](play.twirl.api.HtmlFormat) with _root_.play.twirl.api.Template1[RepresentationModel,play.twirl.api.HtmlFormat.Appendable] {
/*************************************
* Home page. *
* *
* @param msg The message to display *
*************************************/
def apply/*6.2*/(rep: RepresentationModel):play.twirl.api.HtmlFormat.Appendable = {
_display_ {
{
Seq[Any](format.raw/*6.28*/("""
"""),_display_(/*8.2*/main/*8.6*/ {_display_(Seq[Any](format.raw/*8.8*/("""
"""),format.raw/*10.1*/("""<br><br><br>
<div class="container">
<div class="starter-template">
<h1>Bookmarks</h1>
<p class="lead">add bookmark:</p>
<form action='"""),_display_(/*16.24*/rep/*16.27*/.linkFor("io.skysail.app.bookmarks.PostBookmarkResource", None)),format.raw/*16.90*/("""' method="post">
<table class="table table-sm">
<thead>
<tr>
<th>Title</th>
<th>Url</th>
</tr>
</thead>
<tbody>
<tr>
<th scope="row"><input type="text" name="title"/></th>
<td><input type="url" name="url"/></td>
</tr>
<tr>
<th colspan="2">
<input type="submit">
</th>
</tr>
</tbody>
</table>
</form>
</div>
</div>
""")))}))
}
}
}
def render(rep:RepresentationModel): play.twirl.api.HtmlFormat.Appendable = apply(rep)
def f:((RepresentationModel) => play.twirl.api.HtmlFormat.Appendable) = (rep) => apply(rep)
def ref: this.type = this
}
/*
-- GENERATED --
DATE: Tue Dec 05 11:16:45 CET 2017
SOURCE: C:/git/skysail-server/skysail.server.website/./resources/templates/io/skysail/server/website/PostBookmarkResource_Get.scala.html
HASH: f4d7c35d668ac555f4a3a51a711cb5ae2ee85a48
MATRIX: 653->193|774->219|802->222|813->226|851->228|880->230|1072->395|1084->398|1168->461
LINES: 15->6|20->6|22->8|22->8|22->8|24->10|30->16|30->16|30->16
-- GENERATED --
*/
| evandor/skysail-server | skysail.server.website/src/io/skysail/server/website/html/PostBookmarkResource_Get.template.scala | Scala | apache-2.0 | 2,655 |
package scodec
package codecs
import scalaz.\/
import scalaz.syntax.std.option._
import org.scalacheck.Gen
import scodec.bits.BitVector
class ShortCodecTest extends CodecSuite {
def check(low: Short, high: Short)(f: (Short) => Unit) {
forAll(Gen.choose(low, high)) { n =>
whenever(n >= low) { f(n) }
}
}
"the short16 codec" should { "roundtrip" in { forAll { (n: Short) => roundtrip(short16, n) } } }
"the short16L codec" should { "roundtrip" in { forAll { (n: Short) => roundtrip(short16L, n) } } }
"the ushort(n) codec" should { "roundtrip" in { forAll { (n: Short) => whenever(n >= 0) { roundtrip(ushort(15), n) } } } }
"the ushortL(n) codec" should { "roundtrip" in { forAll { (n: Short) => whenever(n >= 0) { roundtrip(ushortL(15), n) } } } }
"the short codecs" should {
"support endianess correctly" in {
forAll { (n: Short) =>
val bigEndian = short16.encode(n).toOption.err("big").toByteVector
val littleEndian = short16L.encode(n).toOption.err("little").toByteVector
littleEndian shouldBe bigEndian.reverse
}
check(0, 8191) { (n: Short) =>
whenever(n >= 0 && n <= 8191) {
val bigEndian = ushort(13).encodeValid(n)
val littleEndian = ushortL(13).encodeValid(n).toByteVector
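        // rebuild the big-endian bit pattern from the little-endian bytes: the value's five
        // most significant bits sit in the last byte, followed by the full bytes in reverse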
val flipped = BitVector(littleEndian.last).take(5) ++ littleEndian.init.reverse.toBitVector
flipped shouldBe bigEndian
}
}
}
"return an error when value to encode is out of legal range" in {
short(15).encode(Short.MaxValue) shouldBe \/.left(Err("32767 is greater than maximum value 16383 for 15-bit signed short"))
short(15).encode(Short.MinValue) shouldBe \/.left(Err("-32768 is less than minimum value -16384 for 15-bit signed short"))
ushort(15).encode(-1) shouldBe \/.left(Err("-1 is less than minimum value 0 for 15-bit unsigned short"))
}
"return an error when decoding with too few bits" in {
short16.decode(BitVector.low(8)) shouldBe \/.left(Err.insufficientBits(16, 8))
}
}
}
| danielwegener/scodec | src/test/scala/scodec/codecs/ShortCodecTest.scala | Scala | bsd-3-clause | 2,054 |
package rwsscala.ichiba
import rwsscala.util._
import scalaz._, Scalaz._
sealed trait MaxPrice extends Parameter
object MaxPrice {
private case class On(value: Long) extends MaxPrice {
def param = Seq("maxPrice" -> value.toString)
}
case object Off extends MaxPrice {
def param = Seq()
}
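  /** Negative values are clamped to 0; use [[MaxPrice.Off]] to omit the parameter entirely. */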
def apply(value: Long): MaxPrice = value match {
case v if v < 0L => On(0L)
case _ => On(value)
}
def unapply(m: MaxPrice): Option[Long] = m match {
case On(v) => v.some
case Off => none
}
}
trait MaxPrices {
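  // Example (hypothetical call site): with these conversions in scope, both
  //   search(maxPrice = 1000)        // Int/Long lifted to MaxPrice
  //   search(maxPrice = none[Long])  // absent value becomes MaxPrice.Off
  // typecheck against a parameter declared as maxPrice: MaxPrice.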
implicit def long2maxPrice(value: Long): MaxPrice = MaxPrice(value)
implicit def int2maxPrice(value: Int): MaxPrice = long2maxPrice(value)
  implicit def longOpt2maxPrice(opt: Option[Long]): MaxPrice = opt map long2maxPrice | MaxPrice.Off
  implicit def intOpt2maxPrice(opt: Option[Int]): MaxPrice = opt map int2maxPrice | MaxPrice.Off
}
| nisshiee/rws-scala | core/src/main/scala/core/ichiba/request/MaxPrice.scala | Scala | mit | 889 |
/* __ *\
** ________ ___ / / ___ __ ____ Scala.js API **
** / __/ __// _ | / / / _ | __ / // __/ (c) 2013-2015, LAMP/EPFL **
** __\ \/ /__/ __ |/ /__/ __ |/_// /_\ \ http://scala-lang.org/ **
** /____/\___/_/ |_/____/_/ | |__/ /____/ **
** |/____/ **
\* */
package scala.scalajs.js.typedarray
import scala.language.implicitConversions
import java.nio._
/** Additional operations on a [[Buffer]] with interoperability with
* JavaScript Typed Arrays.
*
* All Scala.js implementations of [[Buffer]] also implement this interface
* for some TypedArrayType, which depends on the type of elements in the
* buffer.
*/
final class TypedArrayBufferOps[ // scalastyle:ignore
TypedArrayType <: TypedArray[_, TypedArrayType]](
val buffer: Buffer) extends AnyVal {
/** Tests whether this buffer has a valid associated [[ArrayBuffer]].
*
* This is true iff the buffer is direct and not read-only.
*/
def hasArrayBuffer(): Boolean =
TypedArrayBufferBridge.Buffer_hasArrayBuffer(buffer)
/** [[ArrayBuffer]] backing this buffer _(optional operation)_.
*
* @throws UnsupportedOperationException
* If this buffer has no backing [[ArrayBuffer]], i.e., !hasArrayBuffer()
*/
def arrayBuffer(): ArrayBuffer =
TypedArrayBufferBridge.Buffer_arrayBuffer(buffer)
/** Byte offset in the associated [[ArrayBuffer]] _(optional operation)_.
*
* @throws UnsupportedOperationException
* If this buffer has no backing [[ArrayBuffer]], i.e., !hasArrayBuffer()
*/
def arrayBufferOffset(): Int =
TypedArrayBufferBridge.Buffer_arrayBufferOffset(buffer)
/** [[DataView]] of the backing [[ArrayBuffer]] _(optional operation)_.
*
* The [[DataView]] is sliced to the portion of the [[ArrayBuffer]] seen by
* this [[Buffer]].
*
* @throws UnsupportedOperationException
* If this buffer has no backing [[ArrayBuffer]], i.e., !hasArrayBuffer()
*/
def dataView(): DataView =
TypedArrayBufferBridge.Buffer_dataView(buffer)
/** Tests whether this direct buffer has a valid associated [[TypedArray]].
*
* If this buffer is read-only, returns false.
*
* For read-write buffers:
*
* * Direct Byte buffers always have an associated [[TypedArray]].
* * Long buffers never do.
* * Other kinds of direct buffers have an associated [[TypedArray]] if and
* only if their byte order is the native order of the platform.
*/
def hasTypedArray(): Boolean =
TypedArrayBufferBridge.Buffer_hasTypedArray(buffer)
/** [[TypedArray]] backing this direct buffer _(optional operation)_.
*
* The [[TypedArray]] is sliced to the portion of the [[ArrayBuffer]] seen
* by this [[Buffer]].
*
* @throws UnsupportedOperationException
* If this buffer does not have a backing [[TypedArray]], i.e., !hasTypedArray().
*/
def typedArray(): TypedArrayType =
TypedArrayBufferBridge.Buffer_typedArray(buffer).asInstanceOf[TypedArrayType]
}
/** Extensions to [[Buffer]]s for interoperability with JavaScript Typed Arrays.
*/
object TypedArrayBufferOps {
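  // Example (hypothetical usage): after importing these implicits, a direct ByteBuffer
  // exposes its backing typed array:
  //   import scala.scalajs.js.typedarray.TypedArrayBufferOps._
  //   val buf = java.nio.ByteBuffer.allocateDirect(16)
  //   if (buf.hasTypedArray()) { val arr: Int8Array = buf.typedArray() }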
implicit def bufferOps(buffer: Buffer): TypedArrayBufferOps[_ <: TypedArray[_, _]] =
new TypedArrayBufferOps(buffer)
implicit def byteBufferOps(buffer: ByteBuffer): TypedArrayBufferOps[Int8Array] =
new TypedArrayBufferOps(buffer)
implicit def charBufferOps(buffer: CharBuffer): TypedArrayBufferOps[Uint16Array] =
new TypedArrayBufferOps(buffer)
implicit def shortBufferOps(buffer: ShortBuffer): TypedArrayBufferOps[Int16Array] =
new TypedArrayBufferOps(buffer)
implicit def intBufferOps(buffer: IntBuffer): TypedArrayBufferOps[Int32Array] =
new TypedArrayBufferOps(buffer)
implicit def longBufferOps(buffer: LongBuffer): TypedArrayBufferOps[Nothing] =
new TypedArrayBufferOps(buffer)
implicit def floatBufferOps(buffer: FloatBuffer): TypedArrayBufferOps[Float32Array] =
new TypedArrayBufferOps(buffer)
implicit def doubleBufferOps(buffer: DoubleBuffer): TypedArrayBufferOps[Float64Array] =
new TypedArrayBufferOps(buffer)
}
| CapeSepias/scala-js | library/src/main/scala/scala/scalajs/js/typedarray/TypedArrayBufferOps.scala | Scala | bsd-3-clause | 4,328 |
package com.stomp
import java.net.Socket
/**
* Created by vmp on 8/8/14.
*/
object STOMPServer {
val serverSocket = new Socket()
}
}
| vitormp/stomps | src/main/scala/com/stomp/STOMPServer.scala | Scala | lgpl-3.0 | 139 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// scalastyle:off println
package org.apache.spark.examples
import scala.collection.mutable
import scala.util.Random
import org.apache.spark.sql.SparkSession
/**
* Transitive closure on a graph.
*/
object SparkTC {
val numEdges = 200
val numVertices = 100
val rand = new Random(42)
def generateGraph: Seq[(Int, Int)] = {
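    // draw random edges until numEdges distinct pairs exist; self-loops are skipped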
val edges: mutable.Set[(Int, Int)] = mutable.Set.empty
while (edges.size < numEdges) {
val from = rand.nextInt(numVertices)
val to = rand.nextInt(numVertices)
if (from != to) edges.+=((from, to))
}
edges.toSeq
}
def main(args: Array[String]) {
val spark = SparkSession
.builder
.appName("SparkTC")
.getOrCreate()
val slices = if (args.length > 0) args(0).toInt else 2
var tc = spark.sparkContext.parallelize(generateGraph, slices).cache()
// Linear transitive closure: each round grows paths by one edge,
// by joining the graph's edges with the already-discovered paths.
// e.g. join the path (y, z) from the TC with the edge (x, y) from
// the graph to obtain the path (x, z).
// Because join() joins on keys, the edges are stored in reversed order.
val edges = tc.map(x => (x._2, x._1))
// This join is iterated until a fixed point is reached.
var oldCount = 0L
var nextCount = tc.count()
do {
oldCount = nextCount
// Perform the join, obtaining an RDD of (y, (z, x)) pairs,
// then project the result to obtain the new (x, z) paths.
tc = tc.union(tc.join(edges).map(x => (x._2._2, x._2._1))).distinct().cache()
nextCount = tc.count()
} while (nextCount != oldCount)
println("TC has " + tc.count() + " edges.")
spark.stop()
}
}
// scalastyle:on println
| mrchristine/spark-examples-dbc | src/main/scala/org/apache/spark/examples/SparkTC.scala | Scala | apache-2.0 | 2,550 |
import org.scalatra.LifeCycle
import javax.servlet.ServletContext
import com.quinsoft.northwind._
class ScalatraBootstrap extends LifeCycle {
override def init(context: ServletContext) {
context mount (new NorthwindScalatra, "/*")
}
}
| DeegC/Zeidon-Northwind | scalatra/src/main/scala/ScalatraBootstrap.scala | Scala | apache-2.0 | 246 |
package chat.tox.antox.tox
import java.io._
import java.util
import java.util.Collections
import android.content.{Context, SharedPreferences}
import android.net.ConnectivityManager
import android.preference.PreferenceManager
import chat.tox.antox.data.{AntoxDB, State}
import chat.tox.antox.utils._
import chat.tox.antox.wrapper.{ToxCore, _}
import im.tox.core.network.Port
import im.tox.tox4j.core.data.ToxPublicKey
import im.tox.tox4j.core.enums.ToxUserStatus
import im.tox.tox4j.core.options.ToxOptions
import org.json.{JSONException, JSONObject}
import org.scaloid.common.LoggerTag
import rx.lang.scala.Observable
import rx.lang.scala.schedulers.{IOScheduler, NewThreadScheduler}
object ToxSingleton {
var tox: ToxCore = _
var toxAv: ToxAv = _
private var groupList: GroupList = _
var dataFile: ToxDataFile = _
var qrFile: File = _
var typingMap: util.HashMap[ContactKey, Boolean] = new util.HashMap[ContactKey, Boolean]()
var isInited: Boolean = false
private val nodeFileName = "Nodefile.json"
private var dhtNodes = Seq[DhtNode]()
def interval: Int = {
Math.min(State.transfers.interval, tox.interval)
}
def getGroupList: GroupList = groupList
def getGroup(groupNumber: Int): Group = getGroupList.getGroup(groupNumber)
def getGroup(groupKey: GroupKey): Group = getGroupList.getGroup(groupKey)
def getGroupPeer(groupNumber: Int, peerNumber: Int): GroupPeer = getGroupList.getPeer(groupNumber, peerNumber)
def changeActiveKey(key: ContactKey) {
State.activeKey.onNext(Some(key))
}
def exportDataFile(dest: File): Unit = {
dataFile.exportFile(dest)
ToxSingleton.save()
}
def bootstrap(ctx: Context, updateNodes: Boolean = false): Boolean = {
val TAG = LoggerTag("Bootstrap")
if (updateNodes) updateCachedDhtNodes(ctx)
dhtNodes =
(if (dhtNodes.isEmpty) None else Some(dhtNodes))
.orElse(readCachedDhtNodes(ctx))
.orElse({
// if all else fails, try to pull the nodes from the server again
updateCachedDhtNodes(ctx)
readCachedDhtNodes(ctx)
})
.getOrElse(Nil)
//avoid always hitting the first node in the list
Collections.shuffle(util.Arrays.asList(dhtNodes: _*))
AntoxLog.debug("Trying to bootstrap", TAG)
AntoxLog.debug("Current nodes: " + dhtNodes.mkString("|"), TAG)
var bootstrapped = false
for (i <- dhtNodes.indices) {
try {
AntoxLog.debug(s"Bootstrapping to ${dhtNodes(i).ipv4}:${dhtNodes(i).port.value}", TAG)
tox.bootstrap(dhtNodes(i).ipv4, dhtNodes(i).port, dhtNodes(i).key)
bootstrapped = true
} catch {
case _: Exception =>
AntoxLog.error(s"Couldn't bootstrap to node ${dhtNodes(i).ipv4}:${dhtNodes(i).port.value}")
}
}
if (bootstrapped) {
AntoxLog.debug("Successfully bootstrapped", TAG)
true
} else if (!updateNodes) { //prevent infinite loop
AntoxLog.debug("Could not find a node to bootstrap to, fetching new Nodefile and trying again", TAG)
bootstrap(ctx, updateNodes = true)
} else {
AntoxLog.debug("Failed to bootstrap", TAG)
false
}
}
def updateCachedDhtNodes(ctx: Context): Unit = {
val nodeFileUrl = "https://nodes.tox.chat/json"
JsonReader.readFromUrl(nodeFileUrl) match {
case Some(downloadedJson) =>
FileUtils.writePrivateFile(nodeFileName, downloadedJson, ctx)
case None =>
AntoxLog.debug("Failed to download nodefile")
}
}
def readCachedDhtNodes(ctx: Context): Option[Seq[DhtNode]] = {
val savedNodeFile = new File(ctx.getFilesDir, nodeFileName)
for (
json <- JsonReader.readJsonFromFile(savedNodeFile);
nodes <- parseDhtNodes(json)
) yield nodes
}
private def parseDhtNodes(json: JSONObject): Option[Seq[DhtNode]] = {
try {
var dhtNodes: Array[DhtNode] = Array()
val serverArray = json.getJSONArray("nodes")
for (i <- 0 until serverArray.length) {
val jsonObject = serverArray.getJSONObject(i)
if (jsonObject.getBoolean("status_tcp")) {
dhtNodes +:= DhtNode(
jsonObject.getString("maintainer"),
jsonObject.getString("ipv4"),
ToxPublicKey.unsafeFromValue(Hex.hexStringToBytes(jsonObject.getString("public_key"))),
Port.unsafeFromInt(jsonObject.getInt("port")))
}
}
Some(dhtNodes)
} catch {
case e: JSONException =>
e.printStackTrace()
None
}
}
private def bootstrapFromCustomNode(preferences: SharedPreferences) = {
try {
val ip = preferences.getString("custom_node_address", "127.0.0.1")
val port = Port.unsafeFromInt(preferences.getString("custom_node_port", "33445").toInt)
val address = ToxPublicKey.unsafeFromValue(Hex.hexStringToBytes(preferences.getString("custom_node_key", "")))
val node = DhtNode("custom", ip, address, port)
tox.bootstrap(node.ipv4, node.port, node.key)
} catch {
case e: Exception =>
AntoxLog.error("Failed to bootstrap from custom node")
e.printStackTrace()
}
}
def isToxConnected(preferences: SharedPreferences, context: Context): Boolean = {
val connManager = context.getSystemService(Context.CONNECTIVITY_SERVICE).asInstanceOf[ConnectivityManager]
val wifiOnly = preferences.getBoolean("wifi_only", true)
val wifiInfo = connManager.getNetworkInfo(ConnectivityManager.TYPE_WIFI)
!(wifiOnly && !wifiInfo.isConnected)
}
def initTox(ctx: Context) {
isInited = true
val preferences = PreferenceManager.getDefaultSharedPreferences(ctx)
val userDb = State.userDb(ctx)
groupList = new GroupList()
qrFile = ctx.getFileStreamPath("userkey_qr.png")
dataFile = new ToxDataFile(ctx, userDb.getActiveUser)
val udpEnabled = preferences.getBoolean("enable_udp", false)
val proxyOptions = ProxyUtils.toxProxyFromPreferences(preferences)
val options = ToxOptions(
ipv6Enabled = Options.ipv6Enabled,
proxy = proxyOptions,
udpEnabled = udpEnabled,
saveData = dataFile.loadAsSaveType())
try {
tox = new ToxCore(groupList, options)
if (!dataFile.doesFileExist()) dataFile.saveFile(tox.getSaveData)
val editor = preferences.edit()
editor.putString("tox_id", tox.getAddress.toString)
editor.commit()
State.db = new AntoxDB(ctx, userDb.getActiveUser, tox.getSelfKey)
val db = State.db
toxAv = new ToxAv(tox.getTox)
db.clearFileNumbers()
db.setAllOffline()
db.synchroniseWithTox(tox)
val details = userDb.getActiveUserDetails
tox.setName(details.nickname)
tox.setStatusMessage(details.statusMessage)
var newStatus: ToxUserStatus = ToxUserStatus.NONE
val newStatusString = details.status
newStatus = UserStatus.getToxUserStatusFromString(newStatusString)
tox.setStatus(newStatus)
Observable[Boolean](_ =>
if (preferences.getBoolean("enable_custom_node", false)) {
bootstrapFromCustomNode(preferences)
} else {
bootstrap(ctx)
})
.subscribeOn(IOScheduler())
.observeOn(NewThreadScheduler())
.subscribe()
} catch {
case e: Exception => e.printStackTrace()
}
}
def save(): Unit = {
dataFile.saveFile(tox.getSaveData)
}
}
| subliun/Antox | app/src/main/scala/chat/tox/antox/tox/ToxSingleton.scala | Scala | gpl-3.0 | 7,363 |
package one.lockstep.vault
import one.lockstep.lock.MultilockIntegrationFixtures
class MultilockVaultSpec extends AbstractVaultSpec with MultilockIntegrationFixtures {
override lazy val lockManagerFixtureProvider = MultilockManagerIntegFixture.provider
}
| lockstep-one/vault | vault-test/src/test/scala/one/lockstep/vault/MultilockVaultSpec.scala | Scala | agpl-3.0 | 259 |
package logs
opaque type Logarithm = Double
object Logarithm {
// These are the ways to lift to the logarithm type
def apply(d: Double): Logarithm = math.log(d)
def safe(d: Double): Option[Logarithm] =
if (d > 0.0) Some(math.log(d)) else None
// This is the first way to unlift the logarithm type
def exponent(l: Logarithm): Double = l
given AnyRef with {
// This is the second way to unlift the logarithm type
extension (x: Logarithm) def toDouble: Double = math.exp(x)
extension (x: Logarithm) def + (y: Logarithm) = Logarithm(math.exp(x) + math.exp(y))
extension (x: Logarithm) def * (y: Logarithm): Logarithm = Logarithm(x + y)
}
}
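
// A minimal usage sketch (added for illustration; not part of the original file).
// NOTE: inside this source file the opaque alias is transparent (Logarithm is
// seen as Double), so client-style code must live in a separate file; it is
// therefore shown here as a comment. It relies only on the public API above.
// @main def logarithmDemo(): Unit = {
//   val two = Logarithm(2.0)
//   val three = Logarithm(3.0)
//   val six = two * three          // `*` adds exponents: represents 2.0 * 3.0
//   val five = two + three         // `+` round-trips through exp/log: 2.0 + 3.0
//   println(six.toDouble)          // ~6.0, up to floating-point rounding
//   println(five.toDouble)         // ~5.0
//   println(Logarithm.safe(-1.0))  // None: non-positive inputs have no logarithm
// }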
| dotty-staging/dotty | tests/pos/toplevel-opaque-xm/Logarithm_1.scala | Scala | apache-2.0 | 677 |
package web
import akka.io.IO
import api.Api
import spray.can.Http
//trait RestWebServer extends CoreActors with Core with Api {
// this: Api with CoreActors with Core =>
trait RestWebServer {
this: Api =>
IO(Http)(system) ! Http.Bind(rootService, "0.0.0.0", port = 8080)
}
| mmilewski/14-marbles-game-scala | src/main/scala/web/RestWebServer.scala | Scala | apache-2.0 | 283 |
package pl.newicom.dddd.messaging.event
import org.joda.time.DateTime
case class EventMessageEntry(msg: OfficeEventMessage, position: Long, created: Option[DateTime])
| pawelkaczor/akka-ddd | akka-ddd-messaging/src/main/scala/pl/newicom/dddd/messaging/event/EventMessageEntry.scala | Scala | mit | 169 |
import org.scalatest._
/**
 * Contains tests for the graph-based functionality of the engine behind the
* language.
*/
class GraphTest /* extends FlatSpec with Matchers */ {
/**
* Tests the blocking a lane belonging to a street with a given name.
*/
// def blockLane(): Unit = {
// }
}
| dannydes/FYP | src/test/GraphTest.scala | Scala | gpl-2.0 | 295 |
val a: (1, 2, 3) = (1, 2, 3)
val b: (4, 5, 6) = (4, 5, 6)
val c: (7, 8) = (7, 8)
val d: Unit = ()
// Zip
val r1: ((1, 4), (2, 5), (3, 6)) = a.zip(b)
val r2: ((1, 7), (2, 8)) = a.zip(c)
val r3: ((7, 1), (8, 2)) = c.zip(a)
val r4: Unit = d.zip(a)
val r5: Unit = a.zip(d)
// Map
case class Foo[X](x: X)
val r6: (Int, Int, Int) = a.map[[t] =>> Int]([t] => (x: t) => x match {
case x: Int => x * x
case _ => ???
})
val r7: ((1, Foo[1]), (2, Foo[2]), (3, Foo[3])) =
a.map[[t] =>> (t, Foo[t])]( [t] => (x: t) => (x, Foo(x)) )
// More Zip
val t1: Int *: Long *: Tuple = (1, 2L, 100, 200)
val t2: Int *: Char *: Tuple = (1, 'c', 33, 42)
val t3: (Int, Int) *: (Long, Char) *: Tuple = t1.zip(t2)
val t4: Unit = d.zip(d)
@main def Test =
List(r1, r2, r3, r4, r5, r6, r7, t3, t4).foreach(println)
| som-snytt/dotty | tests/run/tuple-ops.scala | Scala | apache-2.0 | 799 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.plan.batch.sql.agg
import org.apache.flink.table.api.AggPhaseEnforcer.AggPhaseEnforcer
import org.apache.flink.table.api.{AggPhaseEnforcer, OperatorType, PlannerConfigOptions, TableConfigOptions}
import org.junit.Before
import org.junit.runner.RunWith
import org.junit.runners.Parameterized
import java.util
import scala.collection.JavaConversions._
@RunWith(classOf[Parameterized])
class SortAggregateTest(aggStrategy: AggPhaseEnforcer) extends AggregateTestBase {
@Before
def before(): Unit = {
// disable hash agg
util.tableEnv.getConfig.getConf.setString(
TableConfigOptions.SQL_EXEC_DISABLED_OPERATORS, OperatorType.HashAgg.toString)
util.tableEnv.getConfig.getConf.setString(
PlannerConfigOptions.SQL_OPTIMIZER_AGG_PHASE_ENFORCER, aggStrategy.toString)
}
}
object SortAggregateTest {
@Parameterized.Parameters(name = "aggStrategy={0}")
def parameters(): util.Collection[AggPhaseEnforcer] = {
Seq[AggPhaseEnforcer](
AggPhaseEnforcer.NONE,
AggPhaseEnforcer.ONE_PHASE,
AggPhaseEnforcer.TWO_PHASE
)
}
}
| shaoxuan-wang/flink | flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/plan/batch/sql/agg/SortAggregateTest.scala | Scala | apache-2.0 | 1,912 |
/*
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.flaminem.flamy.parsing.hive
import com.flaminem.flamy.conf.FlamyContext
import com.flaminem.flamy.parsing.model.TableDependency
/**
* Created by fpin on 7/30/15.
*/
object PopulateParser extends Parser[TableDependency] {
def unsafeParseQuery(query: String)(implicit context: FlamyContext): Seq[TableDependency] = {
PopulateParserInfo.parseQuery(query)
}
}
| flaminem/flamy | src/main/scala/com/flaminem/flamy/parsing/hive/PopulateParser.scala | Scala | apache-2.0 | 951 |
// Databricks notebook source
// MAGIC %md
// MAGIC ScaDaMaLe Course [site](https://lamastex.github.io/scalable-data-science/sds/3/x/) and [book](https://lamastex.github.io/ScaDaMaLe/index.html)
// COMMAND ----------
// MAGIC %md
// MAGIC #Signed Triads in Social Media
// COMMAND ----------
// MAGIC %md
// MAGIC By **Guangyi Zhang** ([email protected])
// MAGIC
// MAGIC Please click [HERE](https://drive.google.com/file/d/1TrxhdSxsU1qKk_SywKf2nUA8mUAO4CG4/view?usp=sharing) to watch the accompanying video.
// COMMAND ----------
// MAGIC %md
// MAGIC ## Introduction
// COMMAND ----------
// MAGIC %md
// MAGIC This project aims to verify the friend-foe motifs in a large-scale signed social network.
// MAGIC
// MAGIC A signed network is a graph that contains both positive and negative links.
// MAGIC The sign of a link contains rich semantics in different appliations.
// MAGIC For example, in a social network, positive links can indicate friendly relationships, while negative ones indicate antagonistic interactions.
// MAGIC
// MAGIC In on-line discussion sites such as Slashdot, users can tag other users as "friends" and "foes".
// MAGIC These provide us with exemplary datasets for studying an online signed network.
// MAGIC In this notebook we explore a dataset from Epinions, which contains up to 119,217 nodes, 841,200 edges, and millions of motifs.
// MAGIC Epinions is the trust network of the Epinions product review web site,
// MAGIC where users can indicate their trust or distrust of the reviews of others.
// MAGIC We analyze the network data in an undirected representation.
// MAGIC
// MAGIC References:
// MAGIC
// MAGIC Leskovec, Jure, Daniel Huttenlocher, and Jon Kleinberg. "Signed networks in social media." Proceedings of the SIGCHI conference on human factors in computing systems. 2010.
// COMMAND ----------
// MAGIC %md
// MAGIC Regarding the motifs, we investigate several interesting triads that are related to *structural balance theory* in an online social signed network.
// MAGIC Structural balance originates in social psychology in the mid-20th century, and considers the possible ways in which triangles on three individuals can be signed.
// MAGIC
// MAGIC Let us explain different types of triads, which is shown in the figure below,
// MAGIC
// MAGIC - T3: "the friend of my friend is my friend"
// MAGIC - T1: "the friend of my enemy is my enemy," "the enemy of my friend is my enemy," and "the enemy of my enemy is my friend"
// MAGIC - T2 and T0: these do not quite make sense in a social network. For example, two friends of mine are unlikely to be enemies of each other (see the sign-product sketch after the figure below).
// MAGIC
// MAGIC Our goal is to compare the numbers of different triads in our appointed dataset.
// MAGIC
// MAGIC
// MAGIC 
// COMMAND ----------
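// MAGIC %md
// MAGIC The four types can be summarized by a simple sign-product rule (sketched below; this aside is an addition for illustration, not part of the original analysis): a triad is balanced if and only if the product of its three edge signs is positive. T3 (+,+,+) and T1 (+,-,-) are balanced; T2 (+,+,-) and T0 (-,-,-) are imbalanced.

// COMMAND ----------

// Minimal illustration of the sign-product rule, using +1/-1 edge signs as in the dataset.
def isBalanced(s1: Int, s2: Int, s3: Int): Boolean = s1 * s2 * s3 > 0
assert(isBalanced(1, 1, 1))     // T3: all friends
assert(isBalanced(1, -1, -1))   // T1: two friends sharing a common enemy
assert(!isBalanced(1, 1, -1))   // T2
assert(!isBalanced(-1, -1, -1)) // T0

// COMMAND ----------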
// MAGIC %md
// MAGIC ## Download dataset
// COMMAND ----------
// MAGIC %sh
// MAGIC pwd
// COMMAND ----------
// MAGIC %sh
// MAGIC wget http://snap.stanford.edu/data/soc-sign-epinions.txt.gz
// COMMAND ----------
// MAGIC %sh
// MAGIC ls -l
// COMMAND ----------
// MAGIC %sh
// MAGIC gunzip soc-sign-epinions.txt.gz
// COMMAND ----------
// MAGIC %sh
// MAGIC ls -l
// COMMAND ----------
// MAGIC %sh
// MAGIC head soc-sign-epinions.txt
// COMMAND ----------
// MAGIC %sh
// MAGIC mkdir -p epinions
// MAGIC mv soc-sign-epinions.txt epinions/
// COMMAND ----------
// MAGIC %sh
// MAGIC ls -l /dbfs/FileStore
// MAGIC mv epinions /dbfs/FileStore/
// COMMAND ----------
// MAGIC %fs ls /
// COMMAND ----------
// MAGIC %fs ls /FileStore
// COMMAND ----------
// MAGIC %fs ls file:/databricks/driver
// COMMAND ----------
//%fs mv file:///databricks/driver/epinions /FileStore/

// COMMAND ----------

// MAGIC %fs ls /FileStore/epinions/
// COMMAND ----------
// MAGIC %md
// MAGIC ## Preprocess dataset
// COMMAND ----------
import org.apache.spark.sql._
import org.apache.spark.sql.functions._
import org.graphframes._
// This import is needed to use the $-notation
import spark.implicits._
// COMMAND ----------
var df = spark.read.format("csv")
// .option("header", "true")
.option("inferSchema", "true")
.option("comment", "#")
.option("sep", "\\t")
.load("/FileStore/epinions")
// COMMAND ----------
df.count()
// COMMAND ----------
df.rdd.getNumPartitions
// COMMAND ----------
df.head(3)
// COMMAND ----------
df.printSchema()
// COMMAND ----------
val newNames = Seq("src", "dst", "rela")
val e = df.toDF(newNames: _*)
// COMMAND ----------
e.printSchema()
// COMMAND ----------
// Vertex DataFrame
val v = spark.range(1, 131827).toDF("id")
// COMMAND ----------
val g = GraphFrame(v, e)
// COMMAND ----------
g.edges.take(3)
// COMMAND ----------
// MAGIC %md
// MAGIC ## Count triads
// COMMAND ----------
// val results = g.triangleCount.run()
// COMMAND ----------
// MAGIC %md
// MAGIC We cannot use the convenient `triangleCount()` API here because it does not take edge signs into consideration.
// MAGIC We need to write our own code to find triads.
// COMMAND ----------
// MAGIC %md
// MAGIC First, a triad should be undirected, but our graph consists of only directed edges.
// MAGIC
// MAGIC One strategy is to keep only bidirectional edges of the same sign.
// MAGIC But we need to examine how large a proportion of edges we would lose this way.
// COMMAND ----------
// Search for pairs of vertices with edges in both directions between them, i.e., find undirected or bidirected edges.
val pair = g.find("(a)-[e1]->(b); (b)-[e2]->(a)")
println(pair.count())
val filtered = pair.filter("e1.rela == e2.rela")
println(filtered.count())
// COMMAND ----------
// MAGIC %md
// MAGIC Fortunately, we lose only a very small number of edges.
// MAGIC
// MAGIC This also makes sense for this dataset: if A trusts B, it is quite unlikely that B does not trust A.
// COMMAND ----------
// MAGIC %md
// MAGIC In order to count different triads, first we have to find all triads.
// COMMAND ----------
val triad = g.find("(a)-[eab]->(b); (b)-[eba]->(a); (b)-[ebc]->(c); (c)-[ecb]->(b); (c)-[eca]->(a); (a)-[eac]->(c)")
println(triad.count())
// COMMAND ----------
// MAGIC %md
// MAGIC After finding all triads, we find each type by filtering.
// COMMAND ----------
val t111 = triad.filter("eab.rela = 1 AND eab.rela = ebc.rela AND ebc.rela = eca.rela")
println(t111.count())
// COMMAND ----------
val t000 = triad.filter("eab.rela = -1 AND eab.rela = ebc.rela AND ebc.rela = eca.rela")
println(t000.count())
// COMMAND ----------
val t110 = triad.filter("eab.rela + ebc.rela + eca.rela = 1")
println(t110.count())
// COMMAND ----------
val t001 = triad.filter("eab.rela + ebc.rela + eca.rela = -1")
println(t001.count())
// COMMAND ----------
val n111 = t111.count()
val n001 = t001.count()
val n000 = t000.count()
val n110 = t110.count()
val imbalanced = n000 + n110
val balanced = n111 + n001
// COMMAND ----------
// MAGIC %md
// MAGIC As we can see, the number of balanced triads far exceeds the number of imbalanced ones,
// MAGIC which supports the predictions of structural balance theory.
// COMMAND ----------
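// MAGIC %md
// MAGIC As a quick sanity check (an added sketch, not in the original notebook), we can also report the balanced fraction directly from the counts computed above.

// COMMAND ----------

// Fraction of balanced triads among all matched triads.
val balancedFraction = balanced.toDouble / (balanced + imbalanced)
println(f"balanced fraction: $balancedFraction%.4f")

// COMMAND ----------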
// MAGIC %md
// MAGIC ## Duplicates
// COMMAND ----------
// MAGIC %md
// MAGIC Some tests about duplicated motifs
// COMMAND ----------
val g: GraphFrame = examples.Graphs.friends
// COMMAND ----------
display(g.edges)
// COMMAND ----------
val motifs = g.find("(a)-[e]->(b); (b)-[e2]->(a)")
motifs.show()
// COMMAND ----------
// MAGIC %md
// MAGIC As shown above, bidirectional edges are reported twice, once per ordering of their endpoints.
// MAGIC Likewise, each triangle in our motif query is matched once per ordering of its three vertices, i.e., 3! = 6 times.
// MAGIC However, this does not matter in our project, because every triangle is over-counted by the same factor, so the ratios between different triad types remain the same.
// COMMAND ----------
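// MAGIC %md
// MAGIC A possible remedy (an added sketch, not part of the original notebook): impose a canonical ordering on vertex ids inside the motif result so that each undirected triangle is kept exactly once. This assumes numeric vertex ids, as in the vertex DataFrame used above.

// COMMAND ----------

// Illustrative only: keep one representative per triangle by requiring a.id < b.id < c.id.
val uniqueTriads = triad.filter("a.id < b.id AND b.id < c.id")
println(uniqueTriads.count())

// COMMAND ----------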
| lamastex/scalable-data-science | dbcArchives/2021/000_0-sds-3-x-projects/student-project-03_group-GuangyiZhang/01_triads.scala | Scala | unlicense | 7,811 |
package unfiltered.spec
import dispatch.classic._
import org.apache.http.client.HttpClient
import java.security.KeyStore
import java.io.FileInputStream
import org.apache.http.conn.ssl.SSLSocketFactory
import org.apache.http.conn.scheme.Scheme
/** Provides an Http client configured to handle ssl certs */
trait SecureClient {
val keyStorePath: String
val keyStorePasswd: String
val securePort: Int
val secureScheme = "https"
val logHttpsRequests = false
/** Silent, resource-managed http request executor which accepts
* non-ok status */
def xhttp[T](handler: Handler[T]): T = {
val h = if(logHttpsRequests) new Http else new Http with NoLogging
try { h.x(handler) }
finally { h.shutdown() }
}
private def secure(cli: HttpClient) = {
val keys = KeyStore.getInstance(KeyStore.getDefaultType)
unfiltered.util.IO.use(new FileInputStream(keyStorePath)) { in =>
keys.load(in, keyStorePasswd.toCharArray)
}
cli.getConnectionManager.getSchemeRegistry.register(
new Scheme(secureScheme, securePort, new SSLSocketFactory(keys))
)
cli
}
  /** Silent, resource-managed TLS-enabled http request executor */
def https[T](handler: => Handler[T]): T = {
val h = if(logHttpsRequests) new Http {
override def make_client =
secure(super.make_client)
} else new Http with NoLogging {
override def make_client =
secure(super.make_client)
}
try { h(handler) }
finally { h.shutdown() }
}
}
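
// A hedged usage sketch (added for illustration; not part of the original file).
// A concrete spec mixes in SecureClient and supplies the keystore details; the
// path, password, and port below are placeholder values, not real project config.
object ExampleSecureClient extends SecureClient {
  val keyStorePath = "src/test/resources/keystore.jks" // hypothetical path
  val keyStorePasswd = "changeit"                      // hypothetical password
  val securePort = 4567                                // hypothetical test port
  // e.g. https(url(s"https://localhost:$securePort/") as_str) would issue a
  // TLS request trusted via the keystore configured above.
}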
| beni55/unfiltered | specs2/src/main/scala/SecureClient.scala | Scala | mit | 1,500 |
package io.vamp.common.notification
import akka.actor.{AbstractLoggingActor, Actor, Props}
object LoggingNotificationActor {
def props: Props = Props[LoggingNotificationActor]
}
case class Error(notification: Notification, message: String)
case class Info(notification: Notification, message: String)
trait NotificationActor {
this: Actor =>
override def receive: Receive = {
case Error(notification, message) => error(notification, message)
case Info(notification, message) => info(notification, message)
}
def error(notification: Notification, message: String)
def info(notification: Notification, message: String)
}
class LoggingNotificationActor extends AbstractLoggingActor with NotificationActor {
override def error(notification: Notification, message: String): Unit = {
log.error(message)
}
override def info(notification: Notification, message: String): Unit = {
log.info(message)
}
}
| BanzaiMan/vamp-common | src/main/scala/io/vamp/common/notification/NotificationActor.scala | Scala | apache-2.0 | 939 |
/*
*************************************************************************************
* Copyright 2011 Normation SAS
*************************************************************************************
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* In accordance with the terms of section 7 (7. Additional Terms.) of
* the GNU Affero GPL v3, the copyright holders add the following
* Additional permissions:
* Notwithstanding to the terms of section 5 (5. Conveying Modified Source
* Versions) and 6 (6. Conveying Non-Source Forms.) of the GNU Affero GPL v3
* licence, when you create a Related Module, this Related Module is
* not considered as a part of the work and may be distributed under the
* license agreement of your choice.
* A "Related Module" means a set of sources files including their
* documentation that, without modification of the Source Code, enables
* supplementary functions or services in addition to those offered by
* the Software.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/agpl.html>.
*
*************************************************************************************
*/
package bootstrap.liftweb
package checks
import com.normation.rudder.domain.RudderDit
import com.normation.ldap.sdk._
import net.liftweb.common._
import com.normation.cfclerk.domain._
import com.normation.cfclerk.services._
import com.normation.rudder.repository._
import com.normation.rudder.domain.RudderLDAPConstants._
import com.normation.rudder.domain.policies._
import com.normation.utils.Control._
import org.joda.time.DateTime
import com.normation.inventory.ldap.core.LDAPConstants.A_OC
import com.normation.rudder.domain.eventlog.RudderEventActor
import com.normation.rudder.domain.logger.ApplicationLogger
import com.normation.utils.StringUuidGenerator
import com.normation.eventlog.ModificationId
/**
* That class add all the available reference template in
* the default user library
* if it wasn't already initialized.
*/
class CheckInitUserTemplateLibrary(
rudderDit : RudderDit
, ldap : LDAPConnectionProvider[RwLDAPConnection]
, refTemplateService : TechniqueRepository
, roDirectiveRepos : RoDirectiveRepository
, woDirectiveRepos : WoDirectiveRepository
, uuidGen : StringUuidGenerator
) extends BootstrapChecks with Loggable {
override def checks() : Unit = {
ldap.foreach { con =>
con.get(rudderDit.ACTIVE_TECHNIQUES_LIB.dn, A_INIT_DATETIME, A_OC) match {
case e:EmptyBox => ApplicationLogger.error("The root entry of the user template library was not found")
case Full(root) => root.getAsGTime(A_INIT_DATETIME) match {
case Some(date) => ApplicationLogger.debug("The root user template library was initialized on %s".format(date.dateTime.toString("YYYY/MM/dd HH:mm")))
case None =>
ApplicationLogger.info("The Active Technique library is not marked as being initialized: adding all policies from reference library...")
copyReferenceLib() match {
case Full(x) => ApplicationLogger.info("...done")
case eb:EmptyBox =>
                  val e = eb ?~! "Some errors were encountered during the initialization of the user library"
                  val msg = e.messageChain.split("<-").mkString("\n ->")
ApplicationLogger.warn(msg)
logger.debug(e.exceptionChain)
}
root += (A_OC, OC_ACTIVE_TECHNIQUE_LIB_VERSION)
root +=! (A_INIT_DATETIME, GeneralizedTime(DateTime.now()).toString)
con.save(root) match {
case eb:EmptyBox =>
val e = eb ?~! "Error when updating information about the LDAP root entry of technique library."
logger.error(e.messageChain)
e.rootExceptionCause.foreach { ex =>
logger.error("Root exception was: ", ex)
}
case _ => // nothing to do
}
}
}
} ; () //foreach should return Unit, see #2770
}
/**
* Actually copy from reference Directive lib to user lib.
*/
private[this] def copyReferenceLib() : Box[AnyRef] = {
def recCopyRef(fromCatId:TechniqueCategoryId, toParentCat:ActiveTechniqueCategory) : Box[ActiveTechniqueCategory] = {
for {
fromCat <- refTemplateService.getTechniqueCategory(fromCatId)
newUserPTCat = ActiveTechniqueCategory(
id = genUserCatId(fromCat)
, name = fromCat.name
, description = fromCat.description
, children = Nil
, items = Nil
)
res <- if(fromCat.isSystem) { //Rudder internal Technique category are handle elsewhere
Full(newUserPTCat)
} else {
for {
updatedParentCat <- woDirectiveRepos.addActiveTechniqueCategory(
newUserPTCat
, toParentCat
, ModificationId(uuidGen.newUuid)
, RudderEventActor
, reason = Some("Initialize active templates library")) ?~!
"Error when adding category '%s' to user library parent category '%s'".format(newUserPTCat.id.value, toParentCat.id.value)
//now, add items and subcategories, in a "try to do the max you can" way
fullRes <- boxSequence(
//Techniques
bestEffort(fromCat.packageIds.groupBy(id => id.name).toSeq) { case (name, ids) =>
for {
activeTechnique <- woDirectiveRepos.addTechniqueInUserLibrary(
newUserPTCat.id
, name
, ids.map( _.version).toSeq
, ModificationId(uuidGen.newUuid)
, RudderEventActor, reason = Some("Initialize active templates library")) ?~!
"Error when adding Technique '%s' into user library category '%s'".format(name.value, newUserPTCat.id.value)
} yield {
activeTechnique
}
} ::
//recurse on children categories of reference lib
bestEffort(fromCat.subCategoryIds.toSeq) { catId => recCopyRef(catId, newUserPTCat) } ::
Nil
)
} yield {
fullRes
}
}
} yield {
newUserPTCat
}
}
//apply with root cat children ids
roDirectiveRepos.getActiveTechniqueLibrary.flatMap { root =>
bestEffort(refTemplateService.getTechniqueLibrary.subCategoryIds.toSeq) { id =>
recCopyRef(id, root)
}
}
}
private[this] def genUserCatId(fromCat:TechniqueCategory) : ActiveTechniqueCategoryId = {
//for the technique ID, use the last part of the path used for the cat id.
ActiveTechniqueCategoryId(fromCat.id.name.value)
}
}
| jooooooon/rudder | rudder-web/src/main/scala/bootstrap/liftweb/checks/CheckInitUserTemplateLibrary.scala | Scala | agpl-3.0 | 7,611 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.hive.execution
import java.io.File
import org.apache.spark.sql.catalyst.util._
/**
* A framework for running the query tests that are listed as a set of text files.
*
* TestSuites that derive from this class must provide a map of testCaseName -> testCaseFiles
* that should be included. Additionally, there is support for whitelisting and blacklisting
* tests as development progresses.
*/
abstract class HiveQueryFileTest extends HiveComparisonTest {
/** A list of tests deemed out of scope and thus completely disregarded */
def blackList: Seq[String] = Nil
/**
   * The set of tests that are believed to be working in catalyst. Tests not in the
   * whiteList or blackList are implicitly marked as ignored.
*/
def whiteList: Seq[String] = ".*" :: Nil
def testCases: Seq[(String, File)]
val runAll: Boolean =
!(System.getProperty("spark.hive.alltests") == null) ||
runOnlyDirectories.nonEmpty ||
skipDirectories.nonEmpty
val whiteListProperty: String = "spark.hive.whitelist"
// Allow the whiteList to be overridden by a system property
val realWhiteList: Seq[String] =
Option(System.getProperty(whiteListProperty)).map(_.split(",").toSeq).getOrElse(whiteList)
// Go through all the test cases and add them to scala test.
testCases.sorted.foreach {
case (testCaseName, testCaseFile) =>
if (blackList.map(_.r.pattern.matcher(testCaseName).matches()).reduceLeft(_||_)) {
logDebug(s"Blacklisted test skipped $testCaseName")
} else if (realWhiteList.map(_.r.pattern.matcher(testCaseName).matches()).reduceLeft(_||_) ||
runAll) {
// Build a test case and submit it to scala test framework...
val queriesString = fileToString(testCaseFile)
createQueryTest(testCaseName, queriesString, reset = true, tryWithoutResettingFirst = true)
} else {
        // Only output warnings for the built-in whitelist as this clutters the output when the
        // user is trying to execute a single test from the command line.
if (System.getProperty(whiteListProperty) == null && !runAll) {
ignore(testCaseName) {}
}
}
}
}
| Panos-Bletsos/spark-cost-model-optimizer | sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQueryFileTest.scala | Scala | apache-2.0 | 2,974 |
package geotrellis.transit
import com.vividsolutions.jts.index.strtree.{STRtree, ItemDistance, ItemBoundable}
import com.vividsolutions.jts.index.strtree.ItemDistance
import com.vividsolutions.jts.geom.Coordinate
import com.vividsolutions.jts.geom.Envelope
import scala.collection.mutable
import scala.collection.JavaConversions._
import geotrellis.vector.Extent
// object SpatialIndex {
// def apply(points:Iterable[(Double,Double)])
// (implicit di:DummyImplicit):SpatialIndex[(Double,Double)] = {
// val si = new SpatialIndex[(Double,Double)](Measure.Dumb)
// for(point <- points) {
// si.insert(point, point._1, point._2)
// }
// si
// }
// def apply[T](points:Iterable[T])(f:T=>(Double,Double)):SpatialIndex[T] = {
// val si = new SpatialIndex[T](Measure.Dumb)
// for(point <- points) {
// val (x,y) = f(point)
// si.insert(point, x, y)
// }
// si
// }
// }
// class SpatialIndex[T](val measure:Measure) extends Serializable {
// val rtree = new STRtree
// val points = mutable.Set[T]()
// def insert(v:T, x: Double, y: Double) = {
// rtree.insert(new Envelope(new Coordinate(x,y)), v)
// points.add(v)
// }
// def nearest(x:Double,y:Double):T = {
// rtree.nearestNeighbour(new Envelope(new Coordinate(x,y)),null,measure).asInstanceOf[T]
// }
// def nearest(pt:(Double,Double)):T = {
// val e = new Envelope(new Coordinate(pt._1,pt._2))
// rtree.nearestNeighbour(e,null,measure).asInstanceOf[T]
// }
// // def nearestInExtent(extent:Extent,pt:(Double,Double)):Option[T] = {
// // val l = pointsInExtent(extent)
// // if(l.isEmpty) { None }
// // else {
// // var nearest = l.head
// // var minDist = {
// // val (x,y) = f(nearest)
// // measure.distance(x,y,pt._1,pt._2)
// // }
// // for(t <- l.tail) {
// // val (x,y) = f(t)
// // val d = measure.distance(pt._1,pt._2,x,y)
// // if(d < minDist) {
// // nearest = t
// // minDist = d
// // }
// // }
// // Some(nearest)
// // }
// // }
// def pointsInExtent(extent:Extent):Seq[T] = {
// rtree.query(new Envelope(extent.ymin,extent.ymax,extent.xmin,extent.xmax))
// .map(_.asInstanceOf[T])
// }
// def pointsInExtentAsJavaList(extent:Extent):List[_] = {
// rtree.query(new Envelope(extent.ymin,extent.ymax,extent.xmin,extent.xmax)).toList
// }
// // def mergeIn(other:SpatialIndex[T]) = {
// // for(point <- other.points) { insert(point) }
// // this
// // }
// }
// object Measure {
// def Dumb = new DumbMeasure
// }
// trait Measure extends ItemDistance with Serializable {
// def distance(x1:Double,y1:Double,x2:Double,y2:Double):Double
// def distance(i1:ItemBoundable, i2:ItemBoundable):Double = {
// val bound1 = i1.getBounds.asInstanceOf[Envelope]
// val bound2 = i2.getBounds.asInstanceOf[Envelope]
// distance(bound1.getMinX,bound1.getMinY,bound2.getMinX,bound2.getMinY)
// }
// }
// class DumbMeasure() extends Measure {
// def distance(x1:Double,y1:Double,x2:Double,y2:Double):Double = {
// val x = x2 - x1
// val y = y2 - y1
// math.sqrt(x*x + y*y)
// }
// }
| WorldBank-Transport/open-transit-indicators | scala/geotrellis-transit/src/main/scala/geotrellis/transit/SpatialIndex.scala | Scala | gpl-3.0 | 3,251 |
package threesbrain.game.core
object Move extends Enumeration {
type Move = Value
val Up, Down, Left, Right = Value
}
| zommerfelds/threes-brain | src/main/scala/threesbrain/game/core/Move.scala | Scala | mit | 127 |
package org.jetbrains.plugins.scala
package codeInsight
package intention
package types
import com.intellij.codeInsight.intention.PsiElementBaseIntentionAction
import com.intellij.openapi.command.undo.UndoUtil
import com.intellij.openapi.editor.Editor
import com.intellij.openapi.project.Project
import com.intellij.psi.PsiElement
import com.intellij.psi.util.PsiTreeUtil
import org.jetbrains.plugins.scala.extensions._
import org.jetbrains.plugins.scala.lang.psi.api.base.ScStableCodeReferenceElement
import org.jetbrains.plugins.scala.lang.psi.api.base.types.{ScParenthesisedTypeElement, ScReferenceableInfixTypeElement}
/** Converts type element `(A @@ B)` to `@@[A, B]` */
class ConvertFromInfixIntention extends PsiElementBaseIntentionAction {
def getFamilyName = "Use Prefix Type Syntax"
override def getText: String = getFamilyName
def isAvailable(project: Project, editor: Editor, element: PsiElement): Boolean = {
element match {
case Parent(Both(_: ScStableCodeReferenceElement, Parent(Parent(_: ScReferenceableInfixTypeElement)))) => true
case _ => false
}
}
override def invoke(project: Project, editor: Editor, element: PsiElement) {
    val infixTypeElement = PsiTreeUtil.getParentOfType(element, classOf[ScReferenceableInfixTypeElement], false)
    if (infixTypeElement == null) return
    val elementToReplace = infixTypeElement.getParent match {
      case x: ScParenthesisedTypeElement => x
      case _ => infixTypeElement
    }
infixTypeElement.computeDesugarizedType match {
case Some(replacement) =>
elementToReplace.replace(replacement)
UndoUtil.markPsiFileForUndo(replacement.getContainingFile)
case _ =>
}
}
}
| loskutov/intellij-scala | src/org/jetbrains/plugins/scala/codeInsight/intention/types/ConvertFromInfixIntention.scala | Scala | apache-2.0 | 1,700 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.api.scala
/**
* Custom objects for use in testing loading of external jars in [[ScalaShellITCase]].
*/
package object jar {
}
| GJL/flink | flink-scala-shell/src/test/scala/org/apache/flink/api/scala/jar/package.scala | Scala | apache-2.0 | 961 |
package scala.meta
package ui
import org.scalameta.adt._
import org.scalameta.show._
import org.scalameta.invariants._
import org.scalameta.unreachable
import Show.{ sequence => s, repeat => r, indent => i, newline => n }
import scala.{Seq => _}
import scala.collection.immutable.Seq
import scala.meta.internal.ast._
import scala.{meta => api}
import scala.meta.syntactic.Token
import scala.annotation.implicitNotFound
import scala.collection.mutable
import scala.meta.internal.semantic._
import scala.compat.Platform.EOL
import scala.language.implicitConversions
@implicitNotFound(msg = "don't know how to show[Semantics] for ${T}")
trait Semantics[T] extends Show[T]
object Semantics {
def apply[T](f: T => Show.Result): Semantics[T] = new Semantics[T] { def apply(input: T) = f(input) }
@root trait Style
object Style {
@leaf object Shallow extends Style
@leaf implicit object Deep extends Style
}
// TODO: would be nice to generate this with a macro for all tree nodes that we have
implicit def semanticsTree[T <: api.Tree](implicit style: Style): Semantics[T] = new Semantics[T] {
object footnotes {
trait Footnote {
def entity: Any
def tag: Class[_]
def prettyprint(): String
final override def toString: String = s"Footnote($entity)"
final override def equals(that: Any): Boolean = entity.equals(that)
final override def hashCode: Int = entity.hashCode()
}
object Footnote {
implicit def denotFootnote(denot: Denotation): Footnote = new Footnote {
def entity = denot
def tag = classOf[Denotation]
def prettyprint() = {
def prettyprintPrefix(pre: Prefix): String = {
pre match {
case Prefix.Zero => "0"
case Prefix.Type(tpe) => if (style == Style.Deep) body(tpe) else tpe.show[Raw]
}
}
def prettyprintSymbol(sym: Symbol): String = {
def loop(sym: Symbol): String = sym match {
case Symbol.Zero => "0"
case Symbol.Root => "_root_"
case Symbol.Empty => "_empty_"
case Symbol.Global(owner, name, Signature.Type) => loop(owner) + "#" + name
case Symbol.Global(owner, name, Signature.Term) => loop(owner) + "." + name
case Symbol.Global(owner, name, Signature.Method(jvmSignature)) => loop(owner) + "." + name + jvmSignature
case Symbol.Global(owner, name, Signature.TypeParameter) => loop(owner) + "[" + name + "]"
case Symbol.Global(owner, name, Signature.TermParameter) => loop(owner) + "(" + name + ")"
case Symbol.Local(id) => "local#" + id
}
var result = loop(sym)
if (result != "_root_") result = result.stripPrefix("_root_.")
result
}
prettyprintPrefix(denot.prefix) + "::" + prettyprintSymbol(denot.symbol)
}
}
implicit def typingFootnote(typing: Typing): Footnote = new Footnote {
def entity = typing
def tag = classOf[Typing]
def prettyprint() = typing match {
case Typing.Unknown => unreachable
case Typing.Known(tpe) => if (style == Style.Deep) body(tpe) else tpe.show[Raw]
}
}
implicit def statusExpansion(expansion: Expansion): Footnote = new Footnote {
def entity = expansion
def tag = classOf[Expansion]
def prettyprint() = expansion match {
case Expansion.Identity => unreachable
case Expansion.Desugaring(term) => if (style == Style.Deep) body(term) else term.show[Raw]
}
}
}
private var size = 0
private val repr = mutable.Map[Class[_], mutable.Map[Any, (Int, Footnote)]]()
def insert[T <% Footnote](x: T): Int = {
val footnote = implicitly[T => Footnote].apply(x)
val miniRepr = repr.getOrElseUpdate(footnote.tag, mutable.Map[Any, (Int, Footnote)]())
if (!miniRepr.contains(x)) size += 1
val maxId = (miniRepr.values.map(_._1) ++ List(0)).max
miniRepr.getOrElseUpdate(x, (maxId + 1, footnote))._1
}
override def toString: String = {
if (style == Style.Deep) {
var prevSize = 0 // NOTE: prettyprint may side-effect on footnotes
do {
prevSize = size
val stableMinis = repr.toList.sortBy(_._1.getName).map(_._2)
val stableFootnotes = stableMinis.flatMap(_.toList.sortBy(_._2._1).map(_._2._2))
stableFootnotes.foreach(_.prettyprint())
} while (size != prevSize)
}
def byType(tag: Class[_], bracket1: String, bracket2: String): List[String] = {
val miniRepr = repr.getOrElseUpdate(tag, mutable.Map[Any, (Int, Footnote)]())
val sortedMiniCache = miniRepr.toList.sortBy{ case (_, (id, footnote)) => id }
sortedMiniCache.map{ case (_, (id, footnote)) => s"$bracket1$id$bracket2 ${footnote.prettyprint()}" }
}
(byType(classOf[Denotation], "[", "]") ++ byType(classOf[Typing], "{", "}") ++ byType(classOf[Expansion], "<", ">")).mkString(EOL)
}
}
def body(x: api.Tree): String = {
def whole(x: Any): String = x match {
case x: String => enquote(x, DoubleQuotes)
case x: api.Tree => body(x)
case x: Nil.type => "Nil"
case x: List[_] => "List(" + x.map(whole).mkString(", ") + ")"
case x: None.type => "None"
case x: Some[_] => "Some(" + whole(x.get) + ")"
case x => x.toString
}
def contents(x: api.Tree): String = x match {
case x: Lit.String => enquote(x.value, DoubleQuotes)
case x: Lit => import scala.meta.dialects.Scala211; x.show[Code]
case x => x.productIterator.map(whole).mkString(", ")
}
val syntax = x.productPrefix + "(" + contents(x) + ")"
val semantics = {
val denotPart = x match {
case x: Name =>
x.denot match {
case Denotation.Zero => ""
case denot @ Denotation.Single(prefix, symbol) => s"[${footnotes.insert(denot)}]"
}
case _ =>
""
}
val statusPart = x match {
case x: Term =>
x.typing match {
case Typing.Unknown => ""
case typing @ Typing.Known(tpe) => s"{${footnotes.insert(typing)}}"
}
case _ =>
""
}
val expansionPart = x match {
case x: Term =>
x.expansion match {
case Expansion.Identity => ""
case expansion @ Expansion.Desugaring(term) => s"<${footnotes.insert(expansion)}>"
}
case _ =>
""
}
denotPart + statusPart + expansionPart
}
syntax + semantics
}
def apply(x: T): Show.Result = {
val bodyPart = body(x) // NOTE: body may side-effect on footnotes
val footnotePart = footnotes.toString
s(bodyPart, if (footnotePart.nonEmpty) EOL + footnotePart else footnotePart)
}
}
}
| mdemarne/scalameta | scalameta/src/main/scala/scala/meta/ui/ShowSemantics.scala | Scala | bsd-3-clause | 7,165 |
package sangria.schema
import language.existentials
import sangria.ast.Document
import sangria.schema.SchemaChange._
import sangria.macros._
import scala.reflect.ClassTag
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AnyWordSpec
class SchemaComparatorSpec extends AnyWordSpec with Matchers {
"SchemaComparator" should {
val QueryType = ObjectType(
"Query",
fields[Unit, Unit](Field("field1", OptionType(StringType), resolve = _ => "foo")))
"should detect if a type was removed or added" in {
val type1 = ObjectType(
"Type1",
fields[Unit, Unit](Field("field1", OptionType(StringType), resolve = _ => "foo")))
val type2 = ObjectType(
"Type2",
fields[Unit, Unit](Field("field1", OptionType(StringType), resolve = _ => "foo")))
val oldSchema = Schema(QueryType, additionalTypes = type1 :: type2 :: Nil)
val newSchema = Schema(QueryType, additionalTypes = type2 :: Nil)
assertChanges(
newSchema.compare(oldSchema),
breakingChange[TypeRemoved]("`Type1` type was removed"))
assertChanges(
oldSchema.compare(newSchema),
nonBreakingChange[TypeAdded]("`Type1` type was added"))
oldSchema.compare(oldSchema) should be(Vector.empty)
}
"should detect if a type changed its kind" in checkChanges(
graphql"""
interface Type1 {field1: String}
type Foo implements Type1 {
test: String
}
""",
graphql"""
type ObjectType {field1: String}
union Type1 = ObjectType
type Foo {
test: String
}
""",
nonBreakingChange[TypeAdded]("`ObjectType` type was added"),
breakingChange[TypeKindChanged]("`Type1` changed from an Interface type to a Union type"),
breakingChange[ObjectTypeInterfaceRemoved](
"`Foo` object type no longer implements `Type1` interface"),
breakingChange[FieldRemoved]("Field `field1` was removed from `Foo` type")
)
"detect if a type description changed " in checkChanges(
graphql"""
"normal type"
type ObjectType {field1: String}
""",
graphql"""
"Cool type"
type ObjectType {field1: String}
""",
nonBreakingChange[TypeDescriptionChanged]("`ObjectType` type description is changed")
)
"should detect changes in enum values" in checkChanges(
graphql"""
enum Foo {
A, B, C
}
""",
graphql"""
enum Foo {
B @deprecated(reason: "Should not be used anymore")
"The `B`\\nvalue!"
C, D
}
""",
breakingChange[EnumValueRemoved]("Enum value `A` was removed from enum `Foo`"),
nonBreakingChange[EnumValueAdded]("Enum value `D` was added to enum `Foo`"),
nonBreakingChange[EnumValueDescriptionChanged]("`Foo.C` description changed"),
nonBreakingChange[EnumValueDeprecated]("Enum value `B` was deprecated in enum `Foo`"),
nonBreakingChange[EnumValueAstDirectiveAdded](
"Directive `@deprecated(reason:\\"Should not be used anymore\\")` added on an enum value `Foo.B`")
)
"should detect changes in unions" in checkChanges(
graphql"""
type Foo {f: String}
type Bar {descr: String}
union Agg = Foo | Bar
""",
graphql"""
type Bar {descr: String}
type Baz {descr: String}
"Hello"
union Agg = Bar | Baz
""",
nonBreakingChange[TypeAdded]("`Baz` type was added"),
breakingChange[TypeRemoved]("`Foo` type was removed"),
breakingChange[UnionMemberRemoved]("`Foo` type was removed from union `Agg`"),
nonBreakingChange[UnionMemberAdded]("`Baz` type was added to union `Agg`"),
nonBreakingChange[TypeDescriptionChanged]("`Agg` type description is changed")
)
"should detect changes in scalars" in checkChanges(
graphql"""
scalar Date
scalar Locale
""",
graphql"""
"This is locale"
scalar Locale
"This is country"
scalar Country
""",
breakingChange[TypeRemoved]("`Date` type was removed"),
nonBreakingChange[TypeAdded]("`Country` type was added"),
nonBreakingChange[TypeDescriptionChanged]("`Locale` type description is changed")
)
"should detect changes in directives" in checkChanges(
graphql"""
directive @foo(a: String, b: Int!) on FIELD_DEFINITION | ENUM
directive @bar on FIELD_DEFINITION
""",
graphql"""
"This is foo"
directive @foo(
"first arg"
a: String,
c: Int) on FIELD_DEFINITION | INPUT_OBJECT
"This is baz"
directive @baz on FIELD_DEFINITION
""",
breakingChange[DirectiveRemoved]("`bar` directive was removed"),
nonBreakingChange[DirectiveAdded]("`baz` directive was added"),
nonBreakingChange[DirectiveDescriptionChanged]("`foo` directive description is changed"),
nonBreakingChange[DirectiveArgumentDescriptionChanged]("`foo(a)` description is changed"),
breakingChange[DirectiveArgumentRemoved]("Argument `b` was removed from `foo` directive"),
nonBreakingChange[DirectiveLocationAdded](
"`InputObject` directive location added to `foo` directive"),
breakingChange[DirectiveLocationRemoved](
"`Enum` directive location removed from `foo` directive"),
nonBreakingChange[DirectiveArgumentAdded]("Argument `c` was added to `foo` directive")
)
"should detect changes in input types" in checkChanges(
graphql"""
input Sort {dir: Int}
input Bar {size: Int}
""",
graphql"""
"This is sort"
input Sort {dir: Int}
"This is foo"
input Foo {size: Int}
""",
breakingChange[TypeRemoved]("`Bar` type was removed"),
nonBreakingChange[TypeAdded]("`Foo` type was added"),
nonBreakingChange[TypeDescriptionChanged]("`Sort` type description is changed")
)
"detect changes in input type fields when they are added or removed" in checkChanges(
graphql"""
input Filter {
name: String!
descr: String
}
""",
graphql"""
"search filter"
input Filter {
"filter by name"
name: String!
"filter by size"
size: Int
}
""",
nonBreakingChange[TypeAdded]("`Int` type was added"),
breakingChange[InputFieldRemoved]("Input field `descr` was removed from `Filter` type"),
nonBreakingChange[InputFieldAdded]("Input field `size` was added to `Filter` type"),
nonBreakingChange[InputFieldDescriptionChanged]("`Filter.name` description is changed"),
nonBreakingChange[TypeDescriptionChanged]("`Filter` type description is changed")
)
"detect changes in object like type fields and interfaces when they are added or removed" in checkChanges(
graphql"""
interface I1 {
name: String!
}
interface I2 {
descr: String
}
type Filter implements I1 & I2 {
name: String!
descr: String
foo: [Int]
}
""",
graphql"""
interface I1 {
bar: Int
}
interface I3 {
descr: String
id: ID
}
type Filter implements I1 & I3 {
bar: Int
descr: String
id: ID
}
""",
breakingChange[TypeRemoved]("`I2` type was removed"),
nonBreakingChange[TypeAdded]("`ID` type was added"),
nonBreakingChange[TypeAdded]("`I3` type was added"),
breakingChange[ObjectTypeInterfaceRemoved](
"`Filter` object type no longer implements `I2` interface"),
nonBreakingChange[ObjectTypeInterfaceAdded](
"`Filter` object type now implements `I3` interface"),
breakingChange[FieldRemoved]("Field `name` was removed from `Filter` type"),
breakingChange[FieldRemoved]("Field `foo` was removed from `Filter` type"),
nonBreakingChange[FieldAdded]("Field `id` was added to `Filter` type"),
nonBreakingChange[FieldAdded]("Field `bar` was added to `Filter` type"),
nonBreakingChange[FieldAdded]("Field `bar` was added to `I1` type"),
breakingChange[FieldRemoved]("Field `name` was removed from `I1` type")
)
"detect changes in object type arguments" in checkChanges(
graphql"""
type Filter {
foo(
a: String!
b: String
b1: String
c: [String]
): String!
}
""",
graphql"""
type Filter {
foo(
"descr"
a: String = "foo"
b: [String]
b1: String!
c: [String]!
d: Int
e: Int!
): String!
}
""",
nonBreakingChange[TypeAdded]("`Int` type was added"),
breakingChange[ObjectTypeArgumentTypeChanged](
"`Filter.foo(b)` type changed from `String` to `[String]`"),
nonBreakingChange[ObjectTypeArgumentAdded]("Argument `d` was added to `Filter.foo` field"),
breakingChange[ObjectTypeArgumentTypeChanged](
"`Filter.foo(b1)` type changed from `String` to `String!`"),
nonBreakingChange[ObjectTypeArgumentTypeChanged](
"`Filter.foo(a)` type changed from `String!` to `String`"),
breakingChange[ObjectTypeArgumentTypeChanged](
"`Filter.foo(c)` type changed from `[String]` to `[String]!`"),
breakingChange[ObjectTypeArgumentAdded]("Argument `e` was added to `Filter.foo` field"),
nonBreakingChange[ObjectTypeArgumentDefaultChanged](
"`Filter.foo(a)` default value changed from none to `\\"foo\\"`"),
nonBreakingChange[ObjectTypeArgumentDescriptionChanged](
"`Filter.foo(a)` description is changed")
)
"detect changes in input type fields default value changes" in checkChanges(
graphql"""
input Filter {
a: [String!] = ["hello", "world"]
size: Int
color: Int = 5
type: [Int] = [1, 2, 3]
}
""",
graphql"""
input Filter {
a: [String!] = ["foo"]
size: Int = 12
color: String = "red"
type: [Int!] = [1, 2, 3]
}
""",
breakingChange[InputFieldTypeChanged](
"""`Filter.color` input field type changed from `Int` to `String`"""),
breakingChange[InputFieldTypeChanged](
"""`Filter.type` input field type changed from `[Int]` to `[Int!]`"""),
nonBreakingChange[InputFieldDefaultChanged](
"""`Filter.a` default value changed from `["hello","world"]` to `["foo"]`"""),
nonBreakingChange[InputFieldDefaultChanged](
"`Filter.size` default value changed from none to `12`"),
nonBreakingChange[InputFieldDefaultChanged](
"""`Filter.color` default value changed from `5` to `"red"`""")
)
"detect breaking and non-breaking changes in input type fields type" in checkChanges(
graphql"""
input Filter {
a: String!
b: String
b1: String
c: [String]
}
""",
graphql"""
input Filter {
a: String
b: [String]
b1: String!
c: [String]!
d: Int
e: Int!
}
""",
nonBreakingChange[TypeAdded]("`Int` type was added"),
breakingChange[InputFieldAdded]("Input field `e` was added to `Filter` type"),
nonBreakingChange[InputFieldAdded]("Input field `d` was added to `Filter` type"),
breakingChange[InputFieldTypeChanged](
"`Filter.b` input field type changed from `String` to `[String]`"),
nonBreakingChange[InputFieldTypeChanged](
"`Filter.a` input field type changed from `String!` to `String`"),
breakingChange[InputFieldTypeChanged](
"`Filter.c` input field type changed from `[String]` to `[String]!`"),
breakingChange[InputFieldTypeChanged](
"`Filter.b1` input field type changed from `String` to `String!`")
)
"detect changes in schema definition" in checkChangesWithoutQueryType(
graphql"""
type Query {
foo: String
}
""",
graphql"""
type Foo {
foo: String
}
type Mut {
bar: Int!
}
type Subs {
bar: Int!
}
schema {
query: Foo
mutation: Mut
subscription: Subs
}
""",
breakingChange[TypeRemoved]("`Query` type was removed"),
nonBreakingChange[TypeAdded]("`Mut` type was added"),
nonBreakingChange[TypeAdded]("`Int` type was added"),
nonBreakingChange[TypeAdded]("`Subs` type was added"),
nonBreakingChange[TypeAdded]("`Foo` type was added"),
breakingChange[SchemaQueryTypeChanged](
"Schema query type changed from `Query` to `Foo` type"),
nonBreakingChange[SchemaMutationTypeChanged](
"Schema mutation type changed from none to `Mut` type"),
nonBreakingChange[SchemaSubscriptionTypeChanged](
"Schema subscription type changed from none to `Subs` type")
)
"detect breaking changes in schema definition" in checkChangesWithoutQueryType(
graphql"""
type Query {
foo: String
}
type Mut {
bar: Int!
}
type Subs {
bar: Int!
}
schema {
query: Query
mutation: Mut
subscription: Subs
}
""",
graphql"""
type Query {
foo: String
}
type Subs1 {
bar: Int!
}
schema {
query: Query
subscription: Subs1
}
""",
breakingChange[TypeRemoved]("`Mut` type was removed"),
breakingChange[TypeRemoved]("`Subs` type was removed"),
nonBreakingChange[TypeAdded]("`Subs1` type was added"),
breakingChange[SchemaMutationTypeChanged](
"Schema mutation type changed from `Mut` to none type"),
breakingChange[SchemaSubscriptionTypeChanged](
"Schema subscription type changed from `Subs` to `Subs1` type")
)
"detect changes in field AST directives" in checkChangesWithoutQueryType(
gql"""
type Query {
foo: String @foo
}
""",
gql"""
type Query {
foo: String @bar(ids: [1, 2])
}
""",
nonBreakingChange[FieldAstDirectiveAdded](
"Directive `@bar(ids:[1,2])` added on a field `Query.foo`"),
nonBreakingChange[FieldAstDirectiveRemoved](
"Directive `@foo` removed from a field `Query.foo`")
)
"detect changes in argument AST directives" in checkChangesWithoutQueryType(
gql"""
type Query {
foo(bar: String @foo): String
}
directive @test(bar: String @hello) on FIELD
""",
gql"""
type Query {
foo(bar: String @bar(ids: [1, 2])): String
}
directive @test(bar: String @world) on FIELD
""",
nonBreakingChange[FieldArgumentAstDirectiveAdded](
"Directive `@bar(ids:[1,2])` added on a field argument `Query.foo[bar]`"),
nonBreakingChange[FieldArgumentAstDirectiveRemoved](
"Directive `@foo` removed from a field argument `Query.foo[bar]`"),
nonBreakingChange[DirectiveArgumentAstDirectiveRemoved](
"Directive `@hello` removed from a directive argument `test.bar`"),
nonBreakingChange[DirectiveArgumentAstDirectiveAdded](
"Directive `@world` added on a directive argument `test.bar`")
)
"detect changes in input field AST directives" in checkChangesWithoutQueryType(
gql"""
type Query {a: Int}
input Foo {
foo: String = "test" @foo @baz(s: "string")
}
""",
gql"""
type Query {a: Int}
input Foo {
foo: String @baz(s: "string") @bar(ids: [1, 2])
}
""",
nonBreakingChange[InputFieldAstDirectiveAdded](
"Directive `@bar(ids:[1,2])` added on an input field `Foo.foo`"),
nonBreakingChange[InputFieldAstDirectiveRemoved](
"Directive `@foo` removed from a input field `Foo.foo`"),
nonBreakingChange[InputFieldDefaultChanged](
"`Foo.foo` default value changed from `\\"test\\"` to none")
)
"detect changes in enum values AST directives" in checkChangesWithoutQueryType(
gql"""
type Query {a: Int}
enum Foo {
A @foo
}
""",
gql"""
type Query {a: Int}
enum Foo {
A @bar(ids: [1, 2])
}
""",
nonBreakingChange[EnumValueAstDirectiveAdded](
"Directive `@bar(ids:[1,2])` added on an enum value `Foo.A`"),
nonBreakingChange[EnumValueAstDirectiveRemoved](
"Directive `@foo` removed from a enum value `Foo.A`")
)
"detect changes in schema AST directives" in checkChangesWithoutQueryType(
gql"""
type Query {
foo: String
}
schema @foo {
query: Query
}
""",
gql"""
type Query {
foo: String
}
schema @bar(ids: [1, 2]) {
query: Query
}
""",
nonBreakingChange[SchemaAstDirectiveAdded]("Directive `@bar(ids:[1,2])` added on a schema"),
nonBreakingChange[SchemaAstDirectiveRemoved]("Directive `@foo` removed from a schema")
)
"detect changes in schema description" in checkChangesWithoutQueryType(
gql"""
type Query {
foo: String
}
schema {
query: Query
}
""",
gql"""
type Query {
foo: String
}
"new description"
schema {
query: Query
}
""",
nonBreakingChange[SchemaDescriptionChanged]("Schema description changed")
)
"detect changes in type AST directives" in checkChangesWithoutQueryType(
gql"""
type Query implements Foo2 {
foo: String
a: Int
}
input Foo @bar(ids: [1, 2]) {
a: Int
}
type Foo1 @bar(ids: [1, 2]) {
a: Int
}
interface Foo2 @bar(ids: [1, 2]) {
a: Int
}
union Foo3 @bar(ids: [1, 2]) = Query | Foo1
enum Foo4 @bar(ids: [1, 2]) {
A B C
}
scalar Foo5 @bar(ids: [1, 2])
""",
gql"""
type Query implements Foo2 {
foo: String
a: Int
}
input Foo @bar(ids: [1]) {
a: Int
}
type Foo1 @baz {
a: Int
}
interface Foo2 @baz {
a: Int
}
union Foo3 @bar(id: 1) = Query | Foo1
enum Foo4 @bar(id: 1) {
A B C
}
scalar Foo5 @bar(ids: [1])
""",
nonBreakingChange[InputObjectTypeAstDirectiveAdded](
"Directive `@bar(ids:[1])` added on an input type `Foo`"),
nonBreakingChange[InputObjectTypeAstDirectiveRemoved](
"Directive `@bar(ids:[1,2])` removed from an input type `Foo`"),
nonBreakingChange[ObjectTypeAstDirectiveAdded](
"Directive `@baz` added on an object type `Foo1`"),
nonBreakingChange[ObjectTypeAstDirectiveRemoved](
"Directive `@bar(ids:[1,2])` removed from an object type `Foo1`"),
nonBreakingChange[InterfaceTypeAstDirectiveAdded](
"Directive `@baz` added on an interface type `Foo2`"),
nonBreakingChange[InterfaceTypeAstDirectiveRemoved](
"Directive `@bar(ids:[1,2])` removed from an interface type `Foo2`"),
nonBreakingChange[UnionTypeAstDirectiveAdded](
"Directive `@bar(id:1)` added on a union type `Foo3`"),
nonBreakingChange[UnionTypeAstDirectiveRemoved](
"Directive `@bar(ids:[1,2])` removed from a union type `Foo3`"),
nonBreakingChange[EnumTypeAstDirectiveAdded](
"Directive `@bar(id:1)` added on an enum type `Foo4`"),
nonBreakingChange[EnumTypeAstDirectiveRemoved](
"Directive `@bar(ids:[1,2])` removed from an enum type `Foo4`"),
nonBreakingChange[ScalarTypeAstDirectiveAdded](
"Directive `@bar(ids:[1])` added on a scalar type `Foo5`"),
nonBreakingChange[ScalarTypeAstDirectiveRemoved](
"Directive `@bar(ids:[1,2])` removed from a scalar type `Foo5`")
)
}
private[this] def breakingChange[T: ClassTag](description: String) =
(implicitly[ClassTag[T]].runtimeClass, description, true)
private[this] def nonBreakingChange[T: ClassTag](description: String) =
(implicitly[ClassTag[T]].runtimeClass, description, false)
private[this] def checkChanges(
oldDoc: Document,
newDoc: Document,
expectedChanges: (Class[_], String, Boolean)*): Unit = {
val queryType =
graphql"""
type Query {
field1: String
}
"""
val oldSchema = Schema.buildFromAst(oldDoc.merge(queryType))
val newSchema = Schema.buildFromAst(newDoc.merge(queryType))
assertChanges(newSchema.compare(oldSchema), expectedChanges: _*)
}
private[this] def checkChangesWithoutQueryType(
oldDoc: Document,
newDoc: Document,
expectedChanges: (Class[_], String, Boolean)*): Unit = {
val oldSchema = Schema.buildFromAst(oldDoc)
val newSchema = Schema.buildFromAst(newDoc)
assertChanges(newSchema.compare(oldSchema), expectedChanges: _*)
}
private[this] def assertChanges(
actualChanges: Vector[SchemaChange],
expectedChanges: (Class[_], String, Boolean)*): Unit = {
val actualRendered = actualChanges
.map(c =>
s" * ${c.getClass.getSimpleName}: ${c.description}${if (c.breakingChange) " (breaking)"
else ""}")
.mkString("\\n")
withClue(s"Actual changes:\\n$actualRendered\\n") {
actualChanges should have size expectedChanges.size
val notFound = expectedChanges.filter(expectedChange =>
!actualChanges.exists(ac =>
expectedChange._1.isAssignableFrom(
ac.getClass) && ac.description == expectedChange._2 && ac.breakingChange == expectedChange._3))
if (notFound.nonEmpty) {
val str = notFound
.map(nf => s" * ${nf._1.getSimpleName}: ${nf._2}${if (nf._3) " (breaking)" else ""}")
.mkString("\\n")
fail(s"Changes not found:\\n $str")
}
}
}
}
| sangria-graphql/sangria | modules/core/src/test/scala/sangria/schema/SchemaComparatorSpec.scala | Scala | apache-2.0 | 22,465 |
package stormlantern.consul.client
import akka.actor.{ ActorRef, ActorSystem }
import akka.actor.Status.Failure
import akka.testkit.{ ImplicitSender, TestKit }
import org.scalamock.scalatest.MockFactory
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.{ BeforeAndAfterAll, FlatSpecLike, Matchers }
import stormlantern.consul.client.dao.ConsulHttpClient
import stormlantern.consul.client.discovery.ConnectionHolder
import stormlantern.consul.client.helpers.CallingThreadExecutionContext
import stormlantern.consul.client.loadbalancers.LoadBalancerActor
import stormlantern.consul.client.util.Logging
import scala.concurrent.Future
class ServiceBrokerSpec(_system: ActorSystem) extends TestKit(_system) with ImplicitSender with FlatSpecLike
with Matchers with ScalaFutures with BeforeAndAfterAll with MockFactory with Logging {
implicit val ec = CallingThreadExecutionContext()
def this() = this(ActorSystem("ServiceBrokerSpec"))
override def afterAll() {
TestKit.shutdownActorSystem(system)
}
trait TestScope {
val connectionHolder: ConnectionHolder = mock[ConnectionHolder]
val httpClient: ConsulHttpClient = mock[ConsulHttpClient]
val loadBalancer: ActorRef = self
}
"The ServiceBroker" should "return a service connection when requested" in new TestScope {
(connectionHolder.connection _).expects().returns(Future.successful(true))
(connectionHolder.loadBalancer _).expects().returns(loadBalancer)
val sut = new ServiceBroker(self, httpClient)
    val result: Future[Boolean] = sut.withService("service1") { service: Boolean ⇒
Future.successful(service)
}
expectMsgPF() {
      case ServiceBrokerActor.GetServiceConnection("service1") ⇒
lastSender ! connectionHolder
result.map(_ shouldEqual true).futureValue
}
expectMsg(LoadBalancerActor.ReturnConnection(connectionHolder))
}
it should "return the connection when an error occurs" in new TestScope {
(connectionHolder.connection _).expects().returns(Future.successful(true))
(connectionHolder.loadBalancer _).expects().returns(loadBalancer)
val sut = new ServiceBroker(self, httpClient)
    val result: Future[Boolean] = sut.withService[Boolean, Boolean]("service1") { service: Boolean ⇒
throw new RuntimeException()
}
expectMsgPF() {
      case ServiceBrokerActor.GetServiceConnection("service1") ⇒
lastSender ! connectionHolder
an[RuntimeException] should be thrownBy result.futureValue
}
expectMsg(LoadBalancerActor.ReturnConnection(connectionHolder))
}
it should "throw an error when an excpetion is returned" in new TestScope {
val sut = new ServiceBroker(self, httpClient)
val result: Future[Boolean] = sut.withService("service1") { service: Boolean β
Future.successful(service)
}
expectMsgPF() {
case ServiceBrokerActor.GetServiceConnection("service1") β
lastSender ! Failure(new RuntimeException())
an[RuntimeException] should be thrownBy result.futureValue
}
}
}
| dlouwers/reactive-consul | client/src/test/scala/stormlantern/consul/client/ServiceBrokerSpec.scala | Scala | mit | 3,056 |
/***********************************************************************
* Copyright (c) 2013-2018 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.index
import java.nio.charset.StandardCharsets
import java.util.Comparator
import org.geotools.data.{Query, Transaction}
import org.geotools.factory.Hints
import org.locationtech.geomesa.index.TestGeoMesaDataStore.{TestWrappedFeature, TestWrite, _}
import org.locationtech.geomesa.index.api.{GeoMesaIndexManager, _}
import org.locationtech.geomesa.index.geotools.GeoMesaDataStoreFactory.GeoMesaDataStoreConfig
import org.locationtech.geomesa.index.geotools.{GeoMesaAppendFeatureWriter, GeoMesaDataStore, GeoMesaFeatureWriter, GeoMesaModifyFeatureWriter}
import org.locationtech.geomesa.index.index._
import org.locationtech.geomesa.index.index.legacy.AttributeDateIndex
import org.locationtech.geomesa.index.index.z2.Z2Index
import org.locationtech.geomesa.index.index.z3.Z3Index
import org.locationtech.geomesa.index.metadata.GeoMesaMetadata
import org.locationtech.geomesa.index.stats._
import org.locationtech.geomesa.index.utils.{Explainer, LocalLocking}
import org.locationtech.geomesa.utils.audit.{AuditProvider, AuditWriter}
import org.locationtech.geomesa.utils.collection.{CloseableIterator, SelfClosingIterator}
import org.locationtech.geomesa.utils.index.IndexMode
import org.locationtech.geomesa.utils.index.IndexMode.IndexMode
import org.locationtech.geomesa.utils.stats.{SeqStat, Stat}
import org.opengis.feature.simple.{SimpleFeature, SimpleFeatureType}
import org.opengis.filter.Filter
class TestGeoMesaDataStore(looseBBox: Boolean)
extends GeoMesaDataStore[TestGeoMesaDataStore, TestWrappedFeature, TestWrite](TestConfig(looseBBox))
with LocalLocking {
override val metadata: GeoMesaMetadata[String] = new InMemoryMetadata[String]
override val stats: GeoMesaStats = new TestStats(this)
override val manager: TestIndexManager = new TestIndexManager
override protected def createFeatureWriterAppend(sft: SimpleFeatureType,
indices: Option[Seq[TestFeatureIndexType]]): TestFeatureWriterType =
new TestAppendFeatureWriter(sft, this, indices)
override protected def createFeatureWriterModify(sft: SimpleFeatureType,
indices: Option[Seq[TestFeatureIndexType]],
filter: Filter): TestFeatureWriterType =
new TestModifyFeatureWriter(sft, this, indices, filter)
override def delete(): Unit = throw new NotImplementedError()
}
object TestGeoMesaDataStore {
type TestFeatureIndexType = GeoMesaFeatureIndex[TestGeoMesaDataStore, TestWrappedFeature, TestWrite]
type TestFeatureWriterType = GeoMesaFeatureWriter[TestGeoMesaDataStore, TestWrappedFeature, TestWrite, TestFeatureIndex]
type TestAppendFeatureWriterType = GeoMesaAppendFeatureWriter[TestGeoMesaDataStore, TestWrappedFeature, TestWrite, TestFeatureIndex]
type TestModifyFeatureWriterType = GeoMesaModifyFeatureWriter[TestGeoMesaDataStore, TestWrappedFeature, TestWrite, TestFeatureIndex]
type TestIndexManagerType = GeoMesaIndexManager[TestGeoMesaDataStore, TestWrappedFeature, TestWrite]
type TestQueryPlanType = QueryPlan[TestGeoMesaDataStore, TestWrappedFeature, TestWrite]
type TestFilterStrategyType = FilterStrategy[TestGeoMesaDataStore, TestWrappedFeature, TestWrite]
  // Unsigned lexicographic byte-array comparison; a strict prefix sorts first.
  val ByteComparator = new Comparator[Array[Byte]] {
override def compare(o1: Array[Byte], o2: Array[Byte]): Int = {
val minLength = if (o1.length < o2.length) { o1.length } else { o2.length }
var i = 0
while (i < minLength) {
if (o1(i) != o2(i)) {
return (o1(i) & 0xff) - (o2(i) & 0xff)
}
i += 1
}
o1.length - o2.length
}
}
case class TestWrappedFeature(feature: SimpleFeature) extends WrappedFeature {
override lazy val idBytes: Array[Byte] = feature.getID.getBytes(StandardCharsets.UTF_8)
}
case class TestWrite(row: Array[Byte], feature: SimpleFeature, delete: Boolean = false)
case class TestRange(start: Array[Byte], end: Array[Byte]) {
override def toString: String = s"TestRange(${start.mkString(":")}, ${end.mkString(":")}}"
}
case class TestScanConfig(ranges: Seq[TestRange], ecql: Option[Filter])
case class TestConfig(looseBBox: Boolean) extends GeoMesaDataStoreConfig {
override val catalog: String = "test"
override val audit: Option[(AuditWriter, AuditProvider, String)] = None
override val generateStats: Boolean = true
override val queryThreads: Int = 1
override val queryTimeout: Option[Long] = None
override val caching: Boolean = false
override val namespace: Option[String] = None
}
class TestIndexManager extends GeoMesaIndexManager[TestGeoMesaDataStore, TestWrappedFeature, TestWrite] {
override val CurrentIndices: Seq[TestFeatureIndex] =
Seq(new TestZ3Index, new TestZ2Index, new TestIdIndex, new TestAttributeIndex)
override val AllIndices: Seq[TestFeatureIndex] = CurrentIndices :+ new TestAttributeDateIndex
override def lookup: Map[(String, Int), TestFeatureIndex] =
super.lookup.asInstanceOf[Map[(String, Int), TestFeatureIndex]]
override def indices(sft: SimpleFeatureType,
idx: Option[String] = None,
mode: IndexMode = IndexMode.Any): Seq[TestFeatureIndex] =
super.indices(sft, idx, mode).asInstanceOf[Seq[TestFeatureIndex]]
override def index(identifier: String): TestFeatureIndex = super.index(identifier).asInstanceOf[TestFeatureIndex]
}
class TestZ3Index extends TestFeatureIndex
with Z3Index[TestGeoMesaDataStore, TestWrappedFeature, TestWrite, TestRange, TestScanConfig]
class TestZ2Index extends TestFeatureIndex
with Z2Index[TestGeoMesaDataStore, TestWrappedFeature, TestWrite, TestRange, TestScanConfig]
class TestIdIndex extends TestFeatureIndex
with IdIndex[TestGeoMesaDataStore, TestWrappedFeature, TestWrite, TestRange, TestScanConfig]
class TestAttributeIndex extends TestFeatureIndex
with AttributeIndex[TestGeoMesaDataStore, TestWrappedFeature, TestWrite, TestRange, TestScanConfig] {
override val version: Int = 2
}
class TestAttributeDateIndex extends TestFeatureIndex
with AttributeDateIndex[TestGeoMesaDataStore, TestWrappedFeature, TestWrite, TestRange, TestScanConfig]
trait TestFeatureIndex extends TestFeatureIndexType
with IndexAdapter[TestGeoMesaDataStore, TestWrappedFeature, TestWrite, TestRange, TestScanConfig] {
private val ordering = new Ordering[(Array[Byte], SimpleFeature)] {
override def compare(x: (Array[Byte], SimpleFeature), y: (Array[Byte], SimpleFeature)): Int =
ByteComparator.compare(x._1, y._1)
}
val features = scala.collection.mutable.SortedSet.empty[(Array[Byte], SimpleFeature)](ordering)
override val version = 1
override def removeAll(sft: SimpleFeatureType, ds: TestGeoMesaDataStore): Unit = features.clear()
override def delete(sft: SimpleFeatureType, ds: TestGeoMesaDataStore, shared: Boolean): Unit = features.clear()
override protected def createInsert(row: Array[Byte], feature: TestWrappedFeature): TestWrite =
TestWrite(row, feature.feature)
override protected def createDelete(row: Array[Byte], feature: TestWrappedFeature): TestWrite =
TestWrite(row, feature.feature, delete = true)
override protected def range(start: Array[Byte], end: Array[Byte]): TestRange = TestRange(start, end)
override protected def rangeExact(row: Array[Byte]): TestRange = TestRange(row, IndexAdapter.rowFollowingRow(row))
override protected def scanConfig(sft: SimpleFeatureType,
ds: TestGeoMesaDataStore,
filter: TestFilterStrategyType,
ranges: Seq[TestRange],
ecql: Option[Filter],
hints: Hints): TestScanConfig = TestScanConfig(ranges, ecql)
override protected def scanPlan(sft: SimpleFeatureType,
ds: TestGeoMesaDataStore,
filter: TestFilterStrategyType,
config: TestScanConfig): TestQueryPlanType = TestQueryPlan(this, filter, config.ranges, config.ecql)
override def toString: String = getClass.getSimpleName
}
class TestAppendFeatureWriter(sft: SimpleFeatureType,
ds: TestGeoMesaDataStore,
indices: Option[Seq[TestFeatureIndexType]])
extends TestFeatureWriterType(sft, ds, indices) with TestAppendFeatureWriterType with TestFeatureWriter
class TestModifyFeatureWriter(sft: SimpleFeatureType,
ds: TestGeoMesaDataStore,
indices: Option[Seq[TestFeatureIndexType]],
val filter: Filter)
extends TestFeatureWriterType(sft, ds, indices) with TestModifyFeatureWriterType with TestFeatureWriter
trait TestFeatureWriter extends TestFeatureWriterType {
override protected def createMutators(tables: IndexedSeq[String]): IndexedSeq[TestFeatureIndex] =
tables.map(t => ds.manager.indices(sft, mode = IndexMode.Write).find(_.getTableName(sft.getTypeName, ds) == t).orNull)
override protected def executeWrite(mutator: TestFeatureIndex, writes: Seq[TestWrite]): Unit = {
writes.foreach { case TestWrite(row, feature, _) => mutator.features.add((row, feature)) }
}
override protected def executeRemove(mutator: TestFeatureIndex, removes: Seq[TestWrite]): Unit =
removes.foreach { case TestWrite(row, feature, _) => mutator.features.remove((row, feature)) }
override def wrapFeature(feature: SimpleFeature): TestWrappedFeature = TestWrappedFeature(feature)
}
case class TestQueryPlan(index: TestFeatureIndex,
filter: TestFilterStrategyType,
ranges: Seq[TestRange],
ecql: Option[Filter]) extends TestQueryPlanType {
override def scan(ds: TestGeoMesaDataStore): CloseableIterator[SimpleFeature] = {
def contained(range: TestRange, row: Array[Byte]): Boolean =
ByteComparator.compare(range.start, row) <= 0 && ByteComparator.compare(range.end, row) > 0
index.features.toIterator.collect {
case (row, sf) if ranges.exists(contained(_, row)) && ecql.forall(_.evaluate(sf)) => sf
}
}
override def explain(explainer: Explainer, prefix: String): Unit = {
explainer(s"ranges (${ranges.length}): ${ranges.take(5).map(r => s"[${r.start.mkString("")}:${r.end.mkString("")})")}")
explainer(s"ecql: ${ecql.map(org.locationtech.geomesa.filter.filterToString).getOrElse("INCLUDE")}")
}
}
class TestStats(override protected val ds: TestGeoMesaDataStore) extends MetadataBackedStats {
override private [geomesa] val metadata = new InMemoryMetadata[Stat]
override protected val generateStats = true
override def runStats[T <: Stat](sft: SimpleFeatureType, stats: String, filter: Filter): Seq[T] = {
val stat = Stat(sft, stats)
SelfClosingIterator(ds.getFeatureReader(new Query(sft.getTypeName, filter), Transaction.AUTO_COMMIT)).foreach(stat.observe)
stat match {
case s: SeqStat => s.stats.asInstanceOf[Seq[T]]
case s: T => Seq(s)
}
}
}
} | boundlessgeo/geomesa | geomesa-index-api/src/test/scala/org/locationtech/geomesa/index/TestGeoMesaDataStore.scala | Scala | apache-2.0 | 11,835 |
/*
* Copyright 2011-2022 GatlingCorp (https://gatling.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gatling.http.check.body
import io.gatling.core.check.{ CheckMaterializer, Preparer }
import io.gatling.core.check.jsonpath.JsonPathCheckType
import io.gatling.core.json.JsonParsers
import io.gatling.http.check.{ HttpCheck, HttpCheckMaterializer }
import io.gatling.http.check.HttpCheckScope.Body
import io.gatling.http.response.Response
import com.fasterxml.jackson.databind.JsonNode
object HttpBodyJsonPathCheckMaterializer {
def instance(jsonParsers: JsonParsers): CheckMaterializer[JsonPathCheckType, HttpCheck, Response, JsonNode] = {
val preparer: Preparer[Response, JsonNode] = response => jsonParsers.safeParse(response.body.stream, response.body.charset)
new HttpCheckMaterializer[JsonPathCheckType, JsonNode](Body, preparer)
}
}
| gatling/gatling | gatling-http/src/main/scala/io/gatling/http/check/body/HttpBodyJsonPathCheckMaterializer.scala | Scala | apache-2.0 | 1,385 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.columnar
import org.apache.commons.lang3.StringUtils
import org.apache.spark.network.util.JavaUtils
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.analysis.MultiInstanceRelation
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.plans.logical
import org.apache.spark.sql.catalyst.plans.logical.Statistics
import org.apache.spark.sql.execution.SparkPlan
import org.apache.spark.storage.StorageLevel
import org.apache.spark.util.LongAccumulator
object InMemoryRelation {
def apply(
useCompression: Boolean,
batchSize: Int,
storageLevel: StorageLevel,
child: SparkPlan,
tableName: Option[String]): InMemoryRelation =
new InMemoryRelation(child.output, useCompression, batchSize, storageLevel, child, tableName)()
}
/**
* CachedBatch is a cached batch of rows.
*
* @param numRows The total number of rows in this batch
* @param buffers The buffers for serialized columns
* @param stats The stat of columns
*/
private[columnar]
case class CachedBatch(numRows: Int, buffers: Array[Array[Byte]], stats: InternalRow)
case class InMemoryRelation(
output: Seq[Attribute],
useCompression: Boolean,
batchSize: Int,
storageLevel: StorageLevel,
@transient child: SparkPlan,
tableName: Option[String])(
@transient var _cachedColumnBuffers: RDD[CachedBatch] = null,
val batchStats: LongAccumulator = child.sqlContext.sparkContext.longAccumulator)
extends logical.LeafNode with MultiInstanceRelation {
override protected def innerChildren: Seq[SparkPlan] = Seq(child)
override def producedAttributes: AttributeSet = outputSet
@transient val partitionStatistics = new PartitionStatistics(output)
override def computeStats(): Statistics = {
if (batchStats.value == 0L) {
// Underlying columnar RDD hasn't been materialized, no useful statistics information
// available, return the default statistics.
Statistics(sizeInBytes = child.sqlContext.conf.defaultSizeInBytes)
} else {
Statistics(sizeInBytes = batchStats.value.longValue)
}
}
// If the cached column buffers were not passed in, we calculate them in the constructor.
// As in Spark, the actual work of caching is lazy.
if (_cachedColumnBuffers == null) {
buildBuffers()
}
private def buildBuffers(): Unit = {
val output = child.output
val cached = child.execute().mapPartitionsInternal { rowIterator =>
new Iterator[CachedBatch] {
def next(): CachedBatch = {
val columnBuilders = output.map { attribute =>
ColumnBuilder(attribute.dataType, batchSize, attribute.name, useCompression)
}.toArray
var rowCount = 0
var totalSize = 0L
while (rowIterator.hasNext && rowCount < batchSize
&& totalSize < ColumnBuilder.MAX_BATCH_SIZE_IN_BYTE) {
val row = rowIterator.next()
// Added for SPARK-6082. This assertion can be useful for scenarios when something
// like Hive TRANSFORM is used. The external data generation script used in TRANSFORM
            // may result in malformed rows, causing ArrayIndexOutOfBoundsException, which is somewhat
// hard to decipher.
assert(
row.numFields == columnBuilders.length,
s"Row column number mismatch, expected ${output.size} columns, " +
s"but got ${row.numFields}." +
s"\\nRow content: $row")
var i = 0
            totalSize = 0 // recomputed per row: builder sizes grow as values are appended
while (i < row.numFields) {
columnBuilders(i).appendFrom(row, i)
totalSize += columnBuilders(i).columnStats.sizeInBytes
i += 1
}
rowCount += 1
}
batchStats.add(totalSize)
val stats = InternalRow.fromSeq(
columnBuilders.flatMap(_.columnStats.collectedStatistics))
CachedBatch(rowCount, columnBuilders.map { builder =>
JavaUtils.bufferToArray(builder.build())
}, stats)
}
def hasNext: Boolean = rowIterator.hasNext
}
}.persist(storageLevel)
cached.setName(
tableName.map(n => s"In-memory table $n")
.getOrElse(StringUtils.abbreviate(child.toString, 1024)))
_cachedColumnBuffers = cached
}
def withOutput(newOutput: Seq[Attribute]): InMemoryRelation = {
InMemoryRelation(
newOutput, useCompression, batchSize, storageLevel, child, tableName)(
_cachedColumnBuffers, batchStats)
}
override def newInstance(): this.type = {
new InMemoryRelation(
output.map(_.newInstance()),
useCompression,
batchSize,
storageLevel,
child,
tableName)(
_cachedColumnBuffers,
batchStats).asInstanceOf[this.type]
}
def cachedColumnBuffers: RDD[CachedBatch] = _cachedColumnBuffers
override protected def otherCopyArgs: Seq[AnyRef] =
Seq(_cachedColumnBuffers, batchStats)
}
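// Hedged editorial sketch (not part of the Spark sources): this relation is
// normally built by the cache manager when a Dataset is persisted; the values
// below are illustrative only.
//   val relation = InMemoryRelation(
//     useCompression = true,
//     batchSize = 10000,
//     storageLevel = StorageLevel.MEMORY_AND_DISK,
//     child = somePhysicalPlan, // a hypothetical SparkPlan from the planner
//     tableName = Some("my_table"))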
| akopich/spark | sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/InMemoryRelation.scala | Scala | apache-2.0 | 5,874 |
package play.modules.rediscala
import play.api._
import java.net.URI
import scala.Some
import redis.RedisClient
import akka.actor.ActorSystem
class RedisPlugin(app: Application) extends Plugin {
lazy val confs:Map[String, (String, Int, Option[(String, String)])] =
app.configuration.getConfig("redis").fold(Map("default" -> RedisPlugin.parseConf(app.configuration))) { conf =>
conf.subKeys.map(key => (key, RedisPlugin.parseConf(app.configuration, key))).toMap
}
override def onStart() {
Logger.info("RedisPlugin starting...")
confs
Logger.info("RedisPlugin started")
}
def client(db:String)(implicit system:ActorSystem):RedisClient = confs.get(db) match {
case Some(conf) => new RedisClient(conf._1, conf._2, conf._3.map(_._2))
case _ => throw new PlayException("RedisPlugin Error", s"No configuration found for db $db")
}
}
object RedisPlugin {
def client(db: String = "default")(implicit app: Application, system: ActorSystem) = current.client(db)
def current(implicit app: Application): RedisPlugin = app.plugin[RedisPlugin] match {
case Some(plugin) => plugin
case _ => throw new PlayException("RedisPlugin Error", "The RedisPlugin has not been initialized! Please edit your conf/play.plugins file and add the following line: '400:play.modules.rediscala.RedisPlugin' (400 is an arbitrary priority and may be changed to match your needs).")
}
val default:(String, Int, Option[(String, String)]) = ("localhost", 6379, None)
def parseConf(configuration: Configuration, name: String = "default"):(String, Int, Option[(String, String)]) = {
configuration.getConfig("redis."+name).fold(default){ conf =>
parseConf(conf.getString("uri"), conf.getString("host"), conf.getInt("port"), conf.getString("user"), conf.getString("password"))
}
}
def parseConf(uri: Option[String], host: Option[String], port: Option[Int], user:Option[String], password:Option[String]):(String, Int, Option[(String, String)]) = {
val auth = for{
u <- user
p <- password
} yield (u, p)
uri.fold[(String, Int, Option[(String, String)])]((host.getOrElse(default._1), port.getOrElse(default._2), auth.orElse(default._3)))(parseURI)
}
private def parseURI(uri: String):(String, Int, Option[(String, String)]) = {
    val jUri = new URI(uri)
val port = jUri.getPort match {
case -1 => default._2
case p:Int => p
}
val userInfo = Option(jUri.getUserInfo).map {
_.split(":").toList match {
case username :: password => (username, password.mkString)
case _ => null
}
}
(jUri.getHost, port, userInfo)
}
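// Hedged editorial sketch (not part of the original plugin): exercising the
// public parseConf overload directly to illustrate the resolution rules above.
object RedisPluginParseExample extends App {
  // A URI carries host, port and credentials in a single value.
  println(RedisPlugin.parseConf(Some("redis://user:secret@somehost:1234"), None, None, None, None))
  // => (somehost,1234,Some((user,secret)))
  // Without a URI, explicit host/port (or the defaults) win; auth needs both
  // user and password to be present.
  println(RedisPlugin.parseConf(None, Some("10.0.0.5"), Some(6380), None, None))
  // => (10.0.0.5,6380,None)
}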
} | njin-fr/play2-rediscala | src/main/scala/play/modules/rediscala/RedisPlugin.scala | Scala | apache-2.0 | 2,666 |
/**
* Copyright (C) 2013 Orbeon, Inc.
*
* This program is free software; you can redistribute it and/or modify it under the terms of the
* GNU Lesser General Public License as published by the Free Software Foundation; either version
* 2.1 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Lesser General Public License for more details.
*
* The full text of the license is available at http://www.gnu.org/copyleft/lesser.html
*/
package org.orbeon.oxf.test
import org.mockito.invocation.InvocationOnMock
import org.mockito.stubbing.Answer
import org.mockito.{Matchers, Mockito}
import org.orbeon.oxf.externalcontext.ExternalContext
import org.orbeon.oxf.pipeline.InitUtils
import org.orbeon.oxf.util.IndentedLogger
import org.orbeon.oxf.xforms.action.XFormsAPI._
import org.orbeon.oxf.xforms.action.XFormsActionInterpreter
import org.orbeon.oxf.xforms.control.Controls.ControlsIterator
import org.orbeon.oxf.xforms.control.controls.XFormsSelect1Control
import org.orbeon.oxf.xforms.control.{XFormsComponentControl, XFormsControl, XFormsSingleNodeControl, XFormsValueControl}
import org.orbeon.oxf.xforms.event.XFormsEvents._
import org.orbeon.oxf.xforms.event.events.XXFormsValueEvent
import org.orbeon.oxf.xforms.event.{ClientEvents, Dispatch, XFormsCustomEvent, XFormsEventTarget}
import org.orbeon.oxf.xforms.itemset.Itemset
import org.orbeon.oxf.xforms.model.XFormsInstance
import org.orbeon.oxf.xforms.processor.XFormsServer
import org.orbeon.oxf.xforms.state.XFormsStateManager
import org.orbeon.oxf.xforms.xbl.XBLContainer
import org.orbeon.oxf.xforms.{XFormsContainingDocument, XFormsObject}
import org.orbeon.oxf.xml.TransformerUtils
import org.scalatest.mockito.MockitoSugar
import scala.reflect.ClassTag
trait XFormsSupport extends MockitoSugar {
  self: DocumentTestBase ⇒
  def withTestExternalContext[T](body: ExternalContext ⇒ T): T =
    InitUtils.withPipelineContext { pipelineContext ⇒
body(
PipelineSupport.setExternalContext(
pipelineContext,
PipelineSupport.DefaultRequestUrl,
XFormsStateManager.sessionCreated,
XFormsStateManager.sessionDestroyed
)
)
}
  def withActionAndDoc[T](url: String)(body: ⇒ T): T =
withActionAndDoc(setupDocument(url))(body)
  def withActionAndDoc[T](doc: XFormsContainingDocument)(body: ⇒ T): T =
withScalaAction(mockActionInterpreter(doc)) {
withContainingDocument(doc) {
body
}
}
  def withAction[T](body: ⇒ T): T = {
document.startOutermostActionHandler()
val result = withScalaAction(mockActionInterpreter(inScopeContainingDocument))(body)
document.endOutermostActionHandler()
result
}
private def mockActionInterpreter(doc: XFormsContainingDocument) = {
val actionInterpreter = mock[XFormsActionInterpreter]
Mockito when actionInterpreter.containingDocument thenReturn doc
Mockito when actionInterpreter.container thenReturn doc
Mockito when actionInterpreter.indentedLogger thenReturn new IndentedLogger(XFormsServer.logger)
// Resolve assuming target relative to the document
Mockito when actionInterpreter.resolveObject(Matchers.anyObject(), Matchers.anyString) thenAnswer new Answer[XFormsObject] {
def answer(invocation: InvocationOnMock) = {
val targetStaticOrAbsoluteId = invocation.getArguments()(1).asInstanceOf[String]
doc.resolveObjectById("#document", targetStaticOrAbsoluteId, None)
}
}
actionInterpreter
}
// Dispatch a custom event to the object with the given prefixed id
def dispatch(name: String, effectiveId: String) =
Dispatch.dispatchEvent(
new XFormsCustomEvent(
name,
document.getObjectByEffectiveId(effectiveId).asInstanceOf[XFormsEventTarget],
Map(),
bubbles = true,
cancelable = true)
)
// Get a top-level instance
def instance(instanceStaticId: String) =
document.findInstance(instanceStaticId)
// Convert an instance to a string
def instanceToString(instance: XFormsInstance) =
TransformerUtils.tinyTreeToString(instance.documentInfo)
def getControlValue(controlEffectiveId: String) = getValueControl(controlEffectiveId).getValue
def getControlExternalValue(controlEffectiveId: String) = getValueControl(controlEffectiveId).getExternalValue
def setControlValue(controlEffectiveId: String, value: String): Unit = {
// This stores the value without testing for readonly
document.startOutermostActionHandler()
getValueControl(controlEffectiveId).storeExternalValue(value)
document.endOutermostActionHandler()
}
def setControlValueWithEventSearchNested(controlEffectiveId: String, value: String): Unit = {
def process(target: XFormsEventTarget) = {
ClientEvents.processEvent(document, new XXFormsValueEvent(target, value))
document.afterExternalEvents()
document.afterUpdateResponse()
document.beforeExternalEvents(null)
}
getObject(controlEffectiveId) match {
      case c: XFormsControl ⇒
        ControlsIterator(c, includeSelf = true) collectFirst {
          case vc: XFormsValueControl if vc.allowExternalEvent(XXFORMS_VALUE) ⇒ vc
        } foreach process
      case _ ⇒
}
}
def isRelevant(controlEffectiveId: String) = getObject(controlEffectiveId).asInstanceOf[XFormsControl].isRelevant
def isRequired(controlEffectiveId: String) = getSingleNodeControl(controlEffectiveId).isRequired
def isReadonly(controlEffectiveId: String) = getSingleNodeControl(controlEffectiveId).isReadonly
def isValid(controlEffectiveId: String) = getSingleNodeControl(controlEffectiveId).isValid
def getType(controlEffectiveId: String) = getSingleNodeControl(controlEffectiveId).valueType
def hasFocus(controlEffectiveId: String) = document.getControls.getFocusedControl exists (_ eq getSingleNodeControl(controlEffectiveId))
def getItemset(controlEffectiveId: String) = {
val select1 = getObject(controlEffectiveId).asInstanceOf[XFormsSelect1Control]
select1.getItemset.asJSON(null, select1.mustEncodeValues, null)
}
def getItemsetSearchNested(control: XFormsControl): Option[Itemset] = control match {
    case c: XFormsSelect1Control ⇒ Some(c.getItemset)
    case c: XFormsComponentControl ⇒ ControlsIterator(c, includeSelf = false) collectFirst { case c: XFormsSelect1Control ⇒ c.getItemset }
    case _ ⇒ None
}
val DocId = "#document"
def resolveObject[T: ClassTag](
staticOrAbsoluteId : String,
sourceEffectiveId : String = DocId,
indexes : List[Int] = Nil,
container : XBLContainer = document
): Option[T] = {
val resolvedOpt =
container.resolveObjectByIdInScope(sourceEffectiveId, staticOrAbsoluteId) collect {
        case result if indexes.nonEmpty ⇒
          document.getObjectByEffectiveId(Dispatch.resolveRepeatIndexes(container, result, container.prefixedId, indexes mkString " "))
        case result ⇒
          result
      }
    resolvedOpt collect { case c: T ⇒ c }
}
def getControl(controlEffectiveId: String) = getObject(controlEffectiveId).asInstanceOf[XFormsControl]
def getSingleNodeControl(controlEffectiveId: String) = getObject(controlEffectiveId).asInstanceOf[XFormsSingleNodeControl]
def getValueControl(controlEffectiveId: String) = getObject(controlEffectiveId).asInstanceOf[XFormsValueControl]
def getObject(effectiveId: String) = document.getObjectByEffectiveId(effectiveId)
}
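// Hedged editorial sketch (not part of the Orbeon sources): typical use from a
// test that mixes this trait into a DocumentTestBase subclass; the form URL and
// control id below are hypothetical.
//   withActionAndDoc("oxf:/org/orbeon/oxf/xforms/tests/my-form.xhtml") {
//     setControlValue("my-input", "42")
//     assert(getControlValue("my-input") == "42")
//   }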
| brunobuzzi/orbeon-forms | xforms/jvm/src/test/scala/org/orbeon/oxf/test/XFormsSupport.scala | Scala | lgpl-2.1 | 7,742 |
package uk.gov.gds.ier.test
import uk.gov.gds.ier.guice.WithRemoteAssets
import uk.gov.gds.ier.assets.RemoteAssets
import org.scalatest.mock.MockitoSugar
trait WithMockRemoteAssets
extends WithRemoteAssets {
private val mockito = new MockitoSugar {}
val remoteAssets = mockito.mock[RemoteAssets]
}
| michaeldfallen/ier-frontend | test/uk/gov/gds/ier/test/WithMockRemoteAssets.scala | Scala | mit | 311 |
package dotty.tools.scripting
import scala.language.unsafeNulls
import java.nio.file.{ Files, Paths, Path }
import dotty.tools.dotc.Driver
import dotty.tools.dotc.core.Contexts, Contexts.{ Context, ctx }
import dotty.tools.io.{ PlainDirectory, Directory, ClassPath }
import Util.*
class StringDriver(compilerArgs: Array[String], scalaSource: String) extends Driver:
override def sourcesRequired: Boolean = false
def compileAndRun(classpath: List[String] = Nil): Unit =
val outDir = Files.createTempDirectory("scala3-expression")
outDir.toFile.deleteOnExit()
setup(compilerArgs, initCtx.fresh) match
case Some((toCompile, rootCtx)) =>
given Context = rootCtx.fresh.setSetting(rootCtx.settings.outputDir,
new PlainDirectory(Directory(outDir)))
val compiler = newCompiler
compiler.newRun.compileFromStrings(List(scalaSource))
val output = ctx.settings.outputDir.value
if ctx.reporter.hasErrors then
throw StringDriverException("Errors encountered during compilation")
try
val classpath = s"${ctx.settings.classpath.value}${pathsep}${sys.props("java.class.path")}"
val classpathEntries: Seq[Path] = ClassPath.expandPath(classpath, expandStar=true).map { Paths.get(_) }
sys.props("java.class.path") = classpathEntries.map(_.toString).mkString(pathsep)
val (mainClass, mainMethod) = detectMainClassAndMethod(outDir, classpathEntries, scalaSource)
mainMethod.invoke(null, Array.empty[String])
catch
case e: java.lang.reflect.InvocationTargetException =>
throw e.getCause
finally
deleteFile(outDir.toFile)
case None =>
end compileAndRun
end StringDriver
case class StringDriverException(msg: String) extends RuntimeException(msg)
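// Hedged editorial sketch (not part of the compiler sources): compiling and
// running a one-line program through the driver above; the flag is illustrative.
//   new StringDriver(Array("-usejavacp"), """@main def run() = println("hi")""")
//     .compileAndRun()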
| lampepfl/dotty | compiler/src/dotty/tools/scripting/StringDriver.scala | Scala | apache-2.0 | 1,829 |
/*
* ******************************************************************************
* * Copyright (C) 2013 Christopher Harris (Itszuvalex)
* * [email protected]
* *
* * This program is free software; you can redistribute it and/or
* * modify it under the terms of the GNU General Public License
* * as published by the Free Software Foundation; either version 2
* * of the License, or (at your option) any later version.
* *
* * This program is distributed in the hope that it will be useful,
* * but WITHOUT ANY WARRANTY; without even the implied warranty of
* * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* * GNU General Public License for more details.
* *
* * You should have received a copy of the GNU General Public License
* * along with this program; if not, write to the Free Software
* * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
* *****************************************************************************
*/
package com.itszuvalex.femtocraft.power.fluids
import com.itszuvalex.femtocraft.Femtocraft
import net.minecraft.item.EnumRarity
import net.minecraftforge.common.util.ForgeDirection
import net.minecraftforge.fluids.Fluid
/**
* Created by Christopher Harris (Itszuvalex) on 8/2/14.
*/
class FluidCooledMoltenSalt extends Fluid("Cooled Molten Salt") {
setUnlocalizedName("FluidCooledMoltenSalt")
setLuminosity(1)
setDensity(5000)
setTemperature(1200)
setViscosity(3000)
setGaseous(false)
setRarity(EnumRarity.rare)
override def getStillIcon = Femtocraft.blockFluidCooledMoltenSalt.getBlockTextureFromSide(ForgeDirection.UP.ordinal)
override def getFlowingIcon = Femtocraft.blockFluidCooledMoltenSalt.getBlockTextureFromSide(ForgeDirection.NORTH.ordinal)
}
| Itszuvalex/Femtocraft-alpha-1 | src/main/java/com/itszuvalex/femtocraft/power/fluids/FluidCooledMoltenSalt.scala | Scala | gpl-2.0 | 1,797 |
// /scala/trac/5452/a.scala
// Mon Feb 13 22:52:36 PST 2012
// import scala.reflect.runtime.universe._
trait Tree
object Bip {
def ??? = sys.error("")
}
import Bip._
case class Queryable[T]() {
def treeFilter( t:Tree ) : Queryable[T] = ???
}
object Queryable {
def apply[T:Manifest] = ???
def apply[T:Manifest]( t:Tree ) = ???
}
trait CoffeesTable{
def sales : Int
}
object Test extends App{
val q = new Queryable[CoffeesTable]
Queryable[CoffeesTable]( q.treeFilter(null) )
}
| yusuke2255/dotty | tests/untried/neg/t5452-old.scala | Scala | bsd-3-clause | 499 |
package com.workshop
// Let's do a recursive solution
// Use match on the `n`
// You can use `|` to match on multiple constants
// You can use a guard: `case x if ... =>`
// (a hedged solution sketch follows the class below)
class Fibonacci {
def nth(n: Int): Int = ???
}
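// A hedged solution sketch (editorial; not part of the original workshop file),
// assuming the 0-based convention nth(0) = 0, nth(1) = 1:
object FibonacciSketch {
  def nth(n: Int): Int = n match {
    case 0 | 1 => n // `|` matches several constants at once
    case x if x > 1 => nth(x - 1) + nth(x - 2) // guard on the recursive case
    case _ => throw new IllegalArgumentException("n must be non-negative")
  }
}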
| maximn/scala-workshop | src/main/scala/com/workshop/Fibonacci.scala | Scala | mit | 218 |
/**
* Licensed to the Minutemen Group under one or more contributor license
* agreements. See the COPYRIGHT file distributed with this work for
* additional information regarding copyright ownership.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You may
* obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package silhouette.exceptions
/**
* Indicates a misconfiguration of a Silhouette component.
*
* @param msg The exception message.
* @param cause The exception cause.
*/
class ConfigurationException(msg: String, cause: Option[Throwable] = None)
extends SilhouetteException(msg, cause)
| datalek/silhouette | silhouette/src/main/scala/silhouette/exceptions/ConfigurationException.scala | Scala | apache-2.0 | 1,059 |
package com.learning.akka.actors
import akka.actor.{Props, ActorSystem, Actor}
/**
* Created by lgrcyanny on 15/12/22.
*/
class BecomeActor extends Actor {
def happy: Receive = {
case "foo" => println("I am happy with foo")
case "bar" => println("I am happy with bar")
}
def receive: Receive = {
case "foo" => context.become(happy)
case "bar" => println("I am bar")
}
}
object BecomeActor {
def main(args: Array[String]) {
val system = ActorSystem("BecomeTest")
val actor = system.actorOf(Props[BecomeActor], "BecomeActor")
actor ! "bar"
actor ! "foo"
actor ! "foo"
actor ! "foo"
}
}
| lgrcyanny/LearningAkka | src/main/scala/com/learning/akka/actors/BecomeActor.scala | Scala | gpl-2.0 | 641 |
/*
* Copyright 2015 LG CNS.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package scouter.server.netio.service.net
import scouter.server.util.ThreadScala
import scouter.util.IntKeyLinkedMap
import scouter.util.LinkedList
import scouter.util.RequestQueue
import scala.util.control.Breaks._
import scouter.server.Configure
import java.util.concurrent.Executors
object TcpAgentManager {
val pool = Executors.newFixedThreadPool(4)
val agentTable = new IntKeyLinkedMap[RequestQueue[TcpAgentWorker]]().setMax(5000)
    // Every 5 seconds, sweep the pooled agent sessions: send a keep-alive to
    // idle connections and drop the ones that turn out to be closed.
    ThreadScala.startDaemon("scouter.server.netio.service.net.TcpAgentManager", { true }, 5000) {
val keys = agentTable.keyArray()
for (k <- keys) {
val agentSessions = agentTable.get(k)
if (agentSessions != null) {
pool.execute(new Runnable() {
override def run() {
breakable {
val cnt = agentSessions.size()
for (k <- 0 to cnt) {
val item = agentSessions.getNoWait()
if (item == null) {
break
}
if (item.isExpired()) {
item.sendKeepAlive(3000)
}
if (item.isClosed() == false) {
agentSessions.put(item)
}
}
}
}
});
}
}
}
val conf = Configure.getInstance()
    // Registers a worker for the given agent and returns the pooled session count.
    def add(objHash: Int, agent: TcpAgentWorker): Int = {
agentTable.synchronized {
var session = agentTable.get(objHash)
if (session == null) {
session = new RequestQueue[TcpAgentWorker](50);
agentTable.put(objHash, session)
}
session.put(agent);
return session.size()
}
}
    // Borrows a pooled session, waiting up to tcp_agent_max_wait ms; null if none arrives.
    def get(objHash: Int): TcpAgentWorker = {
var session = agentTable.get(objHash)
return if (session != null) session.get(conf.tcp_agent_max_wait) else null
}
}
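// Hedged editorial sketch (not part of the Scouter sources): a connection
// handler would pool an incoming worker and a service call would borrow it.
//   TcpAgentManager.add(objHash, worker) // returns the pooled session count
//   val session = TcpAgentManager.get(objHash) // null if none arrives in time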
| jhshin9/scouter | scouter.server/src/scouter/server/netio/service/net/TcpAgentManager.scala | Scala | apache-2.0 | 2,783 |
package org.faker
/**
* Generates Names of fake football [baseball, hockey, ...] teams.
*
* {{{
* scala> Faker.Team.name
* res38: String = Nevada Warlocks
* }}}
*/
object Team extends Base {
def name(implicit locale: FakerLocale = FakerLocale.default): String = parse("team.name").titlelize
}
| ralli/faker_scala | src/main/scala/org/faker/Team.scala | Scala | bsd-3-clause | 303 |
/*
* Copyright 2015 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.play.views.formatting
import org.joda.time.format.DateTimeFormat
import org.joda.time.{DateTime, DateTimeZone, LocalDate}
object Dates {
private[formatting] val dateFormat = DateTimeFormat.forPattern("d MMMM y").withZone(DateTimeZone.forID("Europe/London"))
private[formatting] val dateFormatAbbrMonth = DateTimeFormat.forPattern("d MMM y").withZone(DateTimeZone.forID("Europe/London"))
private[formatting] val shortDateFormat = DateTimeFormat.forPattern("yyyy-MM-dd").withZone(DateTimeZone.forID("Europe/London"))
private[formatting] val easyReadingDateFormat = DateTimeFormat.forPattern("EEEE d MMMM yyyy").withZone(DateTimeZone.forID("Europe/London"))
private[formatting] val easyReadingTimestampFormat = DateTimeFormat.forPattern("h:mmaa").withZone(DateTimeZone.forID("Europe/London"))
def formatDate(date: LocalDate) = dateFormat.print(date)
def formatDateAbbrMonth(date: LocalDate) = dateFormatAbbrMonth.print(date)
def formatDate(date: Option[LocalDate], default: String) = date match {
case Some(d) => dateFormat.print(d)
case None => default
}
def formatDateTime(date: DateTime) = dateFormat.print(date)
def formatEasyReadingTimestamp(date: Option[DateTime], default: String) = date match {
case Some(d) => {
s"${easyReadingTimestampFormat.print(d).toLowerCase}, ${easyReadingDateFormat.print(d)}"
}
case None => default
}
def shortDate(date: LocalDate) = shortDateFormat.print(date)
  def formatDays(days: Int) = s"$days day${if (days != 1) "s" else ""}"
}
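// Hedged editorial examples (not part of the library sources); outputs assume
// the Europe/London zone used by the formatters above.
//   Dates.formatDate(new LocalDate(2015, 1, 9)) // "9 January 2015"
//   Dates.formatDateAbbrMonth(new LocalDate(2015, 1, 9)) // "9 Jan 2015"
//   Dates.shortDate(new LocalDate(2015, 1, 9)) // "2015-01-09"
//   Dates.formatDays(1) // "1 day"
//   Dates.formatDays(3) // "3 days"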
| benaveryee/play-ui | src/main/twirl/uk/gov/hmrc/play/views/formatting/Dates.scala | Scala | apache-2.0 | 2,153 |
package geotrellis.network.graph
import geotrellis.network._
import geotrellis.network.graph._
import scala.collection.mutable
import spire.syntax.cfor._
object EdgeSet {
def apply(vertex:Vertex) = new EdgeSet(vertex)
}
class EdgeSet(val vertex:Vertex) extends Iterable[Edge] {
val edgesToTargets = mutable.Map[Vertex,mutable.ListBuffer[Edge]]()
def edges =
edgesToTargets.values.flatten
def iterator =
edges.iterator
private val edgeCounts = mutable.Map[TransitMode,Int]()
def edgeCount(mode:TransitMode) = edgeCounts.getOrElse(mode,0)
def addEdge(edge:Edge):Unit = {
val target = edge.target
if(!edgesToTargets.contains(target)) { edgesToTargets(target) = mutable.ListBuffer[Edge]() }
edgesToTargets(target) += edge
if(!edgeCounts.contains(edge.mode)) { edgeCounts(edge.mode) = 0 }
edgeCounts(edge.mode) += 1
}
override
def toString = {
s"EdgeSet(${vertex})"
}
}
| flibbertigibbet/open-transit-indicators | scala/geotrellis-transit/src/main/scala/geotrellis/network/graph/EdgeSet.scala | Scala | gpl-3.0 | 931 |
/*
If a function body consists solely of a match expression, we'll often put the
match on the same line as the function signature, rather than introducing
another level of nesting.
*/
def setHead[A](l: List[A], h: A): List[A] = l match {
case Nil => sys.error("setHead on empty list")
case Cons(_,t) => Cons(h,t)
}
| mebubo/fpinscala | answerkey/datastructures/03.answer.scala | Scala | mit | 319 |
/*
* Copyright (c) 2013 Bridgewater Associates, LP
*
* Distributed under the terms of the Modified BSD License. The full license is in
* the file COPYING, distributed as part of this software.
*/
package notebook.kernel
//import org.clapper.util.classutil.ClassUtil
import notebook.util.StringCompletor
object StringCompletorResolver {
lazy val completor = {
val className = "notebook.kernel.TestStringCompletor"
//ClassUtil.instantiateClass(className).asInstanceOf[StringCompletor]
Class.forName(className).getConstructor().newInstance().asInstanceOf[StringCompletor]
}
}
| bigdatagenomics/mango-notebook | modules/kernel/src/main/scala/notebook/kernel/StringCompletorResolver.scala | Scala | apache-2.0 | 601 |
/*
* SPDX-License-Identifier: Apache-2.0
* Copyright 2016-2020 Daniel Urban and contributors listed in NOTICE.txt
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package dev.tauri.choam
package kcas
import org.openjdk.jcstress.annotations._
import org.openjdk.jcstress.annotations.Outcome.Outcomes
import org.openjdk.jcstress.annotations.Expect._
import org.openjdk.jcstress.infra.results.LL_Result
@JCStressTest
@State
@Description("IBRStackFast pop/push should be atomic")
@Outcomes(Array(
new Outcome(id = Array("z, List(x, y)", "z, List(y, x)"), expect = ACCEPTABLE, desc = "Pop is the first"),
new Outcome(id = Array("x, List(y, z)", "y, List(x, z)"), expect = ACCEPTABLE, desc = "Pop one of the pushed values")
))
class IBRStackFastTest {
private[this] val stack =
IBRStackFast[String]("z")
@Actor
def push1(): Unit = {
this.stack.push("x", IBRStackFast.threadLocalContext())
}
@Actor
def push2(): Unit = {
this.stack.push("y", IBRStackFast.threadLocalContext())
}
@Actor
def pop(r: LL_Result): Unit = {
r.r1 = this.stack.tryPop(IBRStackFast.threadLocalContext())
}
@Arbiter
def arbiter(r: LL_Result): Unit = {
r.r2 = stack.unsafeToList(IBRStackFast.threadLocalContext())
}
}
| durban/exp-reagents | stress/src/test/scala/dev/tauri/choam/kcas/IBRStackFastTest.scala | Scala | apache-2.0 | 1,756 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.openwhisk.core.entity.test
import java.util.Base64
import scala.concurrent.duration.DurationInt
import scala.language.postfixOps
import scala.util.Failure
import scala.util.Try
import org.junit.runner.RunWith
import org.scalatest.BeforeAndAfter
import org.scalatest.FlatSpec
import org.scalatest.Matchers
import org.scalatest.junit.JUnitRunner
import spray.json._
import spray.json.DefaultJsonProtocol._
import org.apache.openwhisk.common.TransactionId
import org.apache.openwhisk.core.controller.test.WhiskAuthHelpers
import org.apache.openwhisk.core.entitlement.Privilege
import org.apache.openwhisk.core.entity.ExecManifest.{ImageName, RuntimeManifest}
import org.apache.openwhisk.core.entity._
import org.apache.openwhisk.core.entity.size.SizeInt
import org.apache.openwhisk.http.Messages
import org.apache.openwhisk.utils.JsHelpers
@RunWith(classOf[JUnitRunner])
class SchemaTests extends FlatSpec with BeforeAndAfter with ExecHelpers with Matchers {
behavior of "AuthKey"
behavior of "Privilege"
private implicit class ExecJson(e: Exec) {
def asJson: JsObject = Exec.serdes.write(e).asJsObject
}
it should "serdes a right" in {
Privilege.serdes.read("READ".toJson) shouldBe Privilege.READ
Privilege.serdes.read("read".toJson) shouldBe Privilege.READ
a[DeserializationException] should be thrownBy Privilege.serdes.read("???".toJson)
}
behavior of "TransactionId"
it should "serdes a transaction id without extraLogging parameter" in {
val txIdWithoutParameter = TransactionId("4711")
// test serialization
val serializedTxIdWithoutParameter = TransactionId.serdes.write(txIdWithoutParameter)
serializedTxIdWithoutParameter match {
case JsArray(Vector(JsString(id), JsNumber(_))) =>
assert(id == txIdWithoutParameter.meta.id)
case _ => withClue(serializedTxIdWithoutParameter) { assert(false) }
}
// test deserialization
val deserializedTxIdWithoutParameter = TransactionId.serdes.read(serializedTxIdWithoutParameter)
deserializedTxIdWithoutParameter.meta.id should equal(txIdWithoutParameter.meta.id)
deserializedTxIdWithoutParameter.meta.extraLogging should equal(false)
}
it should "serdes a transaction id with extraLogging parameter" in {
val txIdWithParameter = TransactionId("4711", true)
// test serialization
val serializedTxIdWithParameter = TransactionId.serdes.write(txIdWithParameter)
serializedTxIdWithParameter match {
case JsArray(Vector(JsString(id), JsNumber(_), JsBoolean(extraLogging))) =>
assert(id == txIdWithParameter.meta.id)
assert(extraLogging)
case _ => withClue(serializedTxIdWithParameter) { assert(false) }
}
// test deserialization
val deserializedTxIdWithParameter = TransactionId.serdes.read(serializedTxIdWithParameter)
deserializedTxIdWithParameter.meta.id should equal(txIdWithParameter.meta.id)
assert(deserializedTxIdWithParameter.meta.extraLogging)
}
behavior of "Identity"
it should "serdes write an identity" in {
val i = WhiskAuthHelpers.newIdentity()
val expected = JsObject(
"subject" -> i.subject.asString.toJson,
"namespace" -> i.namespace.toJson,
"authkey" -> i.authkey.toEnvironment,
"rights" -> Array("READ", "PUT", "DELETE", "ACTIVATE").toJson,
"limits" -> JsObject.empty)
Identity.serdes.write(i) shouldBe expected
}
it should "serdes read an generic identity" in {
val uuid = UUID()
val subject = Subject("test_subject")
val entity = EntityName("test_subject")
val genericAuthKey = new GenericAuthKey(JsObject("test_key" -> "test_value".toJson))
val i = WhiskAuthHelpers.newIdentity(subject, uuid, genericAuthKey)
val json = JsObject(
"subject" -> Subject("test_subject").toJson,
"namespace" -> Namespace(entity, uuid).toJson,
"authkey" -> JsObject("test_key" -> "test_value".toJson),
"rights" -> Array("READ", "PUT", "DELETE", "ACTIVATE").toJson,
"limits" -> JsObject.empty)
Identity.serdes.read(json) shouldBe i
}
behavior of "DocInfo"
it should "accept well formed doc info" in {
Seq("a", " a", "a ").foreach { i =>
val d = DocInfo(i)
assert(d.id.asString == i.trim)
}
}
it should "accept any string as doc revision" in {
Seq("a", " a", "a ", "", null).foreach { i =>
val d = DocRevision(i)
assert(d.rev == (if (i != null) i.trim else null))
}
DocRevision.serdes.read(JsNull) shouldBe DocRevision.empty
DocRevision.serdes.read(JsString("")) shouldBe DocRevision("")
DocRevision.serdes.read(JsString("a")) shouldBe DocRevision("a")
DocRevision.serdes.read(JsString(" a")) shouldBe DocRevision("a")
DocRevision.serdes.read(JsString("a ")) shouldBe DocRevision("a")
a[DeserializationException] should be thrownBy DocRevision.serdes.read(JsNumber(1))
}
it should "reject malformed doc info" in {
Seq(null, "", " ").foreach { i =>
an[IllegalArgumentException] should be thrownBy DocInfo(i)
}
}
it should "reject malformed doc ids" in {
Seq(null, "", " ").foreach { i =>
an[IllegalArgumentException] should be thrownBy DocId(i)
}
}
behavior of "EntityPath"
it should "accept well formed paths" in {
val paths = Seq(
"/a",
"//a",
"//a//",
"//a//b//c",
"//a//b/c//",
"a",
"a/b",
"a/b/",
"[email protected]",
"[email protected]/",
"[email protected]/d",
"_a/",
"_ _",
"a/b/c")
val expected =
Seq("a", "a", "a", "a/b/c", "a/b/c", "a", "a/b", "a/b", "[email protected]", "[email protected]", "[email protected]/d", "_a", "_ _", "a/b/c")
val spaces = paths.zip(expected).foreach { p =>
EntityPath(p._1).namespace shouldBe p._2
}
EntityPath.DEFAULT.addPath(EntityName("a")).toString shouldBe "_/a"
EntityPath.DEFAULT.addPath(EntityPath("a")).toString shouldBe "_/a"
EntityPath.DEFAULT.addPath(EntityPath("a/b")).toString shouldBe "_/a/b"
EntityPath.DEFAULT.resolveNamespace(EntityName("a")) shouldBe EntityPath("a")
EntityPath("a").resolveNamespace(EntityName("b")) shouldBe EntityPath("a")
EntityPath.DEFAULT.resolveNamespace(Namespace(EntityName("a"), UUID())) shouldBe EntityPath("a")
EntityPath("a").resolveNamespace(Namespace(EntityName("b"), UUID())) shouldBe EntityPath("a")
EntityPath("a").defaultPackage shouldBe true
EntityPath("a/b").defaultPackage shouldBe false
EntityPath("a").root shouldBe EntityName("a")
EntityPath("a").last shouldBe EntityName("a")
EntityPath("a/b").root shouldBe EntityName("a")
EntityPath("a/b").last shouldBe EntityName("b")
EntityPath("a").relativePath shouldBe empty
EntityPath("a/b").relativePath shouldBe Some(EntityPath("b"))
EntityPath("a/b/c").relativePath shouldBe Some(EntityPath("b/c"))
EntityPath("a/b").toFullyQualifiedEntityName shouldBe FullyQualifiedEntityName(EntityPath("a"), EntityName("b"))
}
it should "reject malformed paths" in {
val paths = Seq(
null,
"",
" ",
"a/ ",
"a/b/c ",
" xxx",
"xxx ",
" xxx",
"xxx/ ",
"/",
" /",
"/ ",
"//",
"///",
" / / / ",
"a/b/ c",
"a/ /b",
" a/ b")
paths.foreach { p =>
an[IllegalArgumentException] should be thrownBy EntityPath(p)
}
an[IllegalArgumentException] should be thrownBy EntityPath("a").toFullyQualifiedEntityName
}
behavior of "EntityName"
it should "accept well formed names" in {
val paths = Seq(
"a",
"a b",
"[email protected]&d",
"a@&b",
"_a",
"_",
"_ _",
"a0",
"a 0",
"a.0",
"a@@&",
"0",
"0.0",
"0.0.0",
"0a",
"0.a",
"a" * EntityName.ENTITY_NAME_MAX_LENGTH)
paths.foreach { n =>
assert(EntityName(n).toString == n)
}
}
it should "reject malformed names" in {
val paths = Seq(
null,
"",
" ",
" xxx",
"xxx ",
"/",
" /",
"/ ",
"0 ",
"a=2b",
"_ ",
"a?b",
"x#x",
"aΒ§b",
"a ",
"a()b",
"a{}b",
"a \\t",
"-abc",
"&abc",
"a\\n",
"a" * (EntityName.ENTITY_NAME_MAX_LENGTH + 1))
paths.foreach { p =>
an[IllegalArgumentException] should be thrownBy EntityName(p)
}
}
behavior of "FullyQualifiedEntityName"
it should "work with paths" in {
FullyQualifiedEntityName(EntityPath("a"), EntityName("b")).add(EntityName("c")) shouldBe
FullyQualifiedEntityName(EntityPath("a/b"), EntityName("c"))
FullyQualifiedEntityName(EntityPath("a"), EntityName("b")).fullPath shouldBe EntityPath("a/b")
}
it should "deserialize a fully qualified name without a version" in {
val names = Seq(
JsObject("path" -> "a".toJson, "name" -> "b".toJson),
JsObject("path" -> "a".toJson, "name" -> "b".toJson, "version" -> "0.0.1".toJson),
JsString("a/b"),
JsString("n/a/b"),
JsString("/a/b"),
JsString("/n/a/b"),
JsString("b")) //JsObject("namespace" -> "a".toJson, "name" -> "b".toJson))
FullyQualifiedEntityName.serdes.read(names(0)) shouldBe FullyQualifiedEntityName(EntityPath("a"), EntityName("b"))
FullyQualifiedEntityName.serdes.read(names(1)) shouldBe FullyQualifiedEntityName(
EntityPath("a"),
EntityName("b"),
Some(SemVer()))
FullyQualifiedEntityName.serdes.read(names(2)) shouldBe FullyQualifiedEntityName(EntityPath("a"), EntityName("b"))
FullyQualifiedEntityName.serdes.read(names(3)) shouldBe FullyQualifiedEntityName(EntityPath("n/a"), EntityName("b"))
FullyQualifiedEntityName.serdes.read(names(4)) shouldBe FullyQualifiedEntityName(EntityPath("a"), EntityName("b"))
FullyQualifiedEntityName.serdes.read(names(5)) shouldBe FullyQualifiedEntityName(EntityPath("n/a"), EntityName("b"))
a[DeserializationException] should be thrownBy FullyQualifiedEntityName.serdes.read(names(6))
a[DeserializationException] should be thrownBy FullyQualifiedEntityName.serdesAsDocId.read(names(0))
a[DeserializationException] should be thrownBy FullyQualifiedEntityName.serdesAsDocId.read(names(1))
FullyQualifiedEntityName.serdesAsDocId.read(names(2)) shouldBe FullyQualifiedEntityName(
EntityPath("a"),
EntityName("b"))
FullyQualifiedEntityName.serdesAsDocId.read(names(3)) shouldBe FullyQualifiedEntityName(
EntityPath("n/a"),
EntityName("b"))
FullyQualifiedEntityName.serdesAsDocId.read(names(4)) shouldBe FullyQualifiedEntityName(
EntityPath("a"),
EntityName("b"))
FullyQualifiedEntityName.serdesAsDocId.read(names(5)) shouldBe FullyQualifiedEntityName(
EntityPath("n/a"),
EntityName("b"))
a[DeserializationException] should be thrownBy FullyQualifiedEntityName.serdesAsDocId.read(names(6))
}
it should "resolve names that may or may not be fully qualified" in {
FullyQualifiedEntityName.resolveName(JsString("a"), EntityName("ns")) shouldBe Some(
EntityPath("ns/a").toFullyQualifiedEntityName)
FullyQualifiedEntityName.resolveName(JsString("/_/a"), EntityName("ns")) shouldBe Some(
EntityPath("ns/a").toFullyQualifiedEntityName)
FullyQualifiedEntityName.resolveName(JsString("_/a"), EntityName("ns")) shouldBe Some(
EntityPath("ns/_/a").toFullyQualifiedEntityName)
FullyQualifiedEntityName.resolveName(JsString("/_/a/b"), EntityName("ns")) shouldBe Some(
EntityPath("ns/a/b").toFullyQualifiedEntityName)
FullyQualifiedEntityName.resolveName(JsString("a/b"), EntityName("ns")) shouldBe Some(
EntityPath("ns/a/b").toFullyQualifiedEntityName)
FullyQualifiedEntityName.resolveName(JsString("a/b/c"), EntityName("ns")) shouldBe Some(
EntityPath("/a/b/c").toFullyQualifiedEntityName)
FullyQualifiedEntityName.resolveName(JsString("a/b/c/d"), EntityName("ns")) shouldBe None
FullyQualifiedEntityName.resolveName(JsString("/a"), EntityName("ns")) shouldBe None
FullyQualifiedEntityName.resolveName(JsString("/a/b"), EntityName("ns")) shouldBe Some(
EntityPath("/a/b").toFullyQualifiedEntityName)
FullyQualifiedEntityName.resolveName(JsString("/a/b/c"), EntityName("ns")) shouldBe Some(
EntityPath("/a/b/c").toFullyQualifiedEntityName)
FullyQualifiedEntityName.resolveName(JsString("/a/b/c/d"), EntityName("ns")) shouldBe None
FullyQualifiedEntityName.resolveName(JsString(""), EntityName("ns")) shouldBe None
}
behavior of "Binding"
it should "desiarilize legacy format" in {
val names =
Seq(
JsObject("namespace" -> "a".toJson, "name" -> "b".toJson),
JsObject.empty,
JsObject("name" -> "b".toJson),
JsNull)
Binding.optionalBindingDeserializer.read(names(0)) shouldBe Some(Binding(EntityName("a"), EntityName("b")))
Binding.optionalBindingDeserializer.read(names(1)) shouldBe None
a[DeserializationException] should be thrownBy Binding.optionalBindingDeserializer.read(names(2))
a[DeserializationException] should be thrownBy Binding.optionalBindingDeserializer.read(names(3))
}
it should "serialize optional binding to empty object" in {
Binding.optionalBindingSerializer.write(None) shouldBe JsObject.empty
}
behavior of "WhiskPackagePut"
it should "deserialize empty request" in {
WhiskPackagePut.serdes.read(JsObject.empty) shouldBe WhiskPackagePut()
//WhiskPackagePut.serdes.read(JsObject("binding" -> JsNull)) shouldBe WhiskPackagePut()
WhiskPackagePut.serdes.read(JsObject("binding" -> JsObject.empty)) shouldBe WhiskPackagePut()
//WhiskPackagePut.serdes.read(JsObject("binding" -> "a/b".toJson)) shouldBe WhiskPackagePut(binding = Some(Binding(EntityPath("a"), EntityName("b"))))
a[DeserializationException] should be thrownBy WhiskPackagePut.serdes.read(JsObject("binding" -> JsNull))
}
behavior of "WhiskPackage"
it should "not deserialize package without binding property" in {
val pkg = WhiskPackage(EntityPath("a"), EntityName("b"))
WhiskPackage.serdes.read(JsObject(pkg.toJson.fields + ("binding" -> JsObject.empty))) shouldBe pkg
a[DeserializationException] should be thrownBy WhiskPackage.serdes.read(JsObject(pkg.toJson.fields - "binding"))
}
it should "serialize package with empty binding property" in {
val pkg = WhiskPackage(EntityPath("a"), EntityName("b"))
WhiskPackage.serdes.write(pkg) shouldBe JsObject(
"namespace" -> "a".toJson,
"name" -> "b".toJson,
"binding" -> JsObject.empty,
"parameters" -> Parameters().toJson,
"version" -> SemVer().toJson,
"publish" -> JsBoolean(false),
"annotations" -> Parameters().toJson)
}
it should "serialize and deserialize package binding" in {
val pkg = WhiskPackage(EntityPath("a"), EntityName("b"), Some(Binding(EntityName("x"), EntityName("y"))))
val pkgAsJson = JsObject(
"namespace" -> "a".toJson,
"name" -> "b".toJson,
"binding" -> JsObject("namespace" -> "x".toJson, "name" -> "y".toJson),
"parameters" -> Parameters().toJson,
"version" -> SemVer().toJson,
"publish" -> JsBoolean(false),
"annotations" -> Parameters().toJson)
//val legacyPkgAsJson = JsObject(pkgAsJson.fields + ("binding" -> JsObject("namespace" -> "x".toJson, "name" -> "y".toJson)))
WhiskPackage.serdes.write(pkg) shouldBe pkgAsJson
WhiskPackage.serdes.read(pkgAsJson) shouldBe pkg
//WhiskPackage.serdes.read(legacyPkgAsJson) shouldBe pkg
}
behavior of "SemVer"
it should "parse semantic versions" in {
val semvers = Seq("0.0.1", "1", "1.2", "1.2.3.").map { SemVer(_) }
assert(semvers(0) == SemVer(0, 0, 1) && semvers(0).toString == "0.0.1")
assert(semvers(1) == SemVer(1, 0, 0) && semvers(1).toString == "1.0.0")
assert(semvers(2) == SemVer(1, 2, 0) && semvers(2).toString == "1.2.0")
assert(semvers(3) == SemVer(1, 2, 3) && semvers(3).toString == "1.2.3")
}
it should "permit leading zeros but strip them away" in {
val semvers = Seq("0.0.01", "01", "01.02", "01.02.003.").map { SemVer(_) }
assert(semvers(0) == SemVer(0, 0, 1))
assert(semvers(1) == SemVer(1, 0, 0))
assert(semvers(2) == SemVer(1, 2, 0))
assert(semvers(3) == SemVer(1, 2, 3))
}
it should "reject malformed semantic version" in {
val semvers = Seq("0", "0.0.0", "00.00.00", ".1", "-1", "0.-1.0", "0.0.-1", "xyz", "", null)
semvers.foreach { v =>
val thrown = intercept[IllegalArgumentException] {
SemVer(v)
}
assert(thrown.getMessage.contains("bad semantic version"))
}
}
it should "reject negative values" in {
an[IllegalArgumentException] should be thrownBy SemVer(-1, 0, 0)
an[IllegalArgumentException] should be thrownBy SemVer(0, -1, 0)
an[IllegalArgumentException] should be thrownBy SemVer(0, 0, -1)
an[IllegalArgumentException] should be thrownBy SemVer(0, 0, 0)
}
behavior of "Exec"
it should "initialize exec manifest" in {
val runtimes = ExecManifest.runtimesManifest
runtimes.resolveDefaultRuntime("nodejs:default").get.kind shouldBe "nodejs:6"
runtimes.resolveDefaultRuntime("swift").get.deprecated shouldBe Some(true)
}
it should "properly deserialize and reserialize JSON" in {
val b64Body = """ZnVuY3Rpb24gbWFpbihhcmdzKSB7IHJldHVybiBhcmdzOyB9Cg=="""
val json = Seq[JsObject](
JsObject("kind" -> "nodejs:6".toJson, "code" -> "js1".toJson, "binary" -> false.toJson),
JsObject("kind" -> "nodejs:6".toJson, "code" -> "js2".toJson, "binary" -> false.toJson, "foo" -> "bar".toJson),
JsObject("kind" -> "swift".toJson, "code" -> "swift1".toJson, "binary" -> false.toJson),
JsObject("kind" -> "swift:3.1.1".toJson, "code" -> b64Body.toJson, "binary" -> true.toJson),
JsObject("kind" -> "nodejs:6".toJson, "code" -> b64Body.toJson, "binary" -> true.toJson))
val execs = json.map { e =>
Exec.serdes.read(e)
}
assert(execs(0) == jsDefault("js1") && json(0) == jsDefault("js1").asJson)
assert(execs(1) == jsDefault("js2") && json(1) != jsDefault("js2").asJson) // ignores unknown properties
assert(execs(2) == swift("swift1") && json(2) == swift("swift1").asJson)
assert(execs(3) == swift3(b64Body) && json(3) == swift3(b64Body).asJson)
assert(execs(4) == jsDefault(b64Body) && json(4) == jsDefault(b64Body).asJson)
}
it should "properly deserialize and reserialize JSON blackbox" in {
val b64 = Base64.getEncoder()
val contents = b64.encodeToString("tarball".getBytes)
val json = Seq[JsObject](
JsObject("kind" -> "blackbox".toJson, "image" -> "container1".toJson, "binary" -> false.toJson),
JsObject(
"kind" -> "blackbox".toJson,
"image" -> "container1".toJson,
"binary" -> true.toJson,
"code" -> contents.toJson),
JsObject(
"kind" -> "blackbox".toJson,
"image" -> "container1".toJson,
"binary" -> true.toJson,
"code" -> contents.toJson,
"main" -> "naim".toJson))
val execs = json.map { e =>
Exec.serdes.read(e)
}
execs(0) shouldBe bb("container1")
execs(1) shouldBe bb("container1", contents)
execs(2) shouldBe bb("container1", contents, Some("naim"))
json(0) shouldBe bb("container1").asJson
json(1) shouldBe bb("container1", contents).asJson
json(2) shouldBe bb("container1", contents, Some("naim")).asJson
execs(0) shouldBe Exec.serdes.read(
JsObject(
"kind" -> "blackbox".toJson,
"image" -> "container1".toJson,
"binary" -> false.toJson,
"code" -> " ".toJson))
execs(0) shouldBe Exec.serdes.read(
JsObject(
"kind" -> "blackbox".toJson,
"image" -> "container1".toJson,
"binary" -> false.toJson,
"code" -> "".toJson))
}
it should "exclude undefined code in whisk action initializer" in {
ExecutableWhiskAction(EntityPath("a"), EntityName("b"), bb("container1")).containerInitializer shouldBe {
JsObject("name" -> "b".toJson, "binary" -> false.toJson, "main" -> "main".toJson)
}
ExecutableWhiskAction(EntityPath("a"), EntityName("b"), bb("container1", "xyz")).containerInitializer shouldBe {
JsObject("name" -> "b".toJson, "binary" -> false.toJson, "main" -> "main".toJson, "code" -> "xyz".toJson)
}
ExecutableWhiskAction(EntityPath("a"), EntityName("b"), bb("container1", "", Some("naim"))).containerInitializer shouldBe {
JsObject("name" -> "b".toJson, "binary" -> false.toJson, "main" -> "naim".toJson)
}
}
it should "compare as equal two actions even if their revision does not match" in {
val exec = CodeExecAsString(RuntimeManifest("actionKind", ImageName("testImage")), "testCode", None)
val actionA = WhiskAction(EntityPath("actionSpace"), EntityName("actionName"), exec)
val actionB = actionA.copy()
val actionC = actionA.copy()
actionC.revision(DocRevision("2"))
actionA shouldBe actionB
actionA shouldBe actionC
}
it should "compare as equal two executable actions even if their revision does not match" in {
val exec = CodeExecAsString(RuntimeManifest("actionKind", ImageName("testImage")), "testCode", None)
val actionA = ExecutableWhiskAction(EntityPath("actionSpace"), EntityName("actionName"), exec)
val actionB = actionA.copy()
val actionC = actionA.copy()
actionC.revision(DocRevision("2"))
actionA shouldBe actionB
actionA shouldBe actionC
}
it should "reject malformed JSON" in {
val b64 = Base64.getEncoder()
val contents = b64.encodeToString("tarball".getBytes)
val execs = Seq[JsValue](
null,
JsObject.empty,
JsNull,
JsObject("init" -> "zipfile".toJson),
JsObject("kind" -> "nodejs:6".toJson, "code" -> JsNumber(42)),
JsObject("kind" -> "nodejs:6".toJson, "init" -> "zipfile".toJson),
JsObject("kind" -> "turbopascal".toJson, "code" -> "BEGIN1".toJson),
JsObject("kind" -> "blackbox".toJson, "code" -> "js".toJson),
JsObject("kind" -> "swift".toJson, "swiftcode" -> "swift".toJson))
execs.foreach { e =>
withClue(if (e != null) e else "null") {
val thrown = intercept[Throwable] {
Exec.serdes.read(e)
}
thrown match {
case _: DeserializationException =>
case _: IllegalArgumentException =>
case t => assert(false, "Unexpected exception:" + t)
}
}
}
}
it should "reject null code/image arguments" in {
an[IllegalArgumentException] should be thrownBy Exec.serdes.read(null)
a[DeserializationException] should be thrownBy Exec.serdes.read("{}" parseJson)
a[DeserializationException] should be thrownBy Exec.serdes.read(JsString(""))
}
it should "serialize to json" in {
val execs = Seq(bb("container"), jsDefault("js"), jsDefault("js"), swift("swift")).map { _.asJson }
assert(execs(0) == JsObject("kind" -> "blackbox".toJson, "image" -> "container".toJson, "binary" -> false.toJson))
assert(execs(1) == JsObject("kind" -> "nodejs:6".toJson, "code" -> "js".toJson, "binary" -> false.toJson))
assert(execs(2) == JsObject("kind" -> "nodejs:6".toJson, "code" -> "js".toJson, "binary" -> false.toJson))
assert(execs(3) == JsObject("kind" -> "swift".toJson, "code" -> "swift".toJson, "binary" -> false.toJson))
}
behavior of "Parameter"
it should "properly deserialize and reserialize JSON" in {
val json = Seq[JsValue](
JsArray(JsObject("key" -> "k".toJson, "value" -> "v".toJson)),
JsArray(JsObject("key" -> "k".toJson, "value" -> "v".toJson, "foo" -> "bar".toJson)),
JsArray(JsObject("key" -> "k".toJson, "value" -> 3.toJson)),
JsArray(JsObject("key" -> "k".toJson, "value" -> Vector(false, true).toJson)))
val params = json.map { p =>
Parameters.serdes.read(p)
}
assert(params(0) == Parameters("k", "v"))
assert(params(1) == Parameters("k", "v"))
assert(params(0).toString == json(0).compactPrint)
    assert(params(1).toString == json(0).compactPrint) // unknown prop "foo" is dropped
    assert(params(1).toString != json(1).compactPrint) // so the reserialized form differs from the input
    assert(params(2).toString == json(2).compactPrint)
    assert(params(3).toString == json(3).compactPrint)
}
it should "filter immutable parameters" in {
val params = Parameters("k", "v") ++ Parameters("ns", null: String) ++ Parameters("njs", JsNull)
params.definedParameters shouldBe Set("k")
}
it should "reject malformed JSON" in {
val params = Seq[JsValue](
null,
JsObject.empty,
JsObject("key" -> "k".toJson),
JsObject("value" -> "v".toJson),
JsObject("key" -> JsNull, "value" -> "v".toJson),
JsObject("key" -> "k".toJson, ("value" -> JsNull)),
JsObject("key" -> JsNull, "value" -> JsNull),
JsObject("KEY" -> "k".toJson, "VALUE" -> "v".toJson),
JsObject("key" -> "k".toJson, "value" -> 0.toJson))
params.foreach { p =>
a[DeserializationException] should be thrownBy Parameters.serdes.read(p)
}
}
it should "reject undefined key" in {
a[DeserializationException] should be thrownBy Parameters.serdes.read(null: JsValue)
an[IllegalArgumentException] should be thrownBy Parameters(null, null: String)
an[IllegalArgumentException] should be thrownBy Parameters("", null: JsValue)
an[IllegalArgumentException] should be thrownBy Parameters(" ", null: String)
an[IllegalArgumentException] should be thrownBy Parameters(null, "")
an[IllegalArgumentException] should be thrownBy Parameters(null, " ")
an[IllegalArgumentException] should be thrownBy Parameters(null)
}
it should "serialize to json" in {
assert(
Parameters("k", null: String).toString == JsArray(JsObject("key" -> "k".toJson, "value" -> JsNull)).compactPrint)
assert(Parameters("k", "").toString == JsArray(JsObject("key" -> "k".toJson, "value" -> "".toJson)).compactPrint)
assert(Parameters("k", " ").toString == JsArray(JsObject("key" -> "k".toJson, "value" -> "".toJson)).compactPrint)
assert(Parameters("k", "v").toString == JsArray(JsObject("key" -> "k".toJson, "value" -> "v".toJson)).compactPrint)
}
behavior of "ActionLimits"
it should "properly deserialize JSON" in {
val json = Seq[JsValue](
JsObject(
"timeout" -> TimeLimit.STD_DURATION.toMillis.toInt.toJson,
"memory" -> MemoryLimit.stdMemory.toMB.toInt.toJson,
"logs" -> LogLimit.stdLogSize.toMB.toInt.toJson,
"concurrency" -> ConcurrencyLimit.stdConcurrent.toInt.toJson),
JsObject(
"timeout" -> TimeLimit.STD_DURATION.toMillis.toInt.toJson,
"memory" -> MemoryLimit.stdMemory.toMB.toInt.toJson,
"logs" -> LogLimit.stdLogSize.toMB.toInt.toJson,
"concurrency" -> ConcurrencyLimit.stdConcurrent.toInt.toJson,
"foo" -> "bar".toJson),
JsObject(
"timeout" -> TimeLimit.STD_DURATION.toMillis.toInt.toJson,
"memory" -> MemoryLimit.stdMemory.toMB.toInt.toJson))
val limits = json.map(ActionLimits.serdes.read)
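    // Omitted fields (logs, concurrency) fall back to the standard defaults, so
    // all three documents parse to the default ActionLimits().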
assert(limits(0) == ActionLimits())
assert(limits(1) == ActionLimits())
assert(limits(2) == ActionLimits())
assert(limits(0).toJson == json(0))
assert(limits(1).toJson == json(0)) // drops unknown prop "foo"
assert(limits(1).toJson != json(1)) // drops unknown prop "foo"
}
it should "reject malformed JSON" in {
val limits = Seq[JsValue](
null,
JsObject.empty,
JsNull,
JsObject("timeout" -> TimeLimit.STD_DURATION.toMillis.toInt.toJson),
JsObject("memory" -> MemoryLimit.stdMemory.toMB.toInt.toJson),
JsObject("logs" -> (LogLimit.stdLogSize.toMB.toInt + 1).toJson),
JsObject(
"TIMEOUT" -> TimeLimit.STD_DURATION.toMillis.toInt.toJson,
"MEMORY" -> MemoryLimit.stdMemory.toMB.toInt.toJson),
JsObject(
"timeout" -> (TimeLimit.STD_DURATION.toMillis.toDouble + .01).toJson,
"memory" -> (MemoryLimit.stdMemory.toMB.toDouble + .01).toJson),
JsObject("timeout" -> null, "memory" -> null),
JsObject("timeout" -> JsNull, "memory" -> JsNull),
JsObject(
"timeout" -> TimeLimit.STD_DURATION.toMillis.toString.toJson,
"memory" -> MemoryLimit.stdMemory.toMB.toInt.toString.toJson))
limits.foreach { p =>
a[DeserializationException] should be thrownBy ActionLimits.serdes.read(p)
}
}
it should "pass the correct error message through" in {
val serdes = Seq(TimeLimit.serdes, MemoryLimit.serdes, LogLimit.serdes)
serdes foreach { s =>
withClue(s"serializer $s") {
if (s != LogLimit.serdes) {
val lb = the[DeserializationException] thrownBy s.read(JsNumber(0))
lb.getMessage should include("below allowed threshold")
} else {
val lb = the[DeserializationException] thrownBy s.read(JsNumber(-1))
lb.getMessage should include("a negative size of an object is not allowed")
}
val ub = the[DeserializationException] thrownBy s.read(JsNumber(Int.MaxValue))
ub.getMessage should include("exceeds allowed threshold")
val int = the[DeserializationException] thrownBy s.read(JsNumber(2.5))
int.getMessage should include("limit must be whole number")
}
}
}
it should "reject bad limit values" in {
an[IllegalArgumentException] should be thrownBy ActionLimits(
TimeLimit(TimeLimit.MIN_DURATION - 1.millisecond),
MemoryLimit(),
LogLimit())
an[IllegalArgumentException] should be thrownBy ActionLimits(
TimeLimit(),
MemoryLimit(MemoryLimit.minMemory - 1.B),
LogLimit())
an[IllegalArgumentException] should be thrownBy ActionLimits(
TimeLimit(),
MemoryLimit(),
LogLimit(LogLimit.minLogSize - 1.B))
an[IllegalArgumentException] should be thrownBy ActionLimits(
TimeLimit(),
MemoryLimit(),
LogLimit(),
ConcurrencyLimit(ConcurrencyLimit.minConcurrent - 1))
an[IllegalArgumentException] should be thrownBy ActionLimits(
TimeLimit(TimeLimit.MAX_DURATION + 1.millisecond),
MemoryLimit(),
LogLimit())
an[IllegalArgumentException] should be thrownBy ActionLimits(
TimeLimit(),
MemoryLimit(MemoryLimit.maxMemory + 1.B),
LogLimit())
an[IllegalArgumentException] should be thrownBy ActionLimits(
TimeLimit(),
MemoryLimit(),
LogLimit(LogLimit.maxLogSize + 1.B))
an[IllegalArgumentException] should be thrownBy ActionLimits(
TimeLimit(),
MemoryLimit(),
LogLimit(),
ConcurrencyLimit(ConcurrencyLimit.maxConcurrent + 1))
}
it should "parse activation id as uuid" in {
val id = "213174381920559471141441e1111111"
val aid = ActivationId.parse(id)
assert(aid.isSuccess)
assert(aid.get.toString == id)
}
it should "parse activation id as uuid when made up of no numbers" in {
val id = "a" * 32
val aid = ActivationId.parse(id)
assert(aid.isSuccess)
assert(aid.get.toString == id)
}
it should "parse activation id as uuid when made up of no letters" in {
val id = "1" * 32
val aid = ActivationId.parse(id)
assert(aid.isSuccess)
assert(aid.get.toString == id)
}
it should "parse an activation id as uuid when it is a number" in {
val id = "1" * 32
val aid = Try { ActivationId.serdes.read(BigInt(id).toJson) }
assert(aid.isSuccess)
assert(aid.get.toString == id)
}
it should "not parse invalid activation id" in {
val id = "213174381920559471141441e111111z"
assert(ActivationId.parse(id).isFailure)
Try(ActivationId.serdes.read(JsString(id))) shouldBe Failure {
DeserializationException(Messages.activationIdIllegal)
}
}
it should "not parse activation id if longer than uuid" in {
val id = "213174381920559471141441e1111111abc"
assert(ActivationId.parse(id).isFailure)
Try(ActivationId.serdes.read(JsString(id))) shouldBe Failure {
DeserializationException(Messages.activationIdLengthError(SizeError("Activation id", id.length.B, 32.B)))
}
}
it should "not parse activation id if shorter than uuid" in {
val id = "213174381920559471141441e1"
ActivationId.parse(id) shouldBe 'failure
Try(ActivationId.serdes.read(JsString(id))) shouldBe Failure {
DeserializationException(Messages.activationIdLengthError(SizeError("Activation id", id.length.B, 32.B)))
}
}
behavior of "Js Helpers"
it should "project paths from json object" in {
val js = JsObject("a" -> JsObject("b" -> JsObject("c" -> JsString("v"))), "b" -> JsString("v"))
JsHelpers.fieldPathExists(js) shouldBe true
JsHelpers.fieldPathExists(js, "a") shouldBe true
JsHelpers.fieldPathExists(js, "a", "b") shouldBe true
JsHelpers.fieldPathExists(js, "a", "b", "c") shouldBe true
JsHelpers.fieldPathExists(js, "a", "b", "c", "d") shouldBe false
JsHelpers.fieldPathExists(js, "b") shouldBe true
JsHelpers.fieldPathExists(js, "c") shouldBe false
JsHelpers.getFieldPath(js) shouldBe Some(js)
JsHelpers.getFieldPath(js, "x") shouldBe None
JsHelpers.getFieldPath(js, "b") shouldBe Some(JsString("v"))
JsHelpers.getFieldPath(js, "a") shouldBe Some(JsObject("b" -> JsObject("c" -> JsString("v"))))
JsHelpers.getFieldPath(js, "a", "b") shouldBe Some(JsObject("c" -> JsString("v")))
JsHelpers.getFieldPath(js, "a", "b", "c") shouldBe Some(JsString("v"))
JsHelpers.getFieldPath(js, "a", "b", "c", "d") shouldBe None
JsHelpers.getFieldPath(JsObject.empty) shouldBe Some(JsObject.empty)
}
}
| starpit/openwhisk | tests/src/test/scala/org/apache/openwhisk/core/entity/test/SchemaTests.scala | Scala | apache-2.0 | 34,438 |
package org.codeswarm
/** @see [[PolyMap]]
* @see [[PolyMap.Index]]
*/
package object polymap | chris-martin/polymap | src/main/scala/package.scala | Scala | apache-2.0 | 98 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.plan.nodes.physical.stream
import org.apache.flink.table.planner.calcite.FlinkTypeFactory
import org.apache.flink.table.planner.plan.nodes.exec.spec.TemporalTableSourceSpec
import org.apache.flink.table.planner.plan.nodes.exec.stream.StreamExecLookupJoin
import org.apache.flink.table.planner.plan.nodes.exec.{ExecNode, InputProperty}
import org.apache.flink.table.planner.plan.nodes.physical.common.CommonPhysicalLookupJoin
import org.apache.flink.table.planner.plan.utils.{FlinkRelOptUtil, FlinkRexUtil, JoinTypeUtil}
import org.apache.flink.table.planner.utils.JavaScalaConversionUtil
import org.apache.calcite.plan.{RelOptCluster, RelOptTable, RelTraitSet}
import org.apache.calcite.rel.RelNode
import org.apache.calcite.rel.core.{JoinInfo, JoinRelType}
import org.apache.calcite.rex.RexProgram
import java.util
import scala.collection.JavaConverters._
/**
 * Stream physical RelNode for temporal table join that is implemented by lookup.
*/
class StreamPhysicalLookupJoin(
cluster: RelOptCluster,
traitSet: RelTraitSet,
input: RelNode,
temporalTable: RelOptTable,
tableCalcProgram: Option[RexProgram],
joinInfo: JoinInfo,
joinType: JoinRelType)
extends CommonPhysicalLookupJoin(
cluster,
traitSet,
input,
temporalTable,
tableCalcProgram,
joinInfo,
joinType)
with StreamPhysicalRel {
override def requireWatermark: Boolean = false
override def copy(traitSet: RelTraitSet, inputs: util.List[RelNode]): RelNode = {
new StreamPhysicalLookupJoin(
cluster,
traitSet,
inputs.get(0),
temporalTable,
tableCalcProgram,
joinInfo,
joinType)
}
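  /** Translates this node into a [[StreamExecLookupJoin]]. Any calc on the
   * temporal table is expanded into separate projection and filter parts.
   */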
override def translateToExecNode(): ExecNode[_] = {
val (projectionOnTemporalTable, filterOnTemporalTable) = calcOnTemporalTable match {
case Some(program) =>
val (projection, filter) = FlinkRexUtil.expandRexProgram(program)
(JavaScalaConversionUtil.toJava(projection), filter.orNull)
case _ =>
(null, null)
}
new StreamExecLookupJoin(
JoinTypeUtil.getFlinkJoinType(joinType),
remainingCondition.orNull,
new TemporalTableSourceSpec(temporalTable),
allLookupKeys.map(item => (Int.box(item._1), item._2)).asJava,
projectionOnTemporalTable,
filterOnTemporalTable,
InputProperty.DEFAULT,
FlinkTypeFactory.toLogicalRowType(getRowType),
getRelDetailedDescription)
}
}
| lincoln-lil/flink | flink-table/flink-table-planner/src/main/scala/org/apache/flink/table/planner/plan/nodes/physical/stream/StreamPhysicalLookupJoin.scala | Scala | apache-2.0 | 3,270 |
/*
* Copyright 2014-2022 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.atlas.core.model
import java.awt.Color
import com.netflix.atlas.core.stacklang.Context
import com.netflix.atlas.core.stacklang.Interpreter
import com.netflix.atlas.core.stacklang.SimpleWord
import com.netflix.atlas.core.stacklang.StandardVocabulary.Macro
import com.netflix.atlas.core.stacklang.Vocabulary
import com.netflix.atlas.core.stacklang.Word
import com.netflix.atlas.core.util.Strings
object StyleVocabulary extends Vocabulary {
import com.netflix.atlas.core.model.ModelExtractors._
val name: String = "style"
val dependsOn: List[Vocabulary] = List(FilterVocabulary)
val words: List[Word] = List(
// Adjust the text for the legend
Legend,
Decode,
SearchAndReplace,
// Map to a particular axis
Axis,
// Legacy time shift operator
// https://github.com/Netflix/atlas/issues/64
Offset,
// Reducing and ordering the set of data on the chart
Filter,
Sort,
Order,
Limit,
Macro("head", List(":limit"), List("name,sps,:eq,(,nf.cluster,),:by,2")),
// Operations for manipulating the line style or presentation
Alpha,
Color,
Palette,
LineStyle,
LineWidth,
Macro("area", List("area", ":ls"), List("name,sps,:eq,:sum")),
Macro("line", List("line", ":ls"), List("name,sps,:eq,:sum")),
Macro("stack", List("stack", ":ls"), List("name,sps,:eq,(,nf.cluster,),:by")),
Macro("vspan", List("vspan", ":ls"), List("name,sps,:eq,:sum,:dup,200e3,:gt")),
// Legacy macro for visualizing epic expressions
Macro("des-epic-viz", desEpicViz, List("name,sps,:eq,:sum,10,0.1,0.5,0.2,0.2,4"))
)
sealed trait StyleWord extends SimpleWord {
protected def matcher: PartialFunction[List[Any], Boolean] = {
case (_: String) :: PresentationType(_) :: _ => true
}
protected def executor: PartialFunction[List[Any], List[Any]] = {
case (v: String) :: PresentationType(t) :: s =>
t.copy(settings = t.settings + (name -> v)) :: s
}
override def signature: String = "TimeSeriesExpr String -- StyleExpr"
}
case object Alpha extends SimpleWord {
override def name: String = "alpha"
override def summary: String =
"""
|Set the alpha value for the colors on the line. The value should be a two digit hex number
      |where `00` is transparent and `ff` is opaque. This setting will be ignored if the
|[color](style-color) setting is used for the same line.
""".stripMargin.trim
protected def matcher: PartialFunction[List[Any], Boolean] = {
case (_: String) :: PresentationType(_) :: _ => true
}
protected def executor: PartialFunction[List[Any], List[Any]] = {
case (v: String) :: PresentationType(t) :: s =>
val settings = t.settings.get("color") match {
case Some(c) => t.settings + ("color" -> withAlpha(c, v)) - "alpha"
case None => t.settings + ("alpha" -> v)
}
t.copy(settings = settings) :: s
}
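    /** Merges a two digit hex alpha value into an existing color setting, producing
      * an 8 digit ARGB hex string (e.g. color "f00" with alpha "40" yields "40ff0000").
      */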
private def withAlpha(color: String, alpha: String): String = {
val a = Integer.parseInt(alpha, 16)
val c = Strings.parseColor(color)
val nc = new Color(c.getRed, c.getGreen, c.getBlue, a)
"%08x".format(nc.getRGB)
}
override def signature: String = "TimeSeriesExpr String -- StyleExpr"
override def examples: List[String] = List(
"name,sps,:eq,:sum,:stack,40",
"name,sps,:eq,:sum,:stack,f00,:color,40"
)
}
case object Color extends SimpleWord {
override def name: String = "color"
override def summary: String =
"""
|Set the color for the line. The value should be one of:
|
|* [Hex triplet](http://en.wikipedia.org/wiki/Web_colors#Hex_triplet), e.g. f00 is red.
        |* 6 digit hex RGB, e.g. ff0000 is red.
|* 8 digit hex ARGB, e.g. ffff0000 is red. The first byte is the [alpha](style-alpha)
| setting to use with the color.
""".stripMargin.trim
protected def matcher: PartialFunction[List[Any], Boolean] = {
case (_: String) :: PresentationType(_) :: _ => true
}
protected def executor: PartialFunction[List[Any], List[Any]] = {
case (v: String) :: PresentationType(t) :: s =>
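        // an explicit color overrides any previously applied alpha or palette setting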
val settings = t.settings + ("color" -> v) - "alpha" - "palette"
t.copy(settings = settings) :: s
}
override def signature: String = "TimeSeriesExpr String -- StyleExpr"
override def examples: List[String] = List(
"name,sps,:eq,:sum,ff0000",
"name,sps,:eq,:sum,f00",
"name,sps,:eq,:sum,40,:alpha,f00"
)
}
case object Palette extends SimpleWord {
override def name: String = "palette"
override def summary: String =
"""
      |Set the [palette](Color-Palettes) to use for the results of an expression. This
      |operator allows a palette to be scoped to a particular group by instead of
      |all lines that share the same axis. A common use-case is to have multiple stacked
      |group by expressions using different palettes. For example, suppose I want to create
      |a graph showing overall requests per second hitting my services with successful requests
|shown in shades of [green](Color-Palettes#greens) and errors in shades of
|[red](Color-Palettes#reds). This can make it easy to visually see if a change is
|due to an increase in errors:
|
|
|
|Or a spike in successful requests:
|
|
|
|Since: 1.6
""".stripMargin.trim
protected def matcher: PartialFunction[List[Any], Boolean] = {
case (_: String) :: PresentationType(_) :: _ => true
case StringListType(_) :: PresentationType(_) :: _ => true
}
protected def executor: PartialFunction[List[Any], List[Any]] = {
case (v: String) :: PresentationType(t) :: s =>
val settings = t.settings + ("palette" -> v) - "color" - "alpha"
t.copy(settings = settings) :: s
case StringListType(vs) :: PresentationType(t) :: s =>
val v = vs.mkString("(,", ",", ",)")
val settings = t.settings + ("palette" -> v) - "color" - "alpha"
t.copy(settings = settings) :: s
}
override def signature: String = "TimeSeriesExpr String -- StyleExpr"
override def examples: List[String] = List(
"name,sps,:eq,:sum,reds",
"name,sps,:eq,:sum,(,nf.cluster,),:by,reds",
"name,sps,:eq,:sum,(,nf.cluster,),:by,(,1a9850,91cf60,d9ef8b,fee08b,fc8d59,d73027,)"
)
}
case object LineStyle extends StyleWord {
override def name: String = "ls"
override def summary: String =
"""
|Set the line style. The value should be one of:
|
|* `line`: this is the default, draws a normal line.
|* `area`: fill in the space between the line value and 0 on the Y-axis.
|* `stack`: stack the filled area on to the previous stacked lines on the same axis.
|* `vspan`: non-zero datapoints will be drawn as a vertical span.
|
|See the [line style examples](Line-Styles) page for more information.
""".stripMargin.trim
override def examples: List[String] =
List(
"name,sps,:eq,:sum,(,name,),:by,line",
"name,sps,:eq,:sum,(,name,),:by,area",
"name,sps,:eq,:sum,(,name,),:by,stack",
"name,sps,:eq,:sum,(,name,),:by,200e3,:gt,vspan"
)
}
case object LineWidth extends StyleWord {
override def name: String = "lw"
override def summary: String =
"""
|The width of the stroke used when drawing the line.
""".stripMargin.trim
override def examples: List[String] = List("name,sps,:eq,:sum,(,name,),:by,2")
}
case object Axis extends StyleWord {
override def name: String = "axis"
override def summary: String =
"""
|Specify which Y-axis to use for the line.
""".stripMargin.trim
override def examples: List[String] = List("name,sps,:eq,:sum,1")
}
case object Offset extends SimpleWord {
protected def matcher: PartialFunction[List[Any], Boolean] = {
case StringListType(_) :: PresentationType(_) :: _ => true
}
protected def executor: PartialFunction[List[Any], List[Any]] = {
case StringListType(vs) :: PresentationType(t) :: s =>
val v = Interpreter.toString(List(vs))
t.copy(settings = t.settings + (name -> v)) :: s
}
override def signature: String = "TimeSeriesExpr List -- StyleExpr"
override def name: String = "offset"
override def summary: String =
"""
|> :warning: **Deprecated**. Use the [data variant](data-offset) with signature
|> `TimeSeriesExpr Duration -- TimeSeriesExpr` instead.
|
|Shift the time frame to use when fetching the data. The expression will be copied for
|each shift value in the list.
""".stripMargin.trim
override def examples: List[String] = List("name,sps,:eq,:sum,(,0h,1d,1w,)")
}
object Filter extends Word {
override def name: String = "filter"
override def matches(stack: List[Any]): Boolean = stack match {
case TimeSeriesType(_) :: (_: StyleExpr) :: _ => true
case _ => false
}
override def execute(context: Context): Context = {
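      // Delegates to the data-level :filter on the inner expression, then
      // re-wraps the result with the original presentation settings.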
context.stack match {
case TimeSeriesType(ts) :: (se: StyleExpr) :: s =>
val rs = FilterVocabulary.Filter.execute(context.copy(stack = ts :: se.expr :: s))
val newExpr = se.copy(expr = rs.stack.head.asInstanceOf[TimeSeriesExpr])
rs.copy(stack = newExpr :: rs.stack.tail)
case _ =>
invalidStack
}
}
override def summary: String =
"""
|Filter the output based on another expression. This operation is an overload to allow
|applying filters after presentation settings have been set. See the
|[main filter page](filter-filter) for more details on general usage.
""".stripMargin
override def signature: String = "StyleExpr TimeSeriesExpr -- StyleExpr"
override def examples: List[String] =
List("name,sps,:eq,:sum,(,nf.cluster,),:by,$nf.cluster,:legend,:stat-max,30e3,:gt")
}
//
// Legend transforms
//
case object Legend extends StyleWord {
override def name: String = "legend"
override def summary: String =
"""
|Set the legend text. Legends can contain variables based on the
|exact keys matched in the query clause and keys used in a
|[group by](data-by). Variables start with a `$` sign and can optionally
|be enclosed between parentheses. The parentheses are required for cases
|where the characters immediately following the name could be a part
|of the name. If a variable is not defined, then the name of the variable
|will be used as the substitution value.
|
|The variable `atlas.offset` can be used to indicate the [time shift](data-offset)
|used for the underlying data.
""".stripMargin.trim
override def examples: List[String] =
List(
s"name,sps,:eq,(,name,),:by,$$name",
s"name,sps,:eq,(,nf.cluster,),:by,cluster+$$nf.cluster",
s"name,sps,:eq,(,name,),:by,$$(unknown)",
s"name,sps,:eq,:sum,1w,:offset,$$(name)+$$(atlas.offset)"
)
}
case object Decode extends SimpleWord {
override def name: String = "decode"
override def signature: String = "TimeSeriesExpr String -- StyleExpr"
override def summary: String =
"""
|> :warning: It is recommended to avoid using special symbols or trying to
|> encode structural information into tag values. This feature should be used
|> sparingly and with great care to ensure it will not result in a combinatorial
|> explosion.
|
|Perform decoding of the legend strings. Generally data going into Atlas
|is restricted to simple ascii characters that are easy to use as part of
|a URI. Most commonly the clients will convert unsupported characters to
        |an `_`. In some cases it is desirable to be able to reverse that for the
|purposes of presentation.
|
|* `none`: this is the default. It will not modify the legend string.
|* `hex`: perform a hex decoding of the legend string. This is similar to
| [url encoding](https://en.wikipedia.org/wiki/Percent-encoding) except
| that the `_` character is used instead of `%` to indicate the start of
| an encoded symbol. The decoding is lenient, if the characters following
| the `_` are not valid hexadecimal digits then it will just copy those
| characters without modification.
|
|Since: 1.5
""".stripMargin.trim
override def examples: List[String] = List(s"1,one_21_25_26_3F,:legend,hex")
protected def matcher: PartialFunction[List[Any], Boolean] = {
case (_: String) :: PresentationType(_) :: _ => true
}
protected def executor: PartialFunction[List[Any], List[Any]] = {
case (v: String) :: PresentationType(t) :: s =>
val transform = s"$v,:$name"
val newTransform = t.settings.get("sed").fold(transform)(p => s"$p,$transform")
t.copy(settings = t.settings + ("sed" -> newTransform)) :: s
}
}
case object SearchAndReplace extends SimpleWord {
override def name: String = "s"
override def signature: String = "TimeSeriesExpr s:String r:String -- StyleExpr"
override def summary: String =
"""
|Perform a search and replace on the legend strings. This command is similar
|to the global search and replace (`s/regexp/replace/g`) operation from tools
|like [vim][vim] or [sed][sed].
|
|[vim]: http://vim.wikia.com/wiki/Search_and_replace
|[sed]: https://linux.die.net/man/1/sed
|
|The replacement string can use variables to refer to the capture groups of the
        |input expression. The syntax is the same as for [legends](style-legend).
|
|Since: 1.6
""".stripMargin.trim
override def examples: List[String] = List(
s"name,sps,:eq,(,nf.cluster,),:by,$$nf.cluster,:legend,^nccp-(.*)$$,$$1",
s"name,sps,:eq,(,nf.cluster,),:by,$$nf.cluster,:legend,^nccp-(?<stack>.*)$$,$$stack",
s"name,sps,:eq,(,nf.cluster,),:by,$$nf.cluster,:legend,nccp-,_",
s"name,sps,:eq,(,nf.cluster,),:by,$$nf.cluster,:legend,([a-z]),_$$1"
)
protected def matcher: PartialFunction[List[Any], Boolean] = {
case (_: String) :: (_: String) :: PresentationType(_) :: _ => true
}
protected def executor: PartialFunction[List[Any], List[Any]] = {
case (r: String) :: (s: String) :: PresentationType(t) :: stack =>
val transform = s"$s,$r,:$name"
val newTransform = t.settings.get("sed").fold(transform)(p => s"$p,$transform")
t.copy(settings = t.settings + ("sed" -> newTransform)) :: stack
}
}
//
// Sorting operators
//
case object Sort extends StyleWord {
override def name: String = "sort"
override def summary: String =
"""
|Sort the results of an expression in the legend by one of the
|[summary statistics](filter-stat) or by the legend text. The default
|behavior is to sort by the legend text. This will sort in ascending
|order by default, for descending order use [order](style-order).
|
|Since: 1.5
""".stripMargin.trim
override def examples: List[String] =
List(
"name,sps,:eq,:sum,(,nf.cluster,),:by,max",
"name,sps,:eq,:sum,(,nf.cluster,),:by,legend"
)
}
case object Order extends StyleWord {
override def name: String = "order"
override def summary: String =
"""
|Order to use for [sorting](style-sort) results. Supported values are `asc` and `desc`
|for ascending and descending order respectively. Default is `asc`.
|
|Since: 1.5
""".stripMargin.trim
override def examples: List[String] =
List(
"name,sps,:eq,:sum,(,nf.cluster,),:by,max,:sort,asc",
"name,sps,:eq,:sum,(,nf.cluster,),:by,desc"
)
}
case object Limit extends StyleWord {
override def name: String = "limit"
override def summary: String =
"""
|Restrict the output to the first `N` lines from the input expression. The lines will be
|chosen in order based on the [sort](style-sort) and [order](style-order) used.
|
|Since: 1.6
""".stripMargin.trim
override def examples: List[String] =
List(
"name,sps,:eq,:sum,(,nf.cluster,),:by,3",
"name,sps,:eq,:sum,(,nf.cluster,),:by,max,:sort,desc,:order,2"
)
}
//
// Helper macros
//
private def desEpicViz = List(
// Show signal line as a vertical span
":des-epic-signal",
":vspan",
"40",
":alpha",
"triggered",
":legend",
// Raw input line
"line",
":get",
"line",
":legend",
// Lower bounds
"minPredNoiseBound",
":get",
"minPredNoiseBound",
":legend",
"minPredPercentBound",
":get",
"minPredPercentBound",
":legend",
// Upper bounds
"maxPredNoiseBound",
":get",
"maxPredNoiseBound",
":legend",
"maxPredPercentBound",
":get",
"maxPredPercentBound",
":legend"
)
}
| Netflix/atlas | atlas-core/src/main/scala/com/netflix/atlas/core/model/StyleVocabulary.scala | Scala | apache-2.0 | 18,115 |
package net.koofr.driveby.resources
case class DirRename(name: String)
object DirRename {
import spray.json.DefaultJsonProtocol._
implicit val format = jsonFormat1(DirRename.apply)
}
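// Hedged usage sketch (not part of the original source; the object and sample
// name below are illustrative only): round-trips a rename request through the
// implicit spray-json format defined above.
object DirRenameExample extends App {
  import spray.json._
  val js = DirRename("reports").toJson // {"name":"reports"}
  assert(js.convertTo[DirRename] == DirRename("reports"))
}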
| koofr/driveby | src/main/scala/net/koofr/driveby/resources/DirRename.scala | Scala | mit | 192 |