code | repo_name | path | language | license | size
---|---|---|---|---|---|
package dawn.flow
trait Batch[A, R]
extends Source1[A]
with Source[R]
with Accumulate[A]
with CloseListener {
def schedulerClose = source1.scheduler
override def closePriority = -1
//defined by Accumulate
def listen1(x: Timestamped[A]) = ()
private var schedulerL: Scheduler = Scheduler.newOne()
override def scheduler: Scheduler = {
schedulerL
}
def f(lA: ListT[A]): ListT[R]
var numClosed = 0
var numClosedRequired: Int = 1
def onScheduleClose() = {
numClosed += 1
if (numClosed == numClosedRequired) {
val lR = f(accumulated(0).reverse)
lR.foreach(x => scheduler.registerEvent(broadcast(x), x.time))
scheduler.run()
}
}
//The default implementation on Source1 registers on the wrong channel.
//Not worth changing all the code since, once the first
//scheduler is closed, no further effect is possible.
override def setup() = {
super.setup()
source1.addChannel(Channel1(this, source1.scheduler))
source1.addChannel(ChannelN(1, this, source1.scheduler))
}
override def reset() = {
super.reset()
numClosed = 0
schedulerL = Scheduler.newOne()
}
}
object Batch {
def apply[A, B](rawSource11: Source[A],
f1: ListT[A] => ListT[B],
name1: String = "Batch") = new Batch[A, B] {
def rawSource1 = rawSource11
def name = name1
def f(x: ListT[A]) = f1(x)
}
}
class ReplayWithScheduler[A](val rawSource1: Source[A]) extends Batch[A, A] {
def name = "Replay w/ scheduler"
def f(lA: ListT[A]) = lA
override lazy val sources = rawSources
// override def numClosedRequired = 2
}
class Replay[A](val rawSource1: Source[A], sourceScheduler: ReplayWithScheduler[_])
extends Source1[A]
with Source[A]
with Accumulate[A]
with CloseListener {
def name = "Replay"
def schedulerClose = rawSource1.scheduler
def listen1(x: Timestamped[A]) = ()
//clever trick to make topological sort order depend on the sourceOut schedule
override def rawSources = sourceScheduler :: super.rawSources
override lazy val sources = rawSources
override def closePriority = 1
override def scheduler = sourceScheduler.scheduler
def onScheduleClose() = {
accumulated(0).foreach(x => scheduler.registerEvent(broadcast(x), x.time))
}
//Same as above
override def setup() = {
schedulerClose.addCloseListener(sourceScheduler)
sourceScheduler.numClosedRequired += 1
rawSource1.addChannel(ChannelN(1, this, rawSource1.scheduler))
super.setup()
}
}
| rubenfiszel/scala-flow | core/src/main/scala/Batch.scala | Scala | mit | 2,548 |
package scala.pickling.inheritance
import org.scalatest.FunSuite
import scala.pickling._, scala.pickling.Defaults._, json._
abstract class Creature {
val species: String
}
abstract class Person extends Creature {
val species = "human"
val name: String
val age: Int
}
case class Firefighter(val name: String, val age: Int, val salary: Int) extends Person
class InheritanceTest extends FunSuite {
test("main") {
val f = new Firefighter("Josephine", 48, 40000)
val pickleF = (f: Firefighter).pickle
assert(pickleF.value === """
|{
| "$type": "scala.pickling.inheritance.Firefighter",
| "name": "Josephine",
| "age": 48,
| "salary": 40000
|}
""".trim.stripMargin)
assert(pickleF.unpickle[Firefighter] === f)
val pickleP = (f: Person).pickle
assert(pickleP.value === """
|{
| "$type": "scala.pickling.inheritance.Firefighter",
| "name": "Josephine",
| "age": 48,
| "salary": 40000
|}
""".trim.stripMargin)
assert(pickleP.unpickle[Person] === f)
val pickleC = (f: Creature).pickle
assert(pickleC.value === """
|{
| "$type": "scala.pickling.inheritance.Firefighter",
| "name": "Josephine",
| "age": 48,
| "salary": 40000
|}
""".trim.stripMargin)
assert(pickleC.unpickle[Creature] === f)
}
}
| scala/pickling | core/src/test/scala/scala/pickling/generation/InheritanceTest.scala | Scala | bsd-3-clause | 1,376 |
package pimpathon.scalaz
import org.junit.Test
import pimpathon.util.on
import scalaz.NonEmptyList
import pimpathon.multiMap._
import pimpathon.scalaz.nel._
import pimpathon.util._
import scalaz.syntax.either._
class NelTest {
@Test def unique(): Unit = NonEmptyList(1, 2, 1).unique === NonEmptyList(1, 2)
@Test def uniqueBy(): Unit = NonEmptyList("foo", "bar", "bard", "food", "foody", "bardo").uniqueBy(_.length) ===
NonEmptyList("foo", "bard", "foody")
@Test def filter(): Unit =
on(NonEmptyList(1), NonEmptyList(1, 2)).calling(_.filter(_ % 2 == 0)).produces(None, Some(NonEmptyList(2)))
@Test def filterNot(): Unit =
on(NonEmptyList(2), NonEmptyList(1, 2)).calling(_.filterNot(_ % 2 == 0)).produces(None, Some(NonEmptyList(1)))
@Test def max(): Unit = NonEmptyList(1, 3, 2).max(scalaz.std.anyVal.intInstance) === 3
@Test def min(): Unit = NonEmptyList(3, 1, 2).min(scalaz.std.anyVal.intInstance) === 1
@Test def partitionDisjunctions(): Unit =
NonEmptyList(1.left, "abc".right, "def".right, 2.left).partitionDisjunctions[List] ===
(List(1, 2), List("abc", "def"))
@Test def partitionEithers(): Unit = {
NonEmptyList[Either[Int, String]](Left(1), Right("abc"), Right("def"), Left(2)).partitionEithers[List] ===
(List(1, 2), List("abc", "def"))
}
@Test def toMultiMap(): Unit = on(NonEmptyList((1, 10), (1, 11), (2, 20), (2, 21)))
.calling(_.toMultiMap[List], _.toMultiMap[Set])
.produces(Map(1 → List(10, 11), 2 → List(20, 21)), Map(1 → Set(10, 11), 2 → Set(20, 21)))
@Test def asMultiMap_withKeys(): Unit = on(NonEmptyList(0, 1, 2, 3))
.calling(_.asMultiMap[List].withKeys(_ % 2), _.asMultiMap[NonEmptyList].withKeys(_ % 2))
.produces(Map(0 → List(0, 2), 1 → List(1, 3)), Map(0 → NonEmptyList(0, 2), 1 → NonEmptyList(1, 3)))
@Test def onlyOption(): Unit = on(NonEmptyList(1), NonEmptyList(1, 2)).calling(_.onlyOption).produces(Some(1), None)
@Test def onlyEither(): Unit =
on(NonEmptyList(1, 2), NonEmptyList(1)).calling(_.onlyEither).produces(Left(NonEmptyList(1, 2)), Right(1))
@Test def onlyDisjunction(): Unit =
on(NonEmptyList(1, 2), NonEmptyList(1)).calling(_.onlyDisjunction).produces(NonEmptyList(1, 2).left, 1.right)
@Test def onlyOrDisjunction(): Unit =
on(NonEmptyList(1, 2), NonEmptyList(1)).calling(_.onlyOrDisjunction(_.size)).produces(2.left, 1.right)
}
| raymanoz/pimpathon | src/test/scala/pimpathon/scalaz/nel.scala | Scala | apache-2.0 | 2,394 |
/*
* Copyright 2017 Datamountaineer.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
//package com.datamountaineer.streamreactor.connect.druid.writer
//
//import java.nio.file.{Path, Paths}
//
//import com.datamountaineer.streamreactor.connect.druid.{CuratorRequiringSuite, DruidIntegrationSuite, TestBase}
//import com.datamountaineer.streamreactor.connect.druid.config.{DruidSinkConfig, DruidSinkSettings}
//import org.apache.kafka.connect.data.Struct
//import org.apache.kafka.connect.sink.SinkRecord
//import org.scalatest.{BeforeAndAfter, FunSuite, Matchers}
//
//import scala.io.Source
//
//
//class DruidDbWriterTest extends FunSuite with TestBase with Matchers with BeforeAndAfter
// with DruidIntegrationSuite with CuratorRequiringSuite {
//
// test("DruidDbWriter") {
// // withDruidStack {
// //(curator, broker, coordinator, overlord) => {
// // val zkConnect = curator.getZookeeperClient.getCurrentConnectionString
// ///modifyFile(zkConnect)
// val config = new DruidSinkConfig(getProps())
// val settings = DruidSinkSettings(config)
//
// val writer = new DruidDbWriter(settings)
//
// val schema = WikipediaSchemaBuilderFn()
// val struct = new Struct(schema)
// .put("page", "Kafka Connect")
// .put("language", "en")
// .put("user", "datamountaineer")
// .put("unpatrolled", true)
// .put("newPage", true)
// .put("robot", false)
// .put("anonymous", false)
// .put("namespace", "article")
// .put("continent", "Europe")
// .put("country", "UK")
// .put("region", "Greater London")
// .put("city", "LDN")
//
// val sinkRecord = new SinkRecord(TOPIC, 1, null, null, schema, struct, 0)
// writer.write(Seq(sinkRecord))
// }
//
// //}
// //}
//}
| CodeSmell/stream-reactor | kafka-connect-druid/src/test/scala/com/datamountaineer/streamreactor/connect/druid/writer/DruidDbWriterTest.scala | Scala | apache-2.0 | 2,363 |
/***********************************************************************
* Copyright (c) 2013-2019 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.features.avro
import java.io._
import java.util.zip.Deflater
import org.apache.avro.file.{CodecFactory, DataFileWriter}
import org.geotools.data.simple.SimpleFeatureCollection
import org.locationtech.geomesa.features.SerializationOption.SerializationOptions
import org.locationtech.geomesa.utils.collection.SelfClosingIterator
import org.opengis.feature.simple.{SimpleFeature, SimpleFeatureType}
/**
* Class that writes out Avro SimpleFeature Data Files which carry the SimpleFeatureType Schema
* along with them.
*
* @param os output stream.
* @param sft simple feature type being written
* @param compression compression level, from -1 to 9. @see java.util.zip.Deflater
*/
class AvroDataFileWriter(os: OutputStream,
sft: SimpleFeatureType,
compression: Int = Deflater.DEFAULT_COMPRESSION) extends Closeable with Flushable {
private val schema = AvroSimpleFeatureUtils.generateSchema(sft, withUserData = true, withFeatureId = true, namespace = sft.getName.getNamespaceURI)
private val writer = new AvroSimpleFeatureWriter(sft, SerializationOptions.withUserData)
private val dfw = new DataFileWriter[SimpleFeature](writer)
if (compression != Deflater.NO_COMPRESSION) {
dfw.setCodec(CodecFactory.deflateCodec(compression))
}
AvroDataFile.setMetaData(dfw, sft)
dfw.create(schema, os)
def append(fc: SimpleFeatureCollection): Unit =
SelfClosingIterator(fc.features()).foreach(dfw.append)
def append(sf: SimpleFeature): Unit = dfw.append(sf)
override def close(): Unit = if (dfw != null) { dfw.close() }
override def flush(): Unit = if (dfw != null) { dfw.flush() }
}
| elahrvivaz/geomesa | geomesa-features/geomesa-feature-avro/src/main/scala/org/locationtech/geomesa/features/avro/AvroDataFileWriter.scala | Scala | apache-2.0 | 2,161 |
package types
trait Lambda {
class HKT1[A[_]]
class HKT2[A[_ >: Int <: AnyVal]]
class HKT3[A[_, _]]
class TC[A, B]
type T1 = HKT1[[X] =>> TC[X, Int]]
type T2 = HKT2[[X >: Int <: AnyVal] =>> TC[X, Int]]
type T3 = HKT3[[X, Y] =>> TC[X, Y]]
}
| JetBrains/intellij-scala | tasty/runtime/data/types/Lambda.scala | Scala | apache-2.0 | 260 |
package keemun
import scala.concurrent.Future
import keemun.clients.github.RepositoriesFetcher
import keemun.models.Repo
/**
* Copyright (c) Nikita Kovaliov, maizy.ru, 2014
* See LICENSE.txt for details.
*/
package object controllers {
def fetchRepositories(): Future[Seq[Repo]] = {
val fetcher = RepositoriesFetcher.playAppInstance
val config = keemun.Config.playAppInstance
fetcher.getRepos(config.sources.accountsSettings)
}
}
| maizy/keemun | app/keemun/controllers/package.scala | Scala | mit | 453 |
package intellij.haskell.psi.impl
import com.intellij.openapi.util.TextRange
import com.intellij.psi.{AbstractElementManipulator, PsiFileFactory}
import com.intellij.util.IncorrectOperationException
import intellij.haskell.HaskellLanguage
import org.jetbrains.annotations.Nullable
/**
* @author ice1000
*/
class HaskellStringLiteralManipulator extends AbstractElementManipulator[HaskellStringLiteralElementImpl] {
@Nullable
@throws[IncorrectOperationException]
override def handleContentChange(psi: HaskellStringLiteralElementImpl,
range: TextRange,
newContent: String): HaskellStringLiteralElementImpl = {
val oldText = psi.getText
val newText = oldText.substring(0, range.getStartOffset) + newContent + oldText.substring(range.getEndOffset)
val newElement = PsiFileFactory
.getInstance(psi.getProject)
.createFileFromText("a.hs", HaskellLanguage.Instance, newText, false, false)
.getLastChild
.getLastChild
psi.replace(newElement).asInstanceOf[HaskellStringLiteralElementImpl]
}
override def getRangeInElement(element: HaskellStringLiteralElementImpl): TextRange = {
new TextRange(1, element.getTextLength - 1)
}
}
| rikvdkleij/intellij-haskell | src/main/scala/intellij/haskell/psi/impl/HaskellStringLiteralManipulator.scala | Scala | apache-2.0 | 1,251 |
package gv
package isi
package std.conversions
import scala.util.{ Try }
import scala.concurrent.{ Future }
import convertible.{ ~=> }
/**
* Provide conversions from standard types to [[scala.concurrent.Future]].
*/
trait ToFutureConversions extends Any {
@inline
final implicit def `Try[T] ~=> Future[T]`[T]: Try[T] ~=> Future[T] = Future.fromTry _
}
| mouchtaris/jleon | src/main/scala-2.12/gv/isi/std/conversions/ToFutureConversions.scala | Scala | mit | 363 |
/*
* Copyright 2015 Heiko Seeberger
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.heikoseeberger.akkahttpjson4s
import akka.actor.ActorSystem
import akka.http.scaladsl.marshalling.Marshal
import akka.http.scaladsl.model._
import akka.http.scaladsl.model.ContentTypes.{ `application/json`, `text/plain(UTF-8)` }
import akka.http.scaladsl.unmarshalling.{ Unmarshal, Unmarshaller }
import akka.http.scaladsl.unmarshalling.Unmarshaller.UnsupportedContentTypeException
import akka.stream.scaladsl.{ Sink, Source }
import org.json4s.{ DefaultFormats, jackson, native }
import org.scalatest.BeforeAndAfterAll
import org.scalatest.matchers.should.Matchers
import org.scalatest.wordspec.AsyncWordSpec
import scala.concurrent.Await
import scala.concurrent.duration.DurationInt
object Json4sSupportSpec {
final case class Foo(bar: String) {
require(bar startsWith "bar", "bar must start with 'bar'!")
}
}
final class Json4sSupportSpec extends AsyncWordSpec with Matchers with BeforeAndAfterAll {
import Json4sSupport._
import Json4sSupportSpec._
private implicit val system = ActorSystem()
private implicit val formats = DefaultFormats
private val foo = Foo("bar")
"Json4sSupport" should {
"enable marshalling and unmarshalling objects for `DefaultFormats` and `jackson.Serialization`" in {
implicit val serialization = jackson.Serialization
Marshal(foo)
.to[RequestEntity]
.flatMap(Unmarshal(_).to[Foo])
.map(_ shouldBe foo)
}
"enable streamed marshalling and unmarshalling for json arrays" in {
implicit val serialization = jackson.Serialization
val foos = (0 to 100).map(i => Foo(s"bar-$i")).toList
Marshal(Source(foos))
.to[RequestEntity]
.flatMap { entity =>
Unmarshal(entity).to[String].onComplete(println)
Unmarshal(entity).to[SourceOf[Foo]]
}
.flatMap(_.runWith(Sink.seq))
.map(_ shouldBe foos)
}
"enable marshalling and unmarshalling objects for `DefaultFormats` and `native.Serialization`" in {
implicit val serialization = native.Serialization
Marshal(foo)
.to[RequestEntity]
.flatMap(Unmarshal(_).to[Foo])
.map(_ shouldBe foo)
}
"provide proper error messages for requirement errors" in {
implicit val serialization = native.Serialization
val entity =
HttpEntity(MediaTypes.`application/json`, """{ "bar": "baz" }""")
Unmarshal(entity)
.to[Foo]
.failed
.map(_ should have message "requirement failed: bar must start with 'bar'!")
}
"fail with NoContentException when unmarshalling empty entities" in {
implicit val serialization = native.Serialization
val entity = HttpEntity.empty(`application/json`)
Unmarshal(entity)
.to[Foo]
.failed
.map(_ shouldBe Unmarshaller.NoContentException)
}
"fail with UnsupportedContentTypeException when Content-Type is not `application/json`" in {
implicit val serialization = native.Serialization
val entity = HttpEntity("""{ "bar": "bar" }""")
Unmarshal(entity)
.to[Foo]
.failed
.map(
_ shouldBe UnsupportedContentTypeException(Some(`text/plain(UTF-8)`), `application/json`)
)
}
"allow unmarshalling with passed in Content-Types" in {
implicit val serialization = native.Serialization
val foo = Foo("bar")
val `application/json-home` =
MediaType.applicationWithFixedCharset("json-home", HttpCharsets.`UTF-8`, "json-home")
final object CustomJson4sSupport extends Json4sSupport {
override def unmarshallerContentTypes = List(`application/json`, `application/json-home`)
}
import CustomJson4sSupport._
val entity = HttpEntity(`application/json-home`, """{ "bar": "bar" }""")
Unmarshal(entity).to[Foo].map(_ shouldBe foo)
}
}
override protected def afterAll() = {
Await.ready(system.terminate(), 42.seconds)
super.afterAll()
}
}
| hseeberger/akka-http-json | akka-http-json4s/src/test/scala/de/heikoseeberger/akkahttpjson4s/Json4sSupportSpec.scala | Scala | apache-2.0 | 4,618 |
/*
* Copyright (c) 2014-2018 Snowplow Analytics Ltd. All rights reserved.
*
* This program is licensed to you under the Apache License Version 2.0,
* and you may not use this file except in compliance with the
* Apache License Version 2.0.
* You may obtain a copy of the Apache License Version 2.0 at
* http://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the Apache License Version 2.0 is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the Apache License Version 2.0 for the specific
* language governing permissions and limitations there under.
*/
import sbt._
import Keys._
import sbtassembly._
import com.typesafe.sbt.packager.Keys.{daemonUser, maintainer}
import com.typesafe.sbt.packager.docker.{ ExecCmd, Cmd }
import com.typesafe.sbt.packager.docker.DockerPlugin.autoImport._
import com.typesafe.sbt.packager.universal.UniversalPlugin.autoImport._
object BuildSettings {
//Basic settings for our app
lazy val basicSettings = Seq[Setting[_]](
organization := "com.snowplowanalytics",
version := "0.5.0",
name := "iglu-server",
description := "Scala schema server for Iglu",
scalaVersion := "2.11.12",
scalacOptions := Seq("-deprecation", "-encoding", "utf8",
"-unchecked", "-feature", "-Xcheckinit", "-Ypartial-unification"),
scalacOptions in Test := Seq("-Yrangepos", "-deprecation"),
maxErrors := 5,
// http://www.scala-sbt.org/0.13.0/docs/Detailed-Topics/Forking.html
fork in run := true,
fork in Test := true,
// Ensure that the correct config file is loaded for testing
javaOptions in Test += "-Dconfig.file=./test.conf",
shellPrompt := { s => Project.extract(s).currentProject.id + " > " }
)
// Makes our SBT app settings available from within the app
lazy val scalifySettings = Seq(sourceGenerators in Compile += task[Seq[File]] {
val file = (sourceManaged in Compile).value / "settings.scala"
IO.write(file, s"""
|package com.snowplowanalytics.iglu.server.generated
|object Settings {
| val organization = "${organization.value}"
| val version = "${version.value}"
| val name = "${name.value}"
| val shortName = "sr"
|}
|""".stripMargin)
Seq(file)
})
// sbt-assembly settings for building an executable
import sbtassembly.AssemblyKeys._
import sbtassembly.AssemblyPlugin._
lazy val sbtAssemblySettings = assemblySettings ++ Seq(
// Simple name
assemblyJarName in assembly := { s"${name.value}-${version.value}.jar" },
assemblyMergeStrategy in assembly := {
case PathList("com", "github", "fge", tail@_*) => MergeStrategy.first
case x =>
val oldStrategy = (assemblyMergeStrategy in assembly).value
oldStrategy(x)
}
)
lazy val dockerPgInstallCmds = Seq(
ExecCmd("RUN", "cp", "/opt/docker/docker-entrypoint.sh", "/usr/local/bin/"),
Cmd("RUN", "apt update"),
Cmd("RUN", "mkdir -p /usr/share/man/man7"),
Cmd("RUN", "apt install -y postgresql-client-9.6")
)
lazy val dockerSettings = Seq(
// Use single entrypoint script for all apps
Universal / sourceDirectory := new File(baseDirectory.value, "scripts"),
dockerRepository := Some("snowplow-docker-registry.bintray.io"),
dockerUsername := Some("snowplow"),
dockerBaseImage := "snowplow-docker-registry.bintray.io/snowplow/base-debian:0.1.0",
Docker / maintainer := "Snowplow Analytics Ltd. <[email protected]>",
Docker / daemonUser := "root", // Will be gosu'ed by docker-entrypoint.sh
dockerEntrypoint := Seq("docker-entrypoint.sh"),
dockerCommands ++= dockerPgInstallCmds,
dockerCmd := Seq("--help")
)
lazy val buildSettings = basicSettings ++ scalifySettings ++ sbtAssemblySettings ++ dockerSettings
}
| snowplow/iglu | 2-repositories/iglu-server/project/BuildSettings.scala | Scala | apache-2.0 | 4,153 |
package com.iflytek.rocket.spark.sql
import com.iflytek.rocket.uitl.ConstValue
import org.apache.spark._
import org.apache.spark.SparkContext._
import org.apache.spark.sql.{SaveMode, SQLContext}
case class Record(key: Int, value: String)
case class Person(name: String, age: Int)
/**
* Created by seawalker on 2015/4/3.
*/
object ApiTest {
def main(args: Array[String]) {
val sparkConf = new SparkConf().setAppName("RDDRelation").setMaster("local[2]")
val sc = new SparkContext(sparkConf)
val sqlContext = new SQLContext(sc)
import sqlContext.implicits._
// val df = sc.parallelize((10 to 19).map(i => Record(i, s"val_$i"))).toDF()
// df.registerTempTable("records")
// //df.saveAsParquetFile(ConstValue.DATA_PARENT_PATH + "/test/key=1",)
// df.save(ConstValue.DATA_PARENT_PATH + "/test/key=1", "parquet", SaveMode.Append)
// Read in parquet file. Parquet files are self-describing so the schmema is preserved.
val parquetFile = sqlContext.parquetFile(ConstValue.DATA_PARENT_PATH + "/opcode")
parquetFile.registerTempTable("tmp")
sqlContext.sql("select * from tmp where bizid=01 and version in (411002, 511002)")
.collect()
.foreach(println)
sc.stop()
}
}
| seawalkermiaoer/rocket | streaming/project/src/main/scala/com/iflytek/rocket/spark/sql/ApiTest.scala | Scala | mit | 1,234 |
package poly.algebra
import poly.algebra.factory._
/**
* Represents a concatenative semigroup (i.e. semigroups operating on sequences, etc. that bears the operation `++`)
* @author Tongfei Chen
* @since 0.2.1
*/
trait ConcatenativeSemigroup[X] { self =>
/** The concatenation (`++`) operation of this semigroup. */
def concat(x: X, y: X): X
/** Computes the concatenated sequence ''x'' ++ ''x'' ++ ··· ++ ''x'' with ''x'' repeated ''n'' times. */
def concatN(x: X, n: Int): X = asSemigroupWithConcat.combineN(x, n)
/** Casts this structure as a symbol-agnostic semigroup. */
def asSemigroupWithConcat: Semigroup[X] = new Semigroup[X] {
def op(x: X, y: X) = self.concat(x, y)
}
}
object ConcatenativeSemigroup extends ImplicitGetter[ConcatenativeSemigroup] {
/** Creates an concatenative semigroup of the specific type using the `++` operation provided. */
def create[X](f: (X, X) => X) = new ConcatenativeSemigroup[X] {
def concat(x: X, y: X) = f(x, y)
}
}
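// Usage sketch (added for illustration, not part of the original file): builds a concatenative
// semigroup over String with `create` and exercises `concat` / `concatN`. The object name and
// the String instance are assumptions.
private[algebra] object ConcatenativeSemigroupExample {
  val stringConcat = ConcatenativeSemigroup.create[String](_ + _)
  val joined = stringConcat.concat("ab", "cd")   // "abcd"
  val repeated = stringConcat.concatN("ab", 3)   // "ababab"
}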
| ctongfei/poly-algebra | src/main/scala/poly/algebra/ConcatenativeSemigroup.scala | Scala | mit | 1,005 |
/*
* Copyright (C) 2009-2013 Mathias Doenitz, Alexander Myltsev
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package akka.rtcweb.protocol.sdp.parser
import akka.parboiled2._
import akka.parboiled2.util.Base64
/**
* Rules for parsing Base-64 encoded strings.
*/
trait Base64Parsing { this: Parser ⇒
import Base64Parsing._
/**
* Parses an RFC4045-encoded string and decodes it onto the value stack.
*/
def rfc2045String: Rule1[Array[Byte]] = base64StringOrBlock(rfc2045Alphabet, rfc2045StringDecoder)
/**
* Parses an RFC4045-encoded string potentially containing newlines and decodes it onto the value stack.
*/
def rfc2045Block: Rule1[Array[Byte]] = base64StringOrBlock(rfc2045Alphabet, rfc2045BlockDecoder)
/**
* Parses a org.parboiled2.util.Base64.custom()-encoded string and decodes it onto the value stack.
*/
def base64CustomString: Rule1[Array[Byte]] = base64StringOrBlock(customAlphabet, customStringDecoder)
/**
* Parses a org.parboiled2.util.Base64.custom()-encoded string potentially containing newlines
* and decodes it onto the value stack.
*/
def base64CustomBlock: Rule1[Array[Byte]] = base64StringOrBlock(customAlphabet, customBlockDecoder)
/**
* Parses a BASE64-encoded string with the given alphabet and decodes it onto the value
* stack using the given codec.
*/
def base64StringOrBlock(alphabet: CharPredicate, decoder: Decoder): Rule1[Array[Byte]] = {
val start = cursor
rule {
oneOrMore(alphabet) ~ run {
decoder(input.sliceCharArray(start, cursor)) match {
case null ⇒ MISMATCH
case bytes ⇒ push(bytes)
}
}
}
}
}
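// Usage sketch (added for illustration, not part of the original file): a minimal parser that
// mixes in Base64Parsing and exposes a rule decoding an RFC 2045 string onto the value stack.
// The class name and rule name are assumptions.
private[parser] class Base64ExampleParser(val input: ParserInput) extends Parser with Base64Parsing {
  def rfc2045Value: Rule1[Array[Byte]] = rule { rfc2045String }
}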
object Base64Parsing {
type Decoder = Array[Char] ⇒ Array[Byte]
val rfc2045Alphabet = CharPredicate(Base64.rfc2045().getAlphabet).asMaskBased
val customAlphabet = CharPredicate(Base64.custom().getAlphabet).asMaskBased
val rfc2045StringDecoder: Decoder = decodeString(Base64.rfc2045())
val customStringDecoder: Decoder = decodeString(Base64.custom())
val rfc2045BlockDecoder: Decoder = decodeBlock(Base64.rfc2045())
val customBlockDecoder: Decoder = decodeBlock(Base64.custom())
def decodeString(codec: Base64)(chars: Array[Char]): Array[Byte] = codec.decodeFast(chars)
def decodeBlock(codec: Base64)(chars: Array[Char]): Array[Byte] = codec.decode(chars)
}
| danielwegener/akka-rtcweb | src/main/scala/akka/rtcweb/protocol/sdp/parser/Base64Parsing.scala | Scala | apache-2.0 | 2,865 |
package org.mrgeo.spark.job.yarn
import java.io.File
import java.net.URL
import org.apache.spark.SparkConf
import org.mrgeo.spark.job.JobArguments
import org.mrgeo.utils.SparkUtils
import scala.collection.mutable.ArrayBuffer
import scala.tools.nsc.util.ScalaClassLoader.URLClassLoader
object MrGeoYarnDriver {
final val DRIVER:String = "mrgeo.driver.class"
}
class MrGeoYarnDriver {
def run (job:JobArguments, cl:ClassLoader, conf:SparkConf) = {
// need to initialize spark.deploy.yarn... by reflection, because it is package private
// to org.apache.spark
// tell Spark we are running in yarn mode
System.setProperty("SPARK_YARN_MODE", "true")
val clientargsclazz = cl.loadClass("org.apache.spark.deploy.yarn.ClientArguments")
if (clientargsclazz != null) {
val caconst = clientargsclazz.getConstructor(classOf[Array[String]], classOf[SparkConf])
val args = caconst.newInstance(toYarnArgs(job, cl, conf), conf)
val clientclazz = cl.loadClass("org.apache.spark.deploy.yarn.Client")
if (clientclazz != null) {
val const = clientclazz.getConstructor(clientargsclazz, classOf[SparkConf])
val client = const.newInstance(args.asInstanceOf[Object], conf)
val run = clientclazz.getMethod("run")
try {
run.invoke(client)
}
catch {
case e:Exception => {
e.printStackTrace()
throw e
}
}
}
}
}
def toYarnArgs(job:JobArguments, cl:ClassLoader, conf:SparkConf) :Array[String] = {
val args = new ArrayBuffer[String]()
// " --jar JAR_PATH Path to your application's JAR file (required in yarn-cluster mode)\\n" +
// " --class CLASS_NAME Name of your application's main class (required)\\n" +
// " --arg ARG Argument to be passed to your application's main class.\\n" +
// " Multiple invocations are possible, each will be passed in order.\\n" +
// " --num-executors NUM Number of executors to start (Default: 2)\\n" +
// " --executor-cores NUM Number of cores for the executors (Default: 1).\\n" +
// " --driver-memory MEM Memory for driver (e.g. 1000M, 2G) (Default: 512 Mb)\\n" +
// " --executor-memory MEM Memory per executor (e.g. 1000M, 2G) (Default: 1G)\\n" +
// " --name NAME The name of your application (Default: Spark)\\n" +
// " --queue QUEUE The hadoop queue to use for allocation requests (Default: 'default')\\n" +
// " --addJars jars Comma separated list of local jars that want SparkContext.addJar to work with.\\n" +
// " --files files Comma separated list of files to be distributed with the job.\\n" +
// " --archives archives Comma separated list of archives to be distributed with the job."
val driverClass = MrGeoYarnJob.getClass.getName.replaceAll("\\\\$","")
val driverJar = SparkUtils.jarForClass(driverClass, cl)
args += "--class"
args += driverClass
args += "--jar"
args += driverJar
args += "--num-executors"
args += "4" // job.executors.toString
conf.set("spark.executor.instances", "4")// job.executors.toString)
args += "--executor-cores"
args += "2" // job.cores.toString
conf.set("spark.executor.cores", "2") // job.cores.toString)
val exmemory:Int = SparkUtils.humantokb(job.memory)
val dvmem = SparkUtils.kbtohuman(exmemory / (job.cores * job.executors))
//val dvmem = SparkUtils.kbtohuman((Runtime.getRuntime.maxMemory() / 1024).toInt)
//args += "--driver-memory"
//args += dvmem
conf.set("spark.driver.memory", dvmem)
//args += "--executor-memory"
//args += job.memory
conf.set("spark.executor.memory", job.memory)
args += "--name"
if (job.name != null && job.name.length > 0) {
args += job.name
}
else {
args += "Unnamed MrGeo Job"
}
// need to make sure the driver jar isn't included. Yuck!
val driver = new File(driverJar).getName
var clean = ""
job.jars.foreach(jar => {
if (!jar.contains(driver)) {
if (clean.length > 0) {
clean += ","
}
clean += jar
}
})
args += "--addJars"
args += clean
args += "--arg"
args += "--" + MrGeoYarnDriver.DRIVER
args += "--arg"
args += job.driverClass
// map the user params
job.params.foreach(p => {
args += "--arg"
args += "--" + p._1
args += "--arg"
args += p._2
})
args.toArray
}
}
| tjkrell/MrGEO | mrgeo-core/src/main/scala/org/mrgeo/spark/job/yarn/MrGeoYarnDriver.scala | Scala | apache-2.0 | 4,705 |
package mr.merc.ui.common
import mr.merc.unit.Soldier
import scalafx.beans.property.StringProperty
import scalafx.beans.property.ObjectProperty
import mr.merc.unit.view.SoldierView
import mr.merc.unit.view.StandState
import mr.merc.image.MImage
import mr.merc.local.Localization
class SoldierWrapper(private var _soldier: Option[Soldier], factor:Double) {
val hp: StringProperty = StringProperty("")
val name: StringProperty = StringProperty("")
val exp: StringProperty = StringProperty("")
val soldierType: StringProperty = StringProperty("")
val expToNextLevel: StringProperty = StringProperty("")
val movePoints: StringProperty = StringProperty("")
val movePointsTotal: StringProperty = StringProperty("")
val image: ObjectProperty[MImage] = ObjectProperty(MImage.emptyImage)
val level: StringProperty = StringProperty("")
refreshProperties()
def soldier = _soldier
def soldier_=(newSoldier: Option[Soldier]): Unit = {
_soldier = newSoldier
refreshProperties()
}
def refreshProperties(): Unit = {
_soldier match {
case Some(soldier) =>
hp.value = soldier.hp.toString + "/" + soldier.soldierType.hp
name.value = soldier.name
exp.value = soldier.exp.toString
expToNextLevel.value = soldier.soldierType.exp.toString
movePoints.value = soldier.movePointsRemain.toString
movePointsTotal.value = soldier.soldierType.movement.toString
image.value = standImage(soldier).scaledImage(factor)
level.value = soldier.soldierType.level.toString
soldierType.value = Localization(soldier.soldierType.name)
case None =>
hp.value = ""
name.value = ""
exp.value = ""
expToNextLevel.value = ""
movePoints.value = ""
movePointsTotal.value = ""
image.value = MImage.emptyImage
level.value = ""
soldierType.value = ""
}
}
private def standImage(soldier: Soldier): MImage = {
val view = new SoldierView(soldier, 1.0)
view.state = StandState
view.images(StandState).head
}
}
| RenualdMarch/merc | src/main/scala/mr/merc/ui/common/SoldierWrapper.scala | Scala | gpl-3.0 | 2,078 |
package models
import fixture.NameGenerator
import org.joda.time.DateTime
import com.github.nscala_time.time.Imports._
import play.api.db.DB
import play.api.test.WithApplication
import org.scalatest.Tag
import org.slf4j.LoggerFactory
/**
* Use SBT command "test-only *ContainerSpec" to run all tests.
*
* Use SBT command "test-only *ContainerSpec -- -n single" to run test(s) tagged as "single".
*/
class ContainerSpec extends ModelSpec {
val log = LoggerFactory.getLogger(this.getClass)
val nameGenerator = new NameGenerator(this.getClass)
"Container" should {
"get container associated to user" in new WithApplication(fakeApplication()) {
DB.withConnection { implicit conn =>
val containerIndex = 1
val user = Factory.createUser
val container = DbUtils.createContainerLikedToUser(user)
val result = Container.getOwnedByAdminByIndex(containerIndex, user.email)
result should not be (None)
result.get.id should not be (None)
result.get should have (
'name (container.name),
'temperatureExpected (container.temperatureExpected),
'temperatureRange (container.temperatureRange),
'readFrequency (container.readFrequency),
'monitorID (container.monitorID)
)
}
}
"add a reading" taggedAs(Tag("single")) in new WithApplication(fakeApplication()) {
DB.withConnection { implicit conn =>
val containerIndex = 1
val user = Factory.createUser
val container = DbUtils.createContainerLikedToUser(user)
val temperature = 20.0
val status = nameGenerator.next[String]
val readingTime = DateTime.now
Container.addReading(container.id.get, user.email, temperature, status, readingTime)
// val info = SQL("""SELECT * FROM container_readings""").as(Container.reading *)
// log.info(s"containerId: ${container.id.get}, readingTime: ${timeFormatter.print(readingTime)}")
// log.info(s"******** $info")
val reading = DbUtils.containerLastReading(container)
reading should not be (None)
reading.map { r =>
r.id should not be (None)
r should have (
'readTemperature (temperature),
'readStatus (status)
)
(r.readTime to readingTime).millis should be < 1000L
}
}
}
}
}
| cbsrbiobank/tempmonServer | test/models/ContainerSpec.scala | Scala | bsd-2-clause | 2,433 |
/*
* Copyright 2013 Twitter Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.twitter.storehaus.algebra
import com.twitter.algebird.{ Semigroup, StatefulSummer }
import com.twitter.util.{Promise, Future, Time}
import com.twitter.storehaus.FutureCollector
import scala.collection.breakOut
/** For any given V, construct a StatefulSummer of Map[K, V]
*/
trait SummerConstructor[K] {
def apply[V](mon: Semigroup[V]): StatefulSummer[Map[K, V]]
}
/**
* A Mergeable that sits on top of another mergeable and pre-aggregates before pushing into
* merge/multiMerge.
* This is very useful for cases where you have some keys that are very hot, or you have a remote
* mergeable that you don't want to constantly hit.
*/
class BufferingMergeable[K, V](store: Mergeable[K, V], summerCons: SummerConstructor[K])
extends Mergeable[K, V] {
protected implicit val collector = FutureCollector.bestEffort
protected val summer: StatefulSummer[Map[K, PromiseLink[V]]] =
summerCons(new PromiseLinkSemigroup(semigroup))
override def semigroup: Semigroup[V] = store.semigroup
// Flush the underlying buffer
def flush: Future[Unit] =
summer.flush.map(mergeFlush) match {
case None => Future.Unit
case Some(mKV) => collector(mKV.values.toSeq).unit
}
// Returns the values prior to the merge, as merge/multiMerge do
private def mergeFlush(toMerge: Map[K, PromiseLink[V]]): Map[K, Future[Option[V]]] =
// Now merge any evicted items from the buffer to below:
store
.multiMerge(toMerge.mapValues(_.value))
.map { case (k, foptV) =>
val prom = toMerge(k)
foptV.respond { prom.completeIfEmpty(_) }
k -> foptV
}
override def multiMerge[K1 <: K](kvs: Map[K1, V]): Map[K1, Future[Option[V]]] = {
// build the map eagerly (no lazy mapValues) so each value gets exactly one PromiseLink
val links: Map[K, PromiseLink[V]] = kvs.map { case (k1, v) => k1 -> PromiseLink(v) }(breakOut)
summer.put(links).foreach(mergeFlush)
kvs.map { case (k, _) => k -> links(k).promise }
}
override def close(t: Time): Future[Unit] = store.close(t)
}
/** A MergeableStore that does the same buffering as BufferingMergeable, but
* flushes on put/get.
*/
class BufferingStore[K, V](store: MergeableStore[K, V], summerCons: SummerConstructor[K])
extends BufferingMergeable[K, V](store, summerCons) with MergeableStore[K, V] {
// Assumes m has k, which is true by construction below
private def wait[K1<:K, W](k: K1, m: Future[Map[K1, Future[W]]]): Future[W] =
m.flatMap { _.apply(k) }
override def multiGet[K1 <: K](ks: Set[K1]): Map[K1, Future[Option[V]]] = {
val allGets = flush.map(_ => store.multiGet(ks))
ks.map { k => k -> wait(k, allGets) }(breakOut)
}
override def multiPut[K1 <: K](kvs: Map[K1, Option[V]]): Map[K1, Future[Unit]] = {
val allPuts = flush.map(_ => store.multiPut(kvs))
kvs.map { case (k, _) => k -> wait(k, allPuts) }
}
}
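// Usage sketch (added for illustration, not part of the original file): one way to supply a
// SummerConstructor is to back it with Algebird's SummingCache, which buffers up to `capacity`
// hot keys before flushing into the wrapped store. The object/method names and the choice of
// SummingCache here are assumptions, not part of this library.
private[algebra] object BufferingStoreExample {
  import com.twitter.algebird.SummingCache

  def buffered[K, V](underlying: MergeableStore[K, V], capacity: Int): BufferingStore[K, V] = {
    val summerCons = new SummerConstructor[K] {
      def apply[W](sg: Semigroup[W]): StatefulSummer[Map[K, W]] = SummingCache[K, W](capacity)(sg)
    }
    new BufferingStore(underlying, summerCons)
  }
}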
| twitter/storehaus | storehaus-algebra/src/main/scala/com/twitter/storehaus/algebra/BufferingStore.scala | Scala | apache-2.0 | 3,384 |
trait Ent(name: String)
case class MyContent(key: String, value: String)
case class MyInsert(key: String)
object Dsl {
inline def ent: Ent = new Ent("something") {}
extension (ent: Ent)
inline def content(inline ins: MyInsert) = MyContent(ins.key, "blah")
}
| dotty-staging/dotty | tests/run-macros/i10880/Dsl_1.scala | Scala | apache-2.0 | 267 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.nn
import com.intel.analytics.bigdl.nn.abstractnn.TensorModule
import com.intel.analytics.bigdl.tensor.{Storage, Tensor}
import com.intel.analytics.bigdl.tensor.TensorNumericMath.TensorNumeric
import com.intel.analytics.bigdl.utils.Engine
import com.intel.analytics.bigdl.utils.RandomGenerator._
import scala.concurrent.Future
import scala.reflect.ClassTag
/**
* Dropout masks(set to zero) parts of input using a bernoulli distribution.
* Each input element has a probability initP of being dropped. If `scale` is
* true(true by default), the outputs are scaled by a factor of `1/(1-initP)`
* during training.
* During evaluating, output is the same as input.
*
* It has been proven an effective approach for regularization and preventing
* co-adaptation of feature detectors. For more details, plese see
* [Improving neural networks by preventing co-adaptation of feature detectors]
* (https://arxiv.org/abs/1207.0580)
*
* @param initP the probability p
* @param inplace whether to make `input` and `output` share the same storage
* @param scale whether to scale the output by a factor of `1 / (1 - p)`
*/
@SerialVersionUID(- 4636332259181125718L)
class Dropout[T: ClassTag](
val initP: Double = 0.5,
val inplace: Boolean = false,
var scale: Boolean = true)(
implicit ev: TensorNumeric[T]) extends TensorModule[T] {
private var p = initP
var noise = Tensor[T]()
var isResampling = true
@transient
protected var results: Array[Future[Unit]] = null
/**
* Get current probability to be dropped.
* @return p
*/
def getP(): T = {
return ev.fromType[Double](p)
}
override def updateOutput(input: Tensor[T]): Tensor[T] = {
if (inplace) {
this.output = input
} else {
this.output.resizeAs(input).copy(input)
}
if (results == null) {
results = new Array[Future[Unit]](Engine.model.getPoolSize)
}
if (train) {
noise.resizeAs(input)
if (input.isContiguous()) {
if (isResampling) {
val noiseData = noise.storage().array()
var taskSize = noise.nElement() / Engine.model.getPoolSize
var extraTask = noise.nElement() % Engine.model.getPoolSize
var allocated = 0
val offset = this.output.storageOffset() - 1
val data = this.output.storage.array()
var i = 0
while (allocated < noise.nElement()) {
val start = allocated
allocated += taskSize
if (extraTask > 0) {
allocated += 1
extraTask -= 1
}
val end = allocated
results(i) = Engine.model.invoke(() => {
var k = start
while (k < end) {
noiseData(k) = if (RNG.bernoulli(1 - p)) {
if (scale) {
data(offset + k) = ev.divide(data(offset + k), ev.fromType[Double](1 - p))
ev.fromType[Double](1.0 / (1 - p))
} else {
ev.fromType[Int](1)
}
} else {
data(offset + k) = ev.fromType[Int](0)
ev.fromType[Int](0)
}
k += 1
}
})
i += 1
}
Engine.model.sync(results)
} else {
this.output.cmul(noise)
}
this.output
} else {
if (isResampling) {
noise.bernoulli(1 - p)
if (scale) {
noise.div(ev.fromType[Double](1 - p))
}
}
this.output.cmul(noise)
}
} else if (!scale) {
this.output.mul(ev.fromType[Double](1 - p))
} else {
output
}
}
override def updateGradInput(input: Tensor[T], gradOutput: Tensor[T]): Tensor[T] = {
if (results == null) {
results = new Array[Future[Unit]](Engine.model.getPoolSize)
}
if (train) {
if (inplace) {
this.gradInput = gradOutput
} else {
this.gradInput.resizeAs(gradOutput).copy(gradOutput)
}
if (gradInput.isContiguous()) {
val noiseData = noise.storage().array()
var taskSize = noise.nElement() / Engine.model.getPoolSize
var extraTask = noise.nElement() % Engine.model.getPoolSize
val gradInputData = gradInput.storage().array()
val gradInputOffset = gradInput.storageOffset() - 1
var allocated = 0
var i = 0
while (allocated < noise.nElement()) {
val start = allocated
allocated += taskSize
if (extraTask > 0) {
allocated += 1
extraTask -= 1
}
val end = allocated
results(i) = Engine.model.invoke(() => {
var k = start
while (k < end) {
gradInputData(gradInputOffset + k) =
ev.times(gradInputData(gradInputOffset + k), noiseData(k))
k += 1
}
})
i += 1
}
Engine.model.sync(results)
this.gradInput
} else {
this.gradInput.cmul(noise)
}
} else {
throw new IllegalArgumentException("backprop only defined while training")
}
this.gradInput
}
override def clearState(): this.type = {
super.clearState()
noise.set()
this
}
/**
* Set current probability to be dropped.
* @param p new probability
* @return
*/
def setP(p: Double): this.type = {
this.p = p
this
}
override def toString(): String = {
s"${getPrintName}($p)"
}
}
object Dropout {
def apply[T: ClassTag](
initP: Double = 0.5,
inplace: Boolean = false,
scale: Boolean = true)(implicit ev: TensorNumeric[T]) : Dropout[T] = {
new Dropout[T](initP, inplace, scale)
}
}
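// Usage sketch (added for illustration, not part of the original file): in training mode roughly
// `initP` of the activations are zeroed and the survivors are scaled by 1/(1 - p); in evaluate
// mode the input passes through unchanged. The object name, tensor shape and the NumericFloat
// import path are assumptions.
private object DropoutExample {
  import com.intel.analytics.bigdl.tensor.TensorNumericMath.TensorNumeric.NumericFloat

  def demo(): (Tensor[Float], Tensor[Float]) = {
    val layer = Dropout[Float](initP = 0.4)
    val input = Tensor[Float](2, 4).rand()
    layer.training()
    val trainOutput = layer.forward(input) // masked and scaled by 1 / (1 - 0.4)
    layer.evaluate()
    val evalOutput = layer.forward(input)  // same values as the input
    (trainOutput, evalOutput)
  }
}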
| 122689305/BigDL | spark/dl/src/main/scala/com/intel/analytics/bigdl/nn/Dropout.scala | Scala | apache-2.0 | 6,403 |
package poly.collection.mut
import poly.collection._
import poly.collection.impl._
import poly.macroutil._
/**
* Represents a mutable indexed sequence.
* Fast random access and update should be guaranteed.
* @since 0.1.0
* @author Tongfei Chen
*/
trait ValueMutableIndexedSeq[T] extends ValueMutableSeq[T] with IndexedSeq[T] {
/** $Onlogn Sorts this sequence in-place using the order provided. */
def sort_![U >: T]()(implicit U: Order[U]) = Sorting.quickSort[T, U](this)
/** $On Reverses this sequence in-place. */
def reverse_!(): Unit = {
var l = 0
var r = length - 1
while (l <= r) {
swap_!(l, r)
l += 1
r -= 1
}
}
/**
* $O1 Swaps two elements in this sequence in-place.
* @param i Index of the first element
* @param j Index of the second element
*/
def swap_!(i: Int, j: Int): Unit = {
val t = this(i)
this(i) = this(j)
this(j) = t
}
/** $On Transforms this sequence in-place given a function. */
override def map_!(f: T => T): Unit = {
FastLoop.ascending(0, length, 1) { i =>
this(i) = f(this(i))
}
}
/** $On Randomly shuffles this sequence in-place using the Fisher-Yates shuffling algorithm. */
def shuffle_!(): Unit = {
val r = new java.util.Random()
FastLoop.descending(length - 1, 0, -1) { i =>
val j = r.nextInt(i + 1)
swap_!(i, j)
}
}
}
}
| ctongfei/poly-collection | src/main/scala/poly/collection/mut/ValueMutableIndexedSeq.scala | Scala | mit | 1,388 |
/*
* Copyright (c) 2015 Robert Conrad - All Rights Reserved.
* Unauthorized copying of this file, via any medium is strictly prohibited.
* This file is proprietary and confidential.
* Last modified by rconrad, 1/4/15 1:55 PM
*/
package base.common.service
import akka.actor.ActorSystem
import akka.util.Timeout
import scala.concurrent.duration.Duration
/**
* Server-level configuration and operations
* @author rconrad
*/
trait CommonService extends Service {
final def serviceManifest = manifest[CommonService]
/**
* Default timeout for futures, actors, etc. across the system
*/
def defaultDuration: Duration
def defaultTimeout: Timeout
/**
* Force some Akka config because it's a little bitch that doesn't listen well
*/
def makeActorSystem(): ActorSystem
}
object CommonService extends ServiceCompanion[CommonService]
| robconrad/base-api | project-common/src/main/scala/base/common/service/CommonService.scala | Scala | mit | 863 |
package build
import java.io.{File => JFile}
import collection.JavaConverters._
import play.api.Logger
import models._
import org.eclipse.jgit.api.Git
import org.eclipse.jgit.internal.storage.file.FileRepository
import scala.util.Try
import org.apache.commons.io.FileUtils
import org.eclipse.jgit.transport._
import settings.Global
import org.eclipse.jgit.revwalk.{RevCommit, RevTag, RevWalk}
import scala.collection.immutable.TreeMap
import util.Util
trait RepositoryService {
def clone(project: Project): Try[JFile]
def checkout(project: Project, version: String): Try[JFile]
def cleanRepo(project: Project): Try[Unit]
def getVersions(project: Project): Try[Seq[String]]
def getAuthors(project: Project): Try[Seq[String]]
}
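// Usage sketch (added for illustration, not part of the original file): writes a batch of
// SimpleFeatures to an Avro data file that carries the schema. The object/method names, the
// compression choice and the caller-supplied `sft`/`features` are assumptions.
private object AvroDataFileWriterExample {
  def writeAll(path: String, sft: SimpleFeatureType, features: Seq[SimpleFeature]): Unit = {
    val out = new FileOutputStream(path)
    val writer = new AvroDataFileWriter(out, sft, compression = Deflater.BEST_SPEED)
    try {
      features.foreach(f => writer.append(f))
      writer.flush()
    } finally {
      writer.close()
      out.close()
    }
  }
}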
trait GitRepositoryService extends RepositoryService {
self: DirectoryHandler =>
SshSessionFactory.setInstance(new CustomConfigSessionFactory)
def clone(project: Project): Try[JFile] = Try {
val checkoutDir = repositoryForProject(project)
if(checkoutDir.exists()) {
cleanRepo(project)
}
cloneProjectTo(project, checkoutDir)
checkoutDir
}.recover {
case e: Exception => {
val exceptionMsg = e.getMessage
val buildFailureMessage =
// Really crude attempt to help the user for this quite specific issue.
if(project.repo_url.contains("gerrit") && exceptionMsg.endsWith(": not authorized")) {
s"""Clone failed - $exceptionMsg
|It looks like you might be using Gerrit without any SSH key set up.
|Have you tried using https://<gerrit host>/git/<project name>.git?""".stripMargin
} else {
"Clone failed - "+ exceptionMsg
}
Global.builds.createFailure(project.guid, project.head_version, buildFailureMessage)
Logger.error("Exception - "+buildFailureMessage, e)
throw e
}
}
def checkout(project: Project, version: String): Try[JFile] = Try {
val repoDir = new JFile(repositoryForProject(project), ".git")
Logger.info("Checking out repository for version="+version+" ["+repoDir.getAbsolutePath+"]")
if(repoDir.exists()) {
checkoutRepo(project, version, repoDir)
}
repoDir
}.recover {
case e: Exception =>
Global.builds.createFailure(project.guid, version, "Checkout failed - "+ e.getMessage)
Logger.error("Exception", e)
throw e
}
def cleanRepo(project: Project): Try[Unit] = Try {
val repoDir = repositoryForProject(project)
if(repoDir.exists()) {
Logger.info("Cleaning repository ["+repoDir.getAbsolutePath+"]")
FileUtils.deleteDirectory(repoDir)
if(repoDir.exists()) {
throw new BuildFailedException("Cannot clean directory ["+repoDir.getAbsolutePath+"]")
}
}
}.recover {
case e: Exception =>
Global.builds.createFailure(project.guid, project.head_version, "Clean failed - "+ e.getMessage)
Logger.error("Exception", e)
throw e
}
def getVersions(project: Project): Try[Seq[String]] = Try {
val repoDir = new JFile(repositoryForProject(project), ".git")
getVersionsForRepo(project, repoDir)
}.recover {
case e: Exception =>
Global.builds.createFailure(project.guid, project.head_version, "Getting versions failed - "+ e.getMessage)
Logger.error("Exception", e)
throw e
}
def getAuthors(project: Project): Try[Seq[String]] = Try {
val repoDir = new JFile(repositoryForProject(project), ".git")
getAuthorsForRepo(project, repoDir)
}.recover {
case e: Exception =>
Global.builds.createFailure(project.guid, project.head_version, "Getting authors failed - "+ e.getMessage)
Logger.error("Exception", e)
throw e
}
private def getAuthorsForRepo(project: Project, repoDir: JFile): Seq[String] = {
Logger.info("Retrieve authors for ["+project.name+"]")
val git = new Git(new FileRepository(repoDir))
// Assuming that over 500 commits we should have a good idea of the project authors
// Order by most commits in that period and take the top 4
Util.topAuthorUsernames(4, git.log().call().asScala.take(500).map { (rev: RevCommit) =>
rev.getAuthorIdent.getEmailAddress.takeWhile((ch: Char) => !'@'.equals(ch))
}.toSeq)
}
private def getVersionsForRepo(project: Project, repoDir: JFile) = {
Logger.info("Retrieve versions for ["+project.name+"]")
val refsIndex = "refs/tags/".length
val repo = new Git(new FileRepository(repoDir))
Seq(project.head_version) ++ repo.tagList().call().asScala.map(_.getName.substring(refsIndex)).toSeq
}
private def checkoutRepo(project: Project, version: String, repoDir: JFile): Git = {
val repo = new Git(new FileRepository(repoDir))
repo.checkout().setName(version).call()
repo
}
private def cloneProjectTo(project: Project, repoDir: JFile): Git = {
Logger.info("Cloning git repository ["+project.repo_url+"]. To ["+repoDir.getAbsoluteFile+"].")
Git.cloneRepository().setURI(project.repo_url).setDirectory(repoDir).call()
}
}
| grahamar/Giles | app/build/RepositoryService.scala | Scala | apache-2.0 | 5,021 |
/*
Deduction Tactics
Copyright (C) 2012-2015 Raymond Dodge
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package com.rayrobdod.boardGame
import org.scalatest.{FunSpec}
class RectangularSpaceTest extends FunSpec {
describe ("RectangularSpaceViaFutures") {
def directionTests(
factory:Function1[Function0[Option[RectangularSpace[Int]]], RectangularSpace[Int]],
dirFunction:Function1[RectangularSpace[Int], Option[RectangularSpace[Int]]]
) {
it ("is the result of calling the westOption parameter"){
val res = Some(unescapableSpace(0))
val src = factory( {() => res} );
assertResult(res){dirFunction(src)}
}
it ("factory from constructor is only evaluated once"){
val src = factory( {() => Some(unescapableSpace(scala.util.Random.nextInt()))} );
val res = dirFunction(src)
assertResult(res){dirFunction(src)}
}
}
describe ("west") {
directionTests(
{x => new RectangularSpaceViaFutures(1, x, noneFuture, noneFuture, noneFuture)},
{x => x.west}
)
}
describe ("north") {
directionTests(
{x => new RectangularSpaceViaFutures(1, noneFuture, x, noneFuture, noneFuture)},
{x => x.north}
)
}
describe ("east") {
directionTests(
{x => new RectangularSpaceViaFutures(1, noneFuture, noneFuture, x, noneFuture)},
{x => x.east}
)
}
describe ("south") {
directionTests(
{x => new RectangularSpaceViaFutures(1, noneFuture, noneFuture, noneFuture, x)},
{x => x.south}
)
}
describe ("adjacentSpaces") {
describe ("when all four elements are Some") {
val src = new RectangularSpaceViaFutures(1, unescapableSpaceFuture(34), unescapableSpaceFuture(64), unescapableSpaceFuture(134), unescapableSpaceFuture(-134))
it ("has a length of four") { assertResult(4){src.adjacentSpaces.size} }
it ("contains the west value") { assert(src.adjacentSpaces.map{x => x:RectangularSpace[Int]} contains src.west.get) }
it ("contains the east value") { assert(src.adjacentSpaces.map{x => x:RectangularSpace[Int]} contains src.east.get) }
it ("contains the north value" ) { assert(src.adjacentSpaces.map{x => x:RectangularSpace[Int]} contains src.north.get) }
it ("contains the south value" ) { assert(src.adjacentSpaces.map{x => x:RectangularSpace[Int]} contains src.south.get) }
}
describe ("when all four elements are None") {
def src = new RectangularSpaceViaFutures(1, noneFuture, noneFuture, noneFuture, noneFuture)
it ("has a length of zero") { assertResult(0){src.adjacentSpaces.size} }
}
describe ("when there is a mix of Some and None") {
val src = new RectangularSpaceViaFutures(1, unescapableSpaceFuture(7), unescapableSpaceFuture(-345), noneFuture, noneFuture)
it ("has a length equal to the count of Somes") { assertResult(2){src.adjacentSpaces.size} }
it ("contains the west value" ) { assert(src.adjacentSpaces.map{x => x:RectangularSpace[Int]} contains src.west.get) }
it ("contains the north value" ) { assert(src.adjacentSpaces.map{x => x:RectangularSpace[Int]} contains src.north.get) }
}
}
}
def noneFuture = {() => None}
def unescapableSpace[A](typ:A) = new RectangularSpaceViaFutures(typ, noneFuture, noneFuture, noneFuture, noneFuture)
def unescapableSpaceFuture[A](typ:A) = {() => Option(unescapableSpace(typ))}
}
| rayrobdod/boardGame | Model/src/test/scala/RectangularSpaceTest.scala | Scala | gpl-3.0 | 3,937 |
package thangiee.riotapi.matches
case class Participant(
championId: Int = 0,
highestAchievedSeasonTier: String = "",
masteries: List[Mastery] = Nil,
participantId: Int = 0,
runes: List[Rune] = Nil,
spell1Id: Int = 0,
spell2Id: Int = 0,
stats: ParticipantStats = ParticipantStats(),
teamId: Int = 0,
timeline: ParticipantTimeline = ParticipantTimeline()
)
| Thangiee/Riot-API-Scala | src/main/scala/thangiee/riotapi/matches/Participant.scala | Scala | mit | 379 |
package net.categoricaldata.universalalgebra
import net.categoricaldata.category._
trait Diagrams { category: Category =>
trait Diagram {
trait Cone {
def coneObject: O
def coneMap(o: O): M
}
case class ConeMap(source: Cone, target: Cone, mapBetweenConeObjects: M)
trait Cones extends Category {
override type O = Cone
override type M = ConeMap
override def source(m: ConeMap) = m.source
override def target(m: ConeMap) = m.target
override def identity(o: Cone) = ConeMap(o, o, category.identity(o.coneObject))
override def compose(m1: ConeMap, m2: ConeMap) = ConeMap(m1.source, m2.target, category.compose(m1.mapBetweenConeObjects, m2.mapBetweenConeObjects))
}
trait Cocone {
def coconeObject: O
def coconeMap(o: O): M
}
case class CoconeMap(source: Cocone, target: Cocone, mapBetweenCoconeObjects: M)
trait Cocones extends Category {
override type O = Cocone
override type M = CoconeMap
override def source(m: CoconeMap) = m.source
override def target(m: CoconeMap) = m.target
override def identity(o: Cocone) = CoconeMap(o, o, category.identity(o.coconeObject))
override def compose(m1: CoconeMap, m2: CoconeMap) = CoconeMap(m1.source, m2.target, category.compose(m1.mapBetweenCoconeObjects, m2.mapBetweenCoconeObjects))
}
}
}
trait Limits extends Diagrams { self: Category =>
def limit(d: Diagram): d.Cones with InitialObject
def limitCone(d: Diagram) = limit(d).initialObject
def limitObject(d: Diagram) = limitCone(d).coneObject
}
object Limits {
trait usingProductsAndEqualizers extends Limits { self: Category with Products with Equalizers =>
}
}
trait Colimits extends Diagrams { self: Category =>
def colimit(d: Diagram): d.Cocones with TerminalObject
def colimitCocone(d: Diagram) = colimit(d).terminalObject
def colimitObject(d: Diagram) = colimitCocone(d).coconeObject
}
object Colimits {
trait usingCoproductsAndCoequalizers extends Colimits { self: Category with Coproducts with Coequalizers =>
}
}
| JasonGross/categoricaldata | src/main/scala/net/categoricaldata/universalalgebra/Limits.scala | Scala | mit | 2,028 |
/*
Copyright 2012-2015, University of Geneva.
This file is part of Great Balls of Fire (GBF).
Great Balls of Fire (GBF) is free software: you can redistribute it
and/or modify it under the terms of the GNU General Public License as
published by the Free Software Foundation, either version 3 of the
License, or (at your option) any later version.
Great Balls of Fire (GBF) is distributed in the hope that it will be
useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with Great Balls of Fire (GBF). If not, see
<http://www.gnu.org/licenses/>.
*/
package ch.unige
package object gbf {
import org.streum.configrity._
def loadConfig( fn: String ): Configuration = {
val fromSys = Configuration.systemProperties.detach("gbf")
fromSys include Configuration.load( fn )
}
}
| unigeSPC/gbf | simulator/src/package.scala | Scala | gpl-3.0 | 1,009 |
package chandu0101.scalajs.react.components.textfields
import japgolly.scalajs.react._
import scala.scalajs.js
import scala.scalajs.js.{Array => JArray}
/**
* key: PropTypes.string,
ref: PropTypes.String,
value: React.PropTypes.JArray[String],
valueLink: React.PropTypes.object,
defaultValue: React.PropTypes.JArray[String],
placeholder: React.PropTypes.string,
classNamespace: React.PropTypes.string,
addKeys: React.PropTypes.JArray[Int],
removeKeys: React.PropTypes.JArray[Int],
addOnBlur: React.PropTypes.bool,
onChange: React.PropTypes.(JArray[String],String) => Unit,
onChangeInput: React.PropTypes.String => Unit,
onBlur: React.PropTypes.() => Unit,
onKeyDown: React.PropTypes.ReactEventI => Unit,
onKeyUp: React.PropTypes.ReactEventI => Unit,
onTagAdd: React.PropTypes.String => Unit,
onTagRemove: React.PropTypes.String => Unit,
transform: React.PropTypes.String => Unit,
validate: React.PropTypes.String => Boolean,
validateAsync: React.PropTypes.func
*/
object ReactTagsInput {
def apply(onBlur: js.UndefOr[() => Unit] = js.undefined,
onKeyDown: js.UndefOr[ReactEventI => Unit] = js.undefined,
onTagRemove: js.UndefOr[String => Unit] = js.undefined,
onChange: js.UndefOr[(JArray[String], String) => Unit] = js.undefined,
removeKeys: js.UndefOr[JArray[Int]] = js.undefined,
validate: js.UndefOr[String => Boolean] = js.undefined,
classNamespace: js.UndefOr[String] = js.undefined,
ref: js.UndefOr[String] = js.undefined,
addOnBlur: js.UndefOr[Boolean] = js.undefined,
placeholder: js.UndefOr[String] = js.undefined,
valueLink: js.UndefOr[js.Object] = js.undefined,
onKeyUp: js.UndefOr[ReactEventI => Unit] = js.undefined,
key: js.UndefOr[String] = js.undefined,
addKeys: js.UndefOr[JArray[Int]] = js.undefined,
onTagAdd: js.UndefOr[String => Unit] = js.undefined,
validateAsync: js.UndefOr[js.Function] = js.undefined,
onChangeInput: js.UndefOr[String => Unit] = js.undefined,
defaultValue: js.UndefOr[JArray[String]] = js.undefined,
transform: js.UndefOr[String => Unit] = js.undefined,
value: js.UndefOr[JArray[String]] = js.undefined) = {
val p = js.Dynamic.literal()
onBlur.foreach(v => p.updateDynamic("onBlur")(v))
onKeyDown.foreach(v => p.updateDynamic("onKeyDown")(v))
onTagRemove.foreach(v => p.updateDynamic("onTagRemove")(v))
onChange.foreach(v => p.updateDynamic("onChange")(v))
removeKeys.foreach(v => p.updateDynamic("removeKeys")(v))
validate.foreach(v => p.updateDynamic("validate")(v))
classNamespace.foreach(v => p.updateDynamic("classNamespace")(v))
ref.foreach(v => p.updateDynamic("ref")(v))
addOnBlur.foreach(v => p.updateDynamic("addOnBlur")(v))
placeholder.foreach(v => p.updateDynamic("placeholder")(v))
valueLink.foreach(v => p.updateDynamic("valueLink")(v))
onKeyUp.foreach(v => p.updateDynamic("onKeyUp")(v))
key.foreach(v => p.updateDynamic("key")(v))
addKeys.foreach(v => p.updateDynamic("addKeys")(v))
onTagAdd.foreach(v => p.updateDynamic("onTagAdd")(v))
validateAsync.foreach(v => p.updateDynamic("validateAsync")(v))
onChangeInput.foreach(v => p.updateDynamic("onChangeInput")(v))
defaultValue.foreach(v => p.updateDynamic("defaultValue")(v))
transform.foreach(v => p.updateDynamic("transform")(v))
value.foreach(v => p.updateDynamic("value")(v))
val f = React.asInstanceOf[js.Dynamic].createFactory(js.Dynamic.global.ReactTagsInput)
f(p).asInstanceOf[ReactComponentU_]
}
}
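// A usage sketch (illustrative only, not part of the original source). It assumes the
// ReactTagsInput JavaScript component is available globally, as the factory above requires:
//
//   ReactTagsInput(
//     defaultValue = js.Array("scala", "react"),
//     placeholder  = "Add a tag",
//     onChange     = (tags: JArray[String], changed: String) => println(s"tags: $tags")
//   )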
trait ReactTagsInputM extends js.Object {
def focus(): Unit = js.native
def clear(): Unit = js.native
def getTags(): JArray[String] = js.native
def addTag(tag: String): Unit = js.native
def removeTag(tag: String): Unit = js.native
}
| coreyauger/scalajs-react-components | core/src/main/scala/chandu0101/scalajs/react/components/textfields/ReactTagsInput.scala | Scala | apache-2.0 | 3,903 |
package rxgpio.akka
import akka.actor.Actor
import gpio4s.gpiocfg.CfgIO.PinCfg
import rx.lang.scala.Subscription
import rxgpio.DefaultDigitalIO._
import rxgpio.Gpio.Implicits._
import rxgpio.pigpio.PigpioLibrary
import rxgpio.{Level, PinModes, RxGpio}
import wiii.inject._
class RxGpioPin(num: Int) extends Actor {
implicit val pigpio: PigpioLibrary = Inject[PigpioLibrary]
val in = RxGpio(num)
var subs: Subscription = _
def digitalIn: Receive = {
case DigitalRead(_) => sender() ! gpioRead(num)
case Reset(_) => reset()
}
def digitalOut: Receive = {
case DigitalWrite(_, s) => gpioWrite(num, Level(s))
case Reset(_) => reset()
}
def receive: Receive = {
case Setup(p) => setup(p)
}
def setup(pin: PinCfg) = {
pin.mode match {
case digital if pin.dir.isInput => {
subs = in.map(e => DigitalEvent(num, e.level.toBoolean)).subscribe(context.parent ! _)
gpioSetMode(num, PinModes.input)
context.become(digitalIn)
}
case digital if pin.dir.isOutput => {
gpioSetMode(num, PinModes.output)
context.become(digitalOut)
}
}
}
def reset(): Unit = {
subs.unsubscribe()
context.become(receive)
}
}
| jw3/rxgpio | akka/src/main/scala/rxgpio/akka/RxGpioPin.scala | Scala | apache-2.0 | 1,346 |
package scala.util.concurrent
sealed abstract class Duration {
def toJavaNanos: Long
}
object Duration {
import DurationHelpers._
case class Nanoseconds(length: Long) extends Duration {
def toJavaNanos = length
}
case class Microseconds(length: Long) extends Duration {
def toJavaNanos = x(length, C1/C0, Long.MaxValue/(C1/C0))
}
case class Milliseconds(length: Long) extends Duration {
def toJavaNanos = x(length, C2/C0, Long.MaxValue/(C2/C0))
}
case class Seconds(length: Long) extends Duration {
def toJavaNanos = x(length, C3/C0, Long.MaxValue/(C3/C0))
}
case class Minutes(length: Long) extends Duration {
def toJavaNanos = x(length, C4/C0, Long.MaxValue/(C4/C0))
}
case class Hours(length: Long) extends Duration {
def toJavaNanos = x(length, C5/C0, Long.MaxValue/(C5/C0))
}
case class Days(length: Long) extends Duration {
def toJavaNanos = x(length, C6/C0, Long.MaxValue/(C6/C0))
}
implicit def intWithDurationMethods(i: Int) = new {
def nanos = Nanoseconds(i)
def micros = Microseconds(i)
def millis = Milliseconds(i)
def seconds = Seconds(i)
def minutes = Minutes(i)
def hours = Hours(i)
def days = Days(i)
}
}
object DurationHelpers {
// Handy constants for conversion methods
// from TimeUnit.java
val C0 = 1L
val C1 = C0 * 1000
val C2 = C1 * 1000
val C3 = C2 * 1000
val C4 = C3 * 60
val C5 = C4 * 60
val C6 = C5 * 24
def x(d: Long, m: Long, over: Long): Long = {
if (d > over) Long.MaxValue
else if (d < -over) Long.MinValue
else d * m
}
}
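// A minimal usage sketch (added for illustration; not part of the original library).
// It exercises only the definitions above.
object DurationExample {
  def main(args: Array[String]): Unit = {
    println(Duration.Seconds(5).toJavaNanos)      // 5000000000
    println(Duration.Days(1).toJavaNanos)         // 86400000000000
    println(Duration.Nanoseconds(42).toJavaNanos) // 42
  }
}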
| joshcough/Sweet | src/main/scala/scala/util/concurrent/Duration.scala | Scala | lgpl-2.1 | 1,601 |
package test001
import scalikejdbc._
import scalikejdbc.scalatest.AutoRollback
import org.scalatest.{ fixture, Matchers }
/**
 * This spec demonstrates that using hasManyThrough.byDefault on each other is impossible.
*/
class Spec extends fixture.FunSpec with Matchers with Connection with CreateTables with AutoRollback {
override def db(): DB = NamedDB('test001).toDB()
override def fixture(implicit session: DBSession): Unit = {
val t1_1 = Test1.createWithAttributes('name -> "foo-1")
val t1_2 = Test1.createWithAttributes('name -> "foo-2")
val t1_3 = Test1.createWithAttributes('name -> "foo-3")
val t2_1 = Test2.createWithAttributes('name -> "bar-1")
val t2_2 = Test2.createWithAttributes('name -> "bar-2")
Test1Test2.createWithAttributes('test1Id -> t1_1, 'test2Id -> t2_1)
Test1Test2.createWithAttributes('test1Id -> t1_1, 'test2Id -> t2_2)
Test1Test2.createWithAttributes('test1Id -> t1_2, 'test2Id -> t2_2)
}
describe("hasManyThrough byDefault each other") {
it("should work as expected") { implicit session =>
val (t1, t2) = (Test1.defaultAlias, Test2.defaultAlias)
val t1_1 = Test1.findBy(sqls.eq(t1.name, "foo-1"))
t1_1.isDefined should equal(true)
t1_1.get.test2.map(_.name).sorted should equal(Seq("bar-1", "bar-2"))
val t1_2 = Test1.findBy(sqls.eq(t1.name, "foo-2"))
t1_2.isDefined should equal(true)
t1_2.get.test2.map(_.name).sorted should equal(Seq("bar-2"))
val t1_3 = Test1.findBy(sqls.eq(t1.name, "foo-3"))
t1_3.isDefined should equal(true)
t1_3.get.test2 should equal(Nil)
val t2_1 = Test2.joins(Test2.test1Ref).findBy(sqls.eq(t2.name, "bar-1"))
t2_1.isDefined should equal(true)
t2_1.get.test1.map(_.name).sorted should equal(Seq("foo-1"))
val t2_2 = Test2.joins(Test2.test1Ref).findBy(sqls.eq(t2.name, "bar-2"))
t2_2.isDefined should equal(true)
t2_2.get.test1.map(_.name).sorted should equal(Seq("foo-1", "foo-2"))
}
}
}
| seratch/skinny-framework | orm/src/test/scala/test001/Spec.scala | Scala | mit | 2,005 |
/**
* Copyright 2013 Gianluca Amato
*
* This file is part of JANDOM: JVM-based Analyzer for Numerical DOMains
* JANDOM is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* JANDOM is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with JANDOM. If not, see <http://www.gnu.org/licenses/>.
*/
package it.unich.jandom.ppfactories
import it.unich.jandom.targets.Annotation
import it.unich.jandom.targets.Target
/**
* A "per program point factory" which reuses objects for the same program point.
* @tparam ProgramPoint the type of program point
* @param factory the nested factory for building new objects
* @param ann an annotation used to memoize the objects
* @author Gianluca Amato <[email protected]>
*/
class MemoizingFactory[ProgramPoint, T](private val factory: PPFactory[ProgramPoint, T],
private val ann: Annotation[ProgramPoint, T]) extends PPFactory[ProgramPoint, T] {
def apply(pp: ProgramPoint) = {
ann.get(pp) match {
case Some(v) => v
case None => { val v = factory(pp); ann(pp) = v; v }
}
}
}
/**
* The companion object for per program points factories
*/
object MemoizingFactory {
/**
* Builds a "per program point" factory, given an annotation
* @tparam ProgramPoint the type of program point
* @tparam T the type of the returned value for the factory
* @param factory the factory to build new widenings when needed
   * @param ann annotation used to store the generated widenings
* @return the factory
*/
def apply[ProgramPoint, T](factory: PPFactory[ProgramPoint, T], ann: Annotation[ProgramPoint, T]) =
new MemoizingFactory(factory, ann)
/**
* Builds a "per program point" factory using the standard annotation for a given target
* @tparam Tgt the type of target for the factory
* @tparam T the type of the returned value for the factory
* @param factory the factory to build new widenings when needed
* @param tgt the target
* @return the factory
*/
def apply[Tgt <: Target[Tgt], T](tgt: Tgt)(factory: PPFactory[tgt.ProgramPoint, T]): PPFactory[tgt.ProgramPoint,T] =
new MemoizingFactory(factory, tgt.getAnnotation[T])
}
| rubino22/JDBeta | core/src/main/scala/it/unich/jandom/ppfactories/MemoizingFactory.scala | Scala | lgpl-3.0 | 2,591 |
package org.sarrufat.sudoku.latinsqr
import org.sarrufat.sudoku.latinsqr.LatinSquare
object LSWS {;import org.scalaide.worksheet.runtime.library.WorksheetSupport._; def main(args: Array[String])=$execute{;$skip(129);
val ls = new LatinSquare(9);System.out.println("""ls : org.sarrufat.sudoku.latinsqr.LatinSquare = """ + $show(ls ))}
}
| sarrufat/ScalaSudoku | .worksheet/src/org.sarrufat.sudoku.latinsqr.LSWS.scala | Scala | gpl-3.0 | 343 |
package unfiltered.request
import org.specs._
object AcceptsSpecJetty extends unfiltered.spec.jetty.Served with AcceptsSpec {
def setup = { _.filter(unfiltered.filter.Planify(intent)) }
}
object AcceptsSpecNetty extends unfiltered.spec.netty.Served with AcceptsSpec {
def setup = { p =>
unfiltered.netty.Http(p).handler(unfiltered.netty.cycle.Planify(intent))
}
}
trait AcceptsSpec extends unfiltered.spec.Hosted {
import unfiltered.response._
import unfiltered.request._
import unfiltered.request.{Path => UFPath}
import dispatch._
def intent[A,B]: unfiltered.Cycle.Intent[A,B] = {
case GET(UFPath(Seg(ext :: Nil)) & Accepts.Json(_)) => ResponseString("json")
case GET(UFPath(Seg(ext :: Nil)) & Accepts.Jsonp(_)) => ResponseString("javascript")
case GET(UFPath(Seg(ext :: Nil)) & Accepts.Xml(_)) => ResponseString("xml")
case GET(UFPath(Seg(ext :: Nil)) & Accepts.Csv(_)) => ResponseString("csv")
case GET(UFPath(Seg(ext :: Nil)) & Accepts.Html(_)) => ResponseString("html")
}
"Accepts should" should {
"match an application/javascript accepts request as jsonp" in {
val resp = Http(host / "test" <:< Map("Accept" -> "application/javascript") as_str)
resp must_=="javascript"
}
"match an application/json accepts request as json" in {
val resp = Http(host / "test" <:< Map("Accept" -> "application/json") as_str)
resp must_=="json"
}
"match a mixed accepts request with json as json" in {
val resp = Http(host / "test" <:< Map("Accept" -> "application/xhtml+xml,text/xml;q=0.5,application/xml;q=0.9,*/*;q=0.8,application/json") as_str)
resp must_=="json"
}
"match a .json file extension as json when accepts is empty or contains a wildcard" in {
val resp = Http(host / "test.json" <:< Map("Accept" -> "*/*") as_str)
resp must_=="json"
}
"match a text/xml accepts request as xml" in {
val resp = Http(host / "test" <:< Map("Accept" -> "text/xml") as_str)
resp must_=="xml"
}
"match a mixed accepts request with xml as xml" in {
val resp = Http(host / "test" <:< Map("Accept" -> "text/html,application/xhtml+xml,text/xml;q=0.5,application/xml;q=0.9,*/*;q=0.8") as_str)
resp must_=="xml"
}
"match a .xml file extension as json when accepts is empty or contains a wildcard" in {
val resp = Http(host / "test.xml" <:< Map("Accept" -> "*/*") as_str)
resp must_=="xml"
}
"match a text/html accepts request as html" in {
val resp = Http(host / "test" <:< Map("Accept" -> "text/html") as_str)
resp must_=="html"
}
"match a mixed accepts request with html as html" in {
val resp = Http(host / "test" <:< Map("Accept" -> "application/octet-stream,text/html,application/pdf;q=0.9,*/*;q=0.8") as_str)
resp must_=="html"
}
"match a .html file extension as html when accepts is empty or contains a wildcard" in {
val resp = Http(host / "test.html" <:< Map("Accept" -> "*/*") as_str)
resp must_=="html"
}
"match a text/csv accepts request as csv" in {
val resp = Http(host / "test" <:< Map("Accept" -> "text/csv") as_str)
resp must_=="csv"
}
"match a mixed accepts request with csv as csv" in {
val resp = Http(host / "test" <:< Map("Accept" -> "text/html,text/csv,application/pdf;q=0.9,*/*;q=0.8") as_str)
resp must_=="csv"
}
"match a .csv file extension as csv when accepts is empty or contains a wildcard" in {
val resp = Http(host / "test.csv" <:< Map("Accept" -> "*/*") as_str)
resp must_=="csv"
}
}
}
| softprops/Unfiltered | library/src/test/scala/AcceptsSpec.scala | Scala | mit | 3,620 |
package sharry.common.syntax
object all extends StringSyntax with LoggerSyntax with StreamSyntax
| eikek/sharry | modules/common/src/main/scala/sharry/common/syntax/all.scala | Scala | gpl-3.0 | 98 |
package sylvestris.service.common
import spray.json._, DefaultJsonProtocol._
case class Relationship(label: Option[String], nodePath: String)
object Relationship {
implicit val jsonFormat = jsonFormat2(apply)
}
| janrain/sylvestris | service-common/src/main/scala/sylvestris/service/common/Relationship.scala | Scala | mit | 216 |
package mesosphere.marathon
package raml
import java.time.OffsetDateTime
import mesosphere.marathon.core.base.Clock
import mesosphere.marathon.core.launcher.OfferMatchResult
import mesosphere.marathon.core.launchqueue.LaunchQueue.QueuedInstanceInfoWithStatistics
import mesosphere.marathon.core.pod.PodDefinition
import mesosphere.marathon.state.AppDefinition
import mesosphere.mesos.NoOfferMatchReason
trait QueueInfoConversion extends DefaultConversions with OfferConversion {
implicit val rejectReasonWrites: Writes[NoOfferMatchReason, String] = Writes { _.toString }
implicit val unusedOfferWrites: Writes[OfferMatchResult.NoMatch, UnusedOffer] = Writes { noMatch =>
UnusedOffer(Raml.toRaml(noMatch.offer), Raml.toRaml(noMatch.reasons), noMatch.timestamp.toOffsetDateTime)
}
implicit val queueInfoWithStatisticsWrites: Writes[(QueuedInstanceInfoWithStatistics, Boolean, Clock), QueueItem] = Writes {
case (info, withLastUnused, clock) =>
def delay: Option[QueueDelay] = {
val timeLeft = clock.now() until info.backOffUntil
val overdue = timeLeft.toSeconds < 0
Some(QueueDelay(math.max(0, timeLeft.toSeconds), overdue = overdue))
}
/*
* `rejectSummaryLastOffers` should be a triple of (reason, amount declined, amount processed)
       * and should reflect the `NoOfferMatchReason.reasonFunnel` to store only the first non-matching reason.
*
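       * For example (illustrative): with 10 processed offers and a summary in which the
       * first reason of the funnel was hit 4 times, that reason's step is
       * DeclinedOfferStep(reason, 4, 10), and every later reason in the funnel sees
       * 10 - 4 = 6 processed offers.
       *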
       * @param processedOffers the number of offers that were processed
       * @param summary the reject summary for the processed offers
* @return calculated Seq of `DeclinedOfferStep`
*/
def declinedOfferSteps(processedOffers: Int, summary: Map[NoOfferMatchReason, Int]): Seq[DeclinedOfferStep] = {
val (_, rejectSummaryLastOffers) = NoOfferMatchReason.
reasonFunnel.foldLeft((processedOffers, Seq.empty[DeclinedOfferStep])) {
case ((processed: Int, seq: Seq[DeclinedOfferStep]), reason: NoOfferMatchReason) =>
val nextProcessed = processed - summary.getOrElse(reason, 0)
(nextProcessed, seq :+ DeclinedOfferStep(reason.toString, summary.getOrElse(reason, 0), processed))
}
rejectSummaryLastOffers
}
def processedOffersSummary: ProcessedOffersSummary = {
ProcessedOffersSummary(
processedOffersCount = info.processedOffersCount,
unusedOffersCount = info.unusedOffersCount,
lastUnusedOfferAt = info.lastNoMatch.map(_.timestamp.toOffsetDateTime),
lastUsedOfferAt = info.lastMatch.map(_.timestamp.toOffsetDateTime),
rejectSummaryLastOffers = declinedOfferSteps(info.lastNoMatches.size, info.rejectSummaryLastOffers),
rejectSummaryLaunchAttempt = declinedOfferSteps(info.processedOffersCount, info.rejectSummaryLaunchAttempt)
)
}
def queueItem[A](create: (Int, Option[QueueDelay], OffsetDateTime, ProcessedOffersSummary, Option[Seq[UnusedOffer]]) => A): A = {
create(
info.instancesLeftToLaunch,
delay,
info.startedAt.toOffsetDateTime,
processedOffersSummary,
if (withLastUnused) Some(Raml.toRaml(info.lastNoMatches)) else None
)
}
info.runSpec match {
case app: AppDefinition => queueItem(QueueApp(_, _, _, _, _, Raml.toRaml(app)))
case pod: PodDefinition => queueItem(QueuePod(_, _, _, _, _, Raml.toRaml(pod)))
}
}
implicit val queueWrites: Writes[(Seq[QueuedInstanceInfoWithStatistics], Boolean, Clock), Queue] = Writes {
case (infos, withLastUnused, clock) =>
Queue(infos.map(info => Raml.toRaml((info, withLastUnused, clock))))
}
}
| natemurthy/marathon | src/main/scala/mesosphere/marathon/raml/QueueInfoConversion.scala | Scala | apache-2.0 | 3,677 |
package org.http4s
package server
package blaze
import cats.data.Kleisli
import cats.effect._
import cats.effect.concurrent.Deferred
import cats.implicits._
import java.nio.ByteBuffer
import java.nio.charset.StandardCharsets
import org.http4s.{headers => H}
import org.http4s.blaze._
import org.http4s.blaze.pipeline.Command.Connected
import org.http4s.blaze.util.TickWheelExecutor
import org.http4s.blazecore.{ResponseParser, SeqTestHead}
import org.http4s.dsl.io._
import org.http4s.headers.{Date, `Content-Length`, `Transfer-Encoding`}
import org.specs2.specification.AfterAll
import org.specs2.specification.core.Fragment
import scala.concurrent.duration._
import scala.concurrent.Await
import _root_.io.chrisdavenport.vault._
class Http1ServerStageSpec extends Http4sSpec with AfterAll {
sequential
val tickWheel = new TickWheelExecutor()
def afterAll = tickWheel.shutdown()
def makeString(b: ByteBuffer): String = {
val p = b.position()
val a = new Array[Byte](b.remaining())
b.get(a).position(p)
new String(a)
}
def parseAndDropDate(buff: ByteBuffer): (Status, Set[Header], String) =
dropDate(ResponseParser.apply(buff))
def dropDate(resp: (Status, Set[Header], String)): (Status, Set[Header], String) = {
val hds = resp._2.filter(_.name != Date.name)
(resp._1, hds, resp._3)
}
def runRequest(
req: Seq[String],
httpApp: HttpApp[IO],
maxReqLine: Int = 4 * 1024,
maxHeaders: Int = 16 * 1024): SeqTestHead = {
val head = new SeqTestHead(
req.map(s => ByteBuffer.wrap(s.getBytes(StandardCharsets.ISO_8859_1))))
val httpStage = Http1ServerStage[IO](
httpApp,
() => Vault.empty,
testExecutionContext,
enableWebSockets = true,
maxReqLine,
maxHeaders,
10 * 1024,
DefaultServiceErrorHandler,
30.seconds,
30.seconds,
tickWheel
)
pipeline.LeafBuilder(httpStage).base(head)
head.sendInboundCommand(Connected)
head
}
"Http1ServerStage: Invalid Lengths" should {
val req = "GET /foo HTTP/1.1\\r\\nheader: value\\r\\n\\r\\n"
val routes = HttpRoutes
.of[IO] {
case _ => Ok("foo!")
}
.orNotFound
"fail on too long of a request line" in {
val buff = Await.result(runRequest(Seq(req), routes, maxReqLine = 1).result, 5.seconds)
val str = StandardCharsets.ISO_8859_1.decode(buff.duplicate()).toString
      // make sure the request was rejected with a 400 Bad Request.
str.contains("400 Bad Request") must_== true
}
"fail on too long of a header" in {
val buff = Await.result(runRequest(Seq(req), routes, maxHeaders = 1).result, 5.seconds)
val str = StandardCharsets.ISO_8859_1.decode(buff.duplicate()).toString
      // make sure the request was rejected with a 400 Bad Request.
str.contains("400 Bad Request") must_== true
}
}
"Http1ServerStage: Common responses" should {
Fragment.foreach(ServerTestRoutes.testRequestResults.zipWithIndex) {
case ((req, (status, headers, resp)), i) =>
if (i == 7 || i == 8) // Awful temporary hack
s"Run request $i Run request: --------\\n${req.split("\\r\\n\\r\\n")(0)}\\n" in {
val result = Await.result(runRequest(Seq(req), ServerTestRoutes()).result, 5.seconds)
parseAndDropDate(result) must_== ((status, headers, resp))
} else
s"Run request $i Run request: --------\\n${req.split("\\r\\n\\r\\n")(0)}\\n" in {
val result = Await.result(runRequest(Seq(req), ServerTestRoutes()).result, 5.seconds)
parseAndDropDate(result) must_== ((status, headers, resp))
}
}
}
"Http1ServerStage: Errors" should {
val exceptionService = HttpRoutes
.of[IO] {
case GET -> Root / "sync" => sys.error("Synchronous error!")
case GET -> Root / "async" => IO.raiseError(new Exception("Asynchronous error!"))
case GET -> Root / "sync" / "422" =>
throw InvalidMessageBodyFailure("lol, I didn't even look")
case GET -> Root / "async" / "422" =>
IO.raiseError(InvalidMessageBodyFailure("lol, I didn't even look"))
}
.orNotFound
def runError(path: String) =
runRequest(List(path), exceptionService).result
.map(parseAndDropDate)
.map {
case (s, h, r) =>
val close = h.exists { h =>
h.toRaw.name == "connection".ci && h.toRaw.value == "close"
}
(s, close, r)
}
"Deal with synchronous errors" in {
val path = "GET /sync HTTP/1.1\\r\\nConnection:keep-alive\\r\\n\\r\\n"
val (s, c, _) = Await.result(runError(path), 10.seconds)
s must_== InternalServerError
c must_== true
}
"Call toHttpResponse on synchronous errors" in {
val path = "GET /sync/422 HTTP/1.1\\r\\nConnection:keep-alive\\r\\n\\r\\n"
val (s, c, _) = Await.result(runError(path), 10.seconds)
s must_== UnprocessableEntity
c must_== false
}
"Deal with asynchronous errors" in {
val path = "GET /async HTTP/1.1\\r\\nConnection:keep-alive\\r\\n\\r\\n"
val (s, c, _) = Await.result(runError(path), 10.seconds)
s must_== InternalServerError
c must_== true
}
"Call toHttpResponse on asynchronous errors" in {
val path = "GET /async/422 HTTP/1.1\\r\\nConnection:keep-alive\\r\\n\\r\\n"
val (s, c, _) = Await.result(runError(path), 10.seconds)
s must_== UnprocessableEntity
c must_== false
}
"Handle parse error" in {
val path = "THIS\\u0000IS\\u0000NOT\\u0000HTTP"
val (s, c, _) = Await.result(runError(path), 10.seconds)
s must_== BadRequest
c must_== true
}
}
"Http1ServerStage: routes" should {
"Do not send `Transfer-Encoding: identity` response" in {
val routes = HttpRoutes
.of[IO] {
case _ =>
val headers = Headers.of(H.`Transfer-Encoding`(TransferCoding.identity))
IO.pure(Response[IO](headers = headers)
.withEntity("hello world"))
}
.orNotFound
      // A single request with no body, sent in one chunk
val req = "GET /foo HTTP/1.1\\r\\n\\r\\n"
val buff = Await.result(runRequest(Seq(req), routes).result, 5.seconds)
val str = StandardCharsets.ISO_8859_1.decode(buff.duplicate()).toString
// make sure we don't have signs of chunked encoding.
str.contains("0\\r\\n\\r\\n") must_== false
str.contains("hello world") must_== true
val (_, hdrs, _) = ResponseParser.apply(buff)
hdrs.find(_.name == `Transfer-Encoding`.name) must_== None
}
"Do not send an entity or entity-headers for a status that doesn't permit it" in {
val routes: HttpApp[IO] = HttpRoutes
.of[IO] {
case _ =>
IO.pure(
Response[IO](status = Status.NotModified)
.putHeaders(`Transfer-Encoding`(TransferCoding.chunked))
.withEntity("Foo!"))
}
.orNotFound
val req = "GET /foo HTTP/1.1\\r\\n\\r\\n"
val buf = Await.result(runRequest(Seq(req), routes).result, 5.seconds)
val (status, hs, body) = ResponseParser.parseBuffer(buf)
val hss = Headers(hs.toList)
`Content-Length`.from(hss).isDefined must_== false
body must_== ""
status must_== Status.NotModified
}
"Add a date header" in {
val routes = HttpRoutes
.of[IO] {
case req => IO.pure(Response(body = req.body))
}
.orNotFound
      // A single request sent whole in one chunk
val req1 = "POST /sync HTTP/1.1\\r\\nConnection:keep-alive\\r\\nContent-Length: 4\\r\\n\\r\\ndone"
val buff = Await.result(runRequest(Seq(req1), routes).result, 5.seconds)
// Both responses must succeed
val (_, hdrs, _) = ResponseParser.apply(buff)
hdrs.find(_.name == Date.name) must beSome[Header]
}
"Honor an explicitly added date header" in {
val dateHeader = Date(HttpDate.Epoch)
val routes = HttpRoutes
.of[IO] {
case req => IO.pure(Response(body = req.body).withHeaders(dateHeader))
}
.orNotFound
      // A single request sent whole in one chunk
val req1 = "POST /sync HTTP/1.1\\r\\nConnection:keep-alive\\r\\nContent-Length: 4\\r\\n\\r\\ndone"
val buff = Await.result(runRequest(Seq(req1), routes).result, 5.seconds)
// Both responses must succeed
val (_, hdrs, _) = ResponseParser.apply(buff)
hdrs.find(_.name == Date.name) must_== Some(dateHeader)
}
"Handle routes that echos full request body for non-chunked" in {
val routes = HttpRoutes
.of[IO] {
case req => IO.pure(Response(body = req.body))
}
.orNotFound
// The first request will get split into two chunks, leaving the last byte off
val req1 = "POST /sync HTTP/1.1\\r\\nConnection:keep-alive\\r\\nContent-Length: 4\\r\\n\\r\\ndone"
val (r11, r12) = req1.splitAt(req1.length - 1)
val buff = Await.result(runRequest(Seq(r11, r12), routes).result, 5.seconds)
// Both responses must succeed
parseAndDropDate(buff) must_== ((Ok, Set(H.`Content-Length`.unsafeFromLong(4)), "done"))
}
"Handle routes that consumes the full request body for non-chunked" in {
val routes = HttpRoutes
.of[IO] {
case req =>
req.as[String].map { s =>
Response().withEntity("Result: " + s)
}
}
.orNotFound
// The first request will get split into two chunks, leaving the last byte off
val req1 = "POST /sync HTTP/1.1\\r\\nConnection:keep-alive\\r\\nContent-Length: 4\\r\\n\\r\\ndone"
val (r11, r12) = req1.splitAt(req1.length - 1)
val buff = Await.result(runRequest(Seq(r11, r12), routes).result, 5.seconds)
// Both responses must succeed
parseAndDropDate(buff) must_== (
(
Ok,
Set(
H.`Content-Length`.unsafeFromLong(8 + 4),
H.`Content-Type`(MediaType.text.plain, Charset.`UTF-8`)),
"Result: done"))
}
"Maintain the connection if the body is ignored but was already read to completion by the Http1Stage" in {
val routes = HttpRoutes
.of[IO] {
case _ => IO.pure(Response().withEntity("foo"))
}
.orNotFound
      // Two requests, each sent whole in its own chunk
val req1 = "POST /sync HTTP/1.1\\r\\nConnection:keep-alive\\r\\nContent-Length: 4\\r\\n\\r\\ndone"
val req2 = "POST /sync HTTP/1.1\\r\\nConnection:keep-alive\\r\\nContent-Length: 5\\r\\n\\r\\ntotal"
val buff = Await.result(runRequest(Seq(req1, req2), routes).result, 5.seconds)
val hs = Set(
H.`Content-Type`(MediaType.text.plain, Charset.`UTF-8`),
H.`Content-Length`.unsafeFromLong(3))
// Both responses must succeed
dropDate(ResponseParser.parseBuffer(buff)) must_== ((Ok, hs, "foo"))
dropDate(ResponseParser.parseBuffer(buff)) must_== ((Ok, hs, "foo"))
}
"Drop the connection if the body is ignored and was not read to completion by the Http1Stage" in {
val routes = HttpRoutes
.of[IO] {
case _ => IO.pure(Response().withEntity("foo"))
}
.orNotFound
// The first request will get split into two chunks, leaving the last byte off
val req1 = "POST /sync HTTP/1.1\\r\\nConnection:keep-alive\\r\\nContent-Length: 4\\r\\n\\r\\ndone"
val (r11, r12) = req1.splitAt(req1.length - 1)
val req2 = "POST /sync HTTP/1.1\\r\\nConnection:keep-alive\\r\\nContent-Length: 5\\r\\n\\r\\ntotal"
val buff = Await.result(runRequest(Seq(r11, r12, req2), routes).result, 5.seconds)
val hs = Set(
H.`Content-Type`(MediaType.text.plain, Charset.`UTF-8`),
H.`Content-Length`.unsafeFromLong(3))
// Both responses must succeed
dropDate(ResponseParser.parseBuffer(buff)) must_== ((Ok, hs, "foo"))
buff.remaining() must_== 0
}
"Handle routes that runs the request body for non-chunked" in {
val routes = HttpRoutes
.of[IO] {
case req => req.body.compile.drain *> IO.pure(Response().withEntity("foo"))
}
.orNotFound
// The first request will get split into two chunks, leaving the last byte off
val req1 = "POST /sync HTTP/1.1\\r\\nConnection:keep-alive\\r\\nContent-Length: 4\\r\\n\\r\\ndone"
val (r11, r12) = req1.splitAt(req1.length - 1)
val req2 = "POST /sync HTTP/1.1\\r\\nConnection:keep-alive\\r\\nContent-Length: 5\\r\\n\\r\\ntotal"
val buff = Await.result(runRequest(Seq(r11, r12, req2), routes).result, 5.seconds)
val hs = Set(
H.`Content-Type`(MediaType.text.plain, Charset.`UTF-8`),
H.`Content-Length`.unsafeFromLong(3))
// Both responses must succeed
dropDate(ResponseParser.parseBuffer(buff)) must_== ((Ok, hs, "foo"))
dropDate(ResponseParser.parseBuffer(buff)) must_== ((Ok, hs, "foo"))
}
// Think of this as drunk HTTP pipelining
"Not die when two requests come in back to back" in {
val routes = HttpRoutes
.of[IO] {
case req =>
IO.pure(Response(body = req.body))
}
.orNotFound
      // Both requests arrive back to back in a single chunk
val req1 = "POST /sync HTTP/1.1\\r\\nConnection:keep-alive\\r\\nContent-Length: 4\\r\\n\\r\\ndone"
val req2 = "POST /sync HTTP/1.1\\r\\nConnection:keep-alive\\r\\nContent-Length: 5\\r\\n\\r\\ntotal"
val buff = Await.result(runRequest(Seq(req1 + req2), routes).result, 5.seconds)
// Both responses must succeed
dropDate(ResponseParser.parseBuffer(buff)) must_== (
(
Ok,
Set(H.`Content-Length`.unsafeFromLong(4)),
"done"))
dropDate(ResponseParser.parseBuffer(buff)) must_== (
(
Ok,
Set(H.`Content-Length`.unsafeFromLong(5)),
"total"))
}
"Handle using the request body as the response body" in {
val routes = HttpRoutes
.of[IO] {
case req => IO.pure(Response(body = req.body))
}
.orNotFound
      // Two requests, each sent whole in its own chunk
val req1 = "POST /sync HTTP/1.1\\r\\nConnection:keep-alive\\r\\nContent-Length: 4\\r\\n\\r\\ndone"
val req2 = "POST /sync HTTP/1.1\\r\\nConnection:keep-alive\\r\\nContent-Length: 5\\r\\n\\r\\ntotal"
val buff = Await.result(runRequest(Seq(req1, req2), routes).result, 5.seconds)
// Both responses must succeed
dropDate(ResponseParser.parseBuffer(buff)) must_== (
(
Ok,
Set(H.`Content-Length`.unsafeFromLong(4)),
"done"))
dropDate(ResponseParser.parseBuffer(buff)) must_== (
(
Ok,
Set(H.`Content-Length`.unsafeFromLong(5)),
"total"))
}
{
def req(path: String) =
s"GET /$path HTTP/1.1\\r\\nTransfer-Encoding: chunked\\r\\n\\r\\n" +
"3\\r\\n" +
"foo\\r\\n" +
"0\\r\\n" +
"Foo:Bar\\r\\n\\r\\n"
val routes = HttpRoutes
.of[IO] {
case req if req.pathInfo == "/foo" =>
for {
_ <- req.body.compile.drain
hs <- req.trailerHeaders
resp <- Ok(hs.toList.mkString)
} yield resp
case req if req.pathInfo == "/bar" =>
for {
// Don't run the body
hs <- req.trailerHeaders
resp <- Ok(hs.toList.mkString)
} yield resp
}
.orNotFound
"Handle trailing headers" in {
val buff = Await.result(runRequest(Seq(req("foo")), routes).result, 5.seconds)
val results = dropDate(ResponseParser.parseBuffer(buff))
results._1 must_== Ok
results._3 must_== "Foo: Bar"
}
"Fail if you use the trailers before they have resolved" in {
val buff = Await.result(runRequest(Seq(req("bar")), routes).result, 5.seconds)
val results = dropDate(ResponseParser.parseBuffer(buff))
results._1 must_== InternalServerError
}
}
}
"cancels on stage shutdown" in skipOnCi {
Deferred[IO, Unit]
.flatMap { canceled =>
Deferred[IO, Unit].flatMap { gate =>
val req = "POST /sync HTTP/1.1\\r\\nConnection:keep-alive\\r\\nContent-Length: 4\\r\\n\\r\\ndone"
val app: HttpApp[IO] = HttpApp { _ =>
gate.complete(()) >> IO.cancelable(_ => canceled.complete(()))
}
for {
head <- IO(runRequest(List(req), app))
_ <- gate.get
_ <- IO(head.closePipeline(None))
_ <- canceled.get
} yield ()
}
}
.unsafeRunTimed(3.seconds) must beSome(())
}
"Disconnect if we read an EOF" in {
val head = runRequest(Seq.empty, Kleisli.liftF(Ok("")))
Await.ready(head.result, 10.seconds)
head.closeCauses must_== Seq(None)
}
}
| aeons/http4s | blaze-server/src/test/scala/org/http4s/server/blaze/Http1ServerStageSpec.scala | Scala | apache-2.0 | 17,003 |
/*******************************************************************************
* (C) Copyright 2015 ADP, LLC.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*******************************************************************************/
package unicorn
import java.util.{Date, UUID}
import unicorn.oid.BsonObjectId
/**
* @author Haifeng Li
*/
package object json {
type JsTopLevel = Either[JsObject, JsArray]
val JsTrue = new JsBoolean(true)
val JsFalse = new JsBoolean(false)
implicit class JsonHelper(private val sc: StringContext) extends AnyVal {
def json(args: Any*): JsObject = {
JsonParser(sc.s(args: _*).stripMargin).asInstanceOf[JsObject]
}
}
implicit def jsObjectTopLevel(x: JsObject) = Left(x)
implicit def jsArrayTopLevel(x: JsArray) = Right(x)
implicit def pimpString(string: String) = new PimpedString(string)
implicit def boolean2JsValue(x: Boolean) = JsBoolean(x)
implicit def int2JsValue(x: Int) = JsInt(x)
implicit def long2JsValue(x: Long) = JsLong(x)
implicit def double2JsValue(x: Double) = JsDouble(x)
implicit def string2JsValue(x: String) = JsString(x)
implicit def date2JsValue(x: Date) = JsDate(x)
implicit def uuid2JsValue(x: UUID) = JsUUID(x)
implicit def objectId2JsValue(x: BsonObjectId) = JsObjectId(x)
implicit def byteArray2JsValue(x: Array[Byte]) = JsBinary(x)
implicit def array2JsValue(x: Array[JsValue]) = JsArray(x: _*)
implicit def seq2JsValue(x: Seq[JsValue]) = JsArray(x: _*)
implicit def map2JsValue(x: Seq[(String, JsValue)]) = JsObject(x: _*)
implicit def map2JsValue(x: collection.mutable.Map[String, JsValue]) = JsObject(x)
implicit def map2JsValue(x: collection.immutable.Map[String, JsValue]) = JsObject(x)
implicit def pimpBooleanSeq(x: Seq[Boolean]) = new PimpedBooleanSeq(x)
implicit def pimpIntSeq(x: Seq[Int]) = new PimpedIntSeq(x)
implicit def pimpLongSeq(x: Seq[Long]) = new PimpedLongSeq(x)
implicit def pimpDoubleSeq(x: Seq[Double]) = new PimpedDoubleSeq(x)
implicit def pimpStringSeq(x: Seq[String]) = new PimpedStringSeq(x)
implicit def pimpDateSeq(x: Seq[Date]) = new PimpedDateSeq(x)
implicit def pimpBooleanArray(x: Array[Boolean]) = new PimpedBooleanSeq(x)
implicit def pimpIntArray(x: Array[Int]) = new PimpedIntSeq(x)
implicit def pimpLongArray(x: Array[Long]) = new PimpedLongSeq(x)
implicit def pimpDoubleArray(x: Array[Double]) = new PimpedDoubleSeq(x)
implicit def pimpStringArray(x: Array[String]) = new PimpedStringSeq(x)
implicit def pimpDateArray(x: Array[Date]) = new PimpedDateSeq(x)
implicit def pimpBooleanMap(x: Map[String, Boolean]) = new PimpedBooleanMap(x)
implicit def pimpIntMap(x: Map[String, Int]) = new PimpedIntMap(x)
implicit def pimpLongMap(x: Map[String, Long]) = new PimpedLongMap(x)
implicit def pimpDoubleMap(x: Map[String, Double]) = new PimpedDoubleMap(x)
implicit def pimpStringMap(x: Map[String, String]) = new PimpedStringMap(x)
implicit def pimpDateMap(x: Map[String, Date]) = new PimpedDateMap(x)
implicit def pimpBooleanMutableMap(x: collection.mutable.Map[String, Boolean]) = new PimpedBooleanMutableMap(x)
implicit def pimpIntMutableMap(x: collection.mutable.Map[String, Int]) = new PimpedIntMutableMap(x)
implicit def pimpLongMutableMap(x: collection.mutable.Map[String, Long]) = new PimpedLongMutableMap(x)
implicit def pimpDoubleMutableMap(x: collection.mutable.Map[String, Double]) = new PimpedDoubleMutableMap(x)
implicit def pimpStringMutableMap(x: collection.mutable.Map[String, String]) = new PimpedStringMutableMap(x)
implicit def pimpDateMutableMap(x: collection.mutable.Map[String, Date]) = new PimpedDateMutableMap(x)
implicit def json2Boolean(x: JsBoolean) = x.value
implicit def json2Int(x: JsInt) = x.value
implicit def json2Long(x: JsLong) = x.value
implicit def json2Double(x: JsDouble) = x.value
implicit def json2String(x: JsString) = x.value
implicit def json2Date(x: JsDate) = x.value
implicit def json2UUID(x: JsUUID) = x.value
implicit def json2Binary(x: JsBinary) = x.value
implicit def json2Boolean(json: JsValue): Boolean = json.asBoolean
implicit def json2Int(json: JsValue): Int = json.asInt
implicit def json2Long(json: JsValue): Long = json.asLong
implicit def json2Double(json: JsValue): Double = json.asDouble
implicit def json2Date(json: JsValue): Date = json.asDate
implicit def json2String(json: JsValue): String = json.toString
implicit def json2ByteArray(json: JsValue): Array[Byte] = json match {
case JsBinary(x) => x
case _ => throw new UnsupportedOperationException("convert JsValue to Array[Byte]")
}
}
package json {
private[json] class PimpedString(string: String) {
def parseJson: JsValue = JsonParser(string)
def parseJsObject: JsObject = parseJson.asInstanceOf[JsObject]
}
private[json] class PimpedBooleanSeq(seq: Seq[Boolean]) {
def toJsArray: JsArray = JsArray(seq.map {e => JsBoolean(e)}: _*)
}
private[json] class PimpedIntSeq(seq: Seq[Int]) {
def toJsArray: JsArray = JsArray(seq.map {e => JsInt(e)}: _*)
}
private[json] class PimpedLongSeq(seq: Seq[Long]) {
def toJsArray: JsArray = JsArray(seq.map {e => JsLong(e)}: _*)
}
private[json] class PimpedDoubleSeq(seq: Seq[Double]) {
def toJsArray: JsArray = JsArray(seq.map {e => JsDouble(e)}: _*)
}
private[json] class PimpedStringSeq(seq: Seq[String]) {
def toJsArray: JsArray = JsArray(seq.map {e => JsString(e)}: _*)
}
private[json] class PimpedDateSeq(seq: Seq[Date]) {
def toJsArray: JsArray = JsArray(seq.map {e => JsDate(e)}: _*)
}
private[json] class PimpedBooleanMap(map: Map[String, Boolean]) {
def toJsObject: JsObject = JsObject(map.map { case (k, v) => (k, JsBoolean(v)) })
}
private[json] class PimpedIntMap(map: Map[String, Int]) {
def toJsObject: JsObject = JsObject(map.map { case (k, v) => (k, JsInt(v)) })
}
private[json] class PimpedLongMap(map: Map[String, Long]) {
def toJsObject: JsObject = JsObject(map.map { case (k, v) => (k, JsLong(v)) })
}
private[json] class PimpedDoubleMap(map: Map[String, Double]) {
def toJsObject: JsObject = JsObject(map.map { case (k, v) => (k, JsDouble(v)) })
}
private[json] class PimpedStringMap(map: Map[String, String]) {
def toJsObject: JsObject = JsObject(map.map { case (k, v) => (k, JsString(v)) })
}
private[json] class PimpedDateMap(map: Map[String, Date]) {
def toJsObject: JsObject = JsObject(map.map { case (k, v) => (k, JsDate(v)) })
}
private[json] class PimpedBooleanMutableMap(map: collection.mutable.Map[String, Boolean]) {
def toJsObject: JsObject = JsObject(map.map { case (k, v) =>
val js: JsValue = JsBoolean(v)
(k, js)
})
}
private[json] class PimpedIntMutableMap(map: collection.mutable.Map[String, Int]) {
def toJsObject: JsObject = JsObject(map.map { case (k, v) =>
val js: JsValue = JsInt(v)
(k, js)
})
}
private[json] class PimpedLongMutableMap(map: collection.mutable.Map[String, Long]) {
def toJsObject: JsObject = JsObject(map.map { case (k, v) =>
val js: JsValue = JsLong(v)
(k, js)
})
}
private[json] class PimpedDoubleMutableMap(map: collection.mutable.Map[String, Double]) {
def toJsObject: JsObject = JsObject(map.map { case (k, v) =>
val js: JsValue = JsDouble(v)
(k, js)
})
}
private[json] class PimpedStringMutableMap(map: collection.mutable.Map[String, String]) {
def toJsObject: JsObject = JsObject(map.map { case (k, v) =>
val js: JsValue = JsString(v)
(k, js)
})
}
private[json] class PimpedDateMutableMap(map: collection.mutable.Map[String, Date]) {
def toJsObject: JsObject = JsObject(map.map { case (k, v) =>
val js: JsValue = JsDate(v)
(k, js)
})
}
}
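/**
 * A small usage sketch (added for illustration; not part of the original source).
 * It exercises only the helpers defined in this file.
 */
object JsonUsageSketch {
  import json._

  def demo(): JsObject = {
    val fromInterpolator = json"""{ "name": "Joe", "active": true }"""
    val fromMap = Map("x" -> 1, "y" -> 2).toJsObject // via PimpedIntMap
    val parsed = """{"z": 3}""".parseJsObject        // via PimpedString
    JsObject("a" -> fromMap, "b" -> parsed, "c" -> fromInterpolator)
  }
}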
| adplabs/unicorn | json/src/main/scala/unicorn/json/package.scala | Scala | apache-2.0 | 8,310 |
/******************************************************************************
* Copyright (c) 2014, Equal Experts Ltd
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
* PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER
* OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* The views and conclusions contained in the software and documentation
* are those of the authors and should not be interpreted as representing
* official policies, either expressed or implied, of the Midas Project.
******************************************************************************/
package com.ee.midas.utils
import org.specs2.mutable.Specification
import java.io.File
import java.util.concurrent.TimeUnit
import org.junit.runner.RunWith
import org.specs2.runner.JUnitRunner
import org.specs2.mock.Mockito
@RunWith(classOf[JUnitRunner])
class FileWatcherSpecs extends Specification with Mockito {
def waitForWatcherToWatch(millis: Int) = Thread.sleep(millis)
"File Watcher" should {
"start watching file and execute the given action when file is modified" in {
//given
val file = mock[File]
file.exists() returns true
file.getAbsolutePath returns "/some/path"
file.lastModified() returns 1 thenReturn 2
var timesExecuted = 0
val watcher = new FileWatcher(file, 10, TimeUnit.MILLISECONDS)(
timesExecuted += 1
)
//when
watcher.start
//then: the action was performed once
waitForWatcherToWatch(50)
watcher.stop
timesExecuted mustEqual 1
}
"watch file and execute given action twice as the file is modified twice" in {
//given: A file
val file = mock[File]
file.exists() returns true
file.getAbsolutePath returns "/some/path"
file.lastModified returns 1 thenReturns 2 thenReturns 3
//and: a FileWatcher running on it
var timesExecuted = 0
val watcher = new FileWatcher(file, 10, TimeUnit.MILLISECONDS)(
timesExecuted += 1
)
//when
watcher.start
//then
waitForWatcherToWatch(50)
watcher.stop
timesExecuted mustEqual 2
}
"watch file and never execute given action when file is never modified" in {
//given: A file
val file = mock[File]
file.exists() returns true
file.getAbsolutePath returns "/some/path"
file.lastModified returns 1
//and: a FileWatcher running on it
var timesExecuted = 0
val watcher = new FileWatcher(file, 10, TimeUnit.MILLISECONDS)(
timesExecuted += 1
)
//when
watcher.start
//then
waitForWatcherToWatch(50)
watcher.stop
timesExecuted mustEqual 0
}
"watch file and never execute again if given action throws an exception" in {
//given: A file
val file = mock[File]
file.exists() returns true
file.getAbsolutePath returns "/some/path"
file.lastModified returns 1 thenReturns 2 thenReturns 3
//and: a FileWatcher running on it
var timesExecuted = 0
val watcher = new FileWatcher(file, 10, TimeUnit.MILLISECONDS) ({
timesExecuted += 1
throw new IllegalArgumentException("on purpose")
})
//when
watcher.start
//then
waitForWatcherToWatch(50)
watcher.stop
timesExecuted mustEqual 1
}
"watch file and continue execution even if given action throws an exception" in {
//given: A file
val file = mock[File]
file.exists() returns true
file.getAbsolutePath returns "/some/path"
file.lastModified returns 1 thenReturns 2 thenReturns 3
//and: a FileWatcher running on it
var timesExecuted = 0
val watcher = new FileWatcher(file, 10, TimeUnit.MILLISECONDS, stopOnException = false) ({
timesExecuted += 1
throw new IllegalArgumentException("on purpose")
})
//when
watcher.start
//then
waitForWatcherToWatch(50)
watcher.stop
timesExecuted mustEqual 2
}
}
}
| EqualExperts/Midas | src/test/scala/com/ee/midas/utils/FileWatcherSpecs.scala | Scala | bsd-2-clause | 5,173 |
// These are meant to be typed into the REPL. You can also run
// scala -Xnojline < repl-session.scala to run them all at once.
// An invariant Pair class
class Pair[T](var first: T, var second: T) {
override def toString = "(" + first + "," + second + ")"
}
class Person(val name: String) {
override def toString = getClass.getName + " " + name
}
class Student(name: String) extends Person(name)
def makeFriends(p: Pair[_ <: Person]) =
p.first.name + " and " + p.second.name + " are now friends."
val fred = new Student("Fred")
val wilma = new Student("Wilma")
val studentPair = new Pair(fred, wilma)
makeFriends(studentPair) // OK
import java.util.Comparator
def min[T](p: Pair[T])(comp: Comparator[_ >: T]) =
if (comp.compare(p.first, p.second) < 0) p.first else p.second
// Just a silly example
val sillyHashComp = new Comparator[Object] {
def compare(a: Object, b: Object) = a.hashCode() - b.hashCode()
}
"Fred".hashCode
"Wilma".hashCode
// Note that the comparator uses a supertype of T = String
min(new Pair("Fred", "Wilma"))(sillyHashComp)
// This should work, but it doesn't in Scala 2.12
def min[T <: Comparable[_ >: T]](p: Pair[T]) =
if (p.first.compareTo(p.second) < 0) p.first else p.second
// Here is a workaround
type SuperComparable[T] = Comparable[_ >: T]
def min[T <: SuperComparable[T]](p: Pair[T]) =
if (p.first.compareTo(p.second) < 0) p.first else p.second
class Person(val name: String) extends Comparable[Person] {
override def toString = getClass.getName + " " + name
def compareTo(other: Person) = name.compareTo(other.name)
}
class Student(name: String) extends Person(name)
// Note that Student <: Comparable[Person]
val fred = new Student("Fred")
val wilma = new Student("Wilma")
val studentPair = new Pair(fred, wilma)
min(studentPair)
| yeahnoob/scala-impatient-2e-code | src/ch18/sec12/repl-session.scala | Scala | gpl-3.0 | 1,809 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.metric
import java.io.File
import scala.collection.mutable.HashMap
import org.apache.spark.TestUtils
import org.apache.spark.scheduler.{SparkListener, SparkListenerTaskEnd}
import org.apache.spark.sql.DataFrame
import org.apache.spark.sql.catalyst.TableIdentifier
import org.apache.spark.sql.execution.SparkPlanInfo
import org.apache.spark.sql.execution.ui.{SparkPlanGraph, SQLAppStatusStore}
import org.apache.spark.sql.internal.SQLConf.WHOLESTAGE_CODEGEN_ENABLED
import org.apache.spark.sql.test.SQLTestUtils
trait SQLMetricsTestUtils extends SQLTestUtils {
import testImplicits._
protected def currentExecutionIds(): Set[Long] = {
spark.sparkContext.listenerBus.waitUntilEmpty(10000)
statusStore.executionsList.map(_.executionId).toSet
}
protected def statusStore: SQLAppStatusStore = spark.sharedState.statusStore
// Pattern of size SQLMetric value, e.g. "\\n96.2 MiB (32.1 MiB, 32.1 MiB, 32.1 MiB)"
protected val sizeMetricPattern = {
val bytes = "([0-9]+(\\\\.[0-9]+)?) (EiB|PiB|TiB|GiB|MiB|KiB|B)"
s"\\\\n$bytes \\\\($bytes, $bytes, $bytes\\\\)"
}
// Pattern of timing SQLMetric value, e.g. "\\n2.0 ms (1.0 ms, 1.0 ms, 1.0 ms)"
protected val timingMetricPattern = {
val duration = "([0-9]+(\\\\.[0-9]+)?) (ms|s|m|h)"
s"\\\\n$duration \\\\($duration, $duration, $duration\\\\)"
}
/**
* Get execution metrics for the SQL execution and verify metrics values.
*
* @param metricsValues the expected metric values (numFiles, numPartitions, numOutputRows).
   * @param func the function that, once run, produces the SQL execution to verify.
*/
private def verifyWriteDataMetrics(metricsValues: Seq[Int])(func: => Unit): Unit = {
val previousExecutionIds = currentExecutionIds()
// Run the given function to trigger query execution.
func
spark.sparkContext.listenerBus.waitUntilEmpty(10000)
val executionIds = currentExecutionIds().diff(previousExecutionIds)
assert(executionIds.size == 1)
val executionId = executionIds.head
val executedNode = statusStore.planGraph(executionId).nodes.head
val metricsNames = Seq(
"number of written files",
"number of dynamic part",
"number of output rows")
val metrics = statusStore.executionMetrics(executionId)
metricsNames.zip(metricsValues).foreach { case (metricsName, expected) =>
val sqlMetric = executedNode.metrics.find(_.name == metricsName)
assert(sqlMetric.isDefined)
val accumulatorId = sqlMetric.get.accumulatorId
val metricValue = metrics(accumulatorId).replaceAll(",", "").toInt
assert(metricValue == expected)
}
val totalNumBytesMetric = executedNode.metrics.find(
_.name == "written output total (min, med, max)").get
val totalNumBytes = metrics(totalNumBytesMetric.accumulatorId).replaceAll(",", "")
.split(" ").head.trim.toDouble
assert(totalNumBytes > 0)
}
protected def testMetricsNonDynamicPartition(
dataFormat: String,
tableName: String): Unit = {
withTable(tableName) {
Seq((1, 2)).toDF("i", "j")
.write.format(dataFormat).mode("overwrite").saveAsTable(tableName)
val tableLocation =
new File(spark.sessionState.catalog.getTableMetadata(TableIdentifier(tableName)).location)
// 2 files, 100 rows, 0 dynamic partition.
verifyWriteDataMetrics(Seq(2, 0, 100)) {
(0 until 100).map(i => (i, i + 1)).toDF("i", "j").repartition(2)
.write.format(dataFormat).mode("overwrite").insertInto(tableName)
}
assert(TestUtils.recursiveList(tableLocation).count(_.getName.startsWith("part-")) == 2)
}
}
protected def testMetricsDynamicPartition(
provider: String,
dataFormat: String,
tableName: String): Unit = {
withTable(tableName) {
withTempPath { dir =>
spark.sql(
s"""
|CREATE TABLE $tableName(a int, b int)
|USING $provider
|PARTITIONED BY(a)
|LOCATION '${dir.toURI}'
""".stripMargin)
val table = spark.sessionState.catalog.getTableMetadata(TableIdentifier(tableName))
assert(table.location == makeQualifiedPath(dir.getAbsolutePath))
val df = spark.range(start = 0, end = 40, step = 1, numPartitions = 1)
.selectExpr("id a", "id b")
// 40 files, 80 rows, 40 dynamic partitions.
verifyWriteDataMetrics(Seq(40, 40, 80)) {
df.union(df).repartition(2, $"a")
.write
.format(dataFormat)
.mode("overwrite")
.insertInto(tableName)
}
assert(TestUtils.recursiveList(dir).count(_.getName.startsWith("part-")) == 40)
}
}
}
/**
* Call `df.collect()` and collect necessary metrics from execution data.
*
* @param df `DataFrame` to run
* @param expectedNumOfJobs number of jobs that will run
* @param expectedNodeIds the node ids of the metrics to collect from execution data.
* @param enableWholeStage enable whole-stage code generation or not.
*/
protected def getSparkPlanMetrics(
df: DataFrame,
expectedNumOfJobs: Int,
expectedNodeIds: Set[Long],
enableWholeStage: Boolean = false): Option[Map[Long, (String, Map[String, Any])]] = {
val previousExecutionIds = currentExecutionIds()
withSQLConf(WHOLESTAGE_CODEGEN_ENABLED.key -> enableWholeStage.toString) {
df.collect()
}
sparkContext.listenerBus.waitUntilEmpty(10000)
val executionIds = currentExecutionIds().diff(previousExecutionIds)
assert(executionIds.size === 1)
val executionId = executionIds.head
val jobs = statusStore.execution(executionId).get.jobs
// Use "<=" because there is a race condition that we may miss some jobs
// TODO Change it to "=" once we fix the race condition that missing the JobStarted event.
assert(jobs.size <= expectedNumOfJobs)
if (jobs.size == expectedNumOfJobs) {
// If we can track all jobs, check the metric values
val metricValues = statusStore.executionMetrics(executionId)
val metrics = SparkPlanGraph(SparkPlanInfo.fromSparkPlan(
df.queryExecution.executedPlan)).allNodes.filter { node =>
expectedNodeIds.contains(node.id)
}.map { node =>
val nodeMetrics = node.metrics.map { metric =>
val metricValue = metricValues(metric.accumulatorId)
(metric.name, metricValue)
}.toMap
(node.id, node.name -> nodeMetrics)
}.toMap
Some(metrics)
} else {
// TODO Remove this "else" once we fix the race condition that missing the JobStarted event.
// Since we cannot track all jobs, the metric values could be wrong and we should not check
// them.
logWarning("Due to a race condition, we miss some jobs and cannot verify the metric values")
None
}
}
/**
* Call `df.collect()` and verify if the collected metrics are same as "expectedMetrics".
*
* @param df `DataFrame` to run
* @param expectedNumOfJobs number of jobs that will run
* @param expectedMetrics the expected metrics. The format is
* `nodeId -> (operatorName, metric name -> metric value)`.
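   *                        For example (illustrative only):
   *                        `Map(1L -> ("Filter", Map("number of output rows" -> 10L)))`.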
*/
protected def testSparkPlanMetrics(
df: DataFrame,
expectedNumOfJobs: Int,
expectedMetrics: Map[Long, (String, Map[String, Any])]): Unit = {
val expectedMetricsPredicates = expectedMetrics.mapValues { case (nodeName, nodeMetrics) =>
(nodeName, nodeMetrics.mapValues(expectedMetricValue =>
(actualMetricValue: Any) => expectedMetricValue.toString === actualMetricValue))
}
testSparkPlanMetricsWithPredicates(df, expectedNumOfJobs, expectedMetricsPredicates)
}
/**
* Call `df.collect()` and verify if the collected metrics satisfy the specified predicates.
* @param df `DataFrame` to run
* @param expectedNumOfJobs number of jobs that will run
* @param expectedMetricsPredicates the expected metrics predicates. The format is
* `nodeId -> (operatorName, metric name -> metric predicate)`.
* @param enableWholeStage enable whole-stage code generation or not.
*/
protected def testSparkPlanMetricsWithPredicates(
df: DataFrame,
expectedNumOfJobs: Int,
expectedMetricsPredicates: Map[Long, (String, Map[String, Any => Boolean])],
enableWholeStage: Boolean = false): Unit = {
val optActualMetrics =
getSparkPlanMetrics(df, expectedNumOfJobs, expectedMetricsPredicates.keySet, enableWholeStage)
optActualMetrics.foreach { actualMetrics =>
assert(expectedMetricsPredicates.keySet === actualMetrics.keySet)
for ((nodeId, (expectedNodeName, expectedMetricsPredicatesMap))
<- expectedMetricsPredicates) {
val (actualNodeName, actualMetricsMap) = actualMetrics(nodeId)
assert(expectedNodeName === actualNodeName)
for ((metricName, metricPredicate) <- expectedMetricsPredicatesMap) {
assert(metricPredicate(actualMetricsMap(metricName)),
s"$nodeId / '$metricName' (= ${actualMetricsMap(metricName)}) did not match predicate.")
}
}
}
}
}
object InputOutputMetricsHelper {
private class InputOutputMetricsListener extends SparkListener {
private case class MetricsResult(
var recordsRead: Long = 0L,
var shuffleRecordsRead: Long = 0L,
var sumMaxOutputRows: Long = 0L)
private[this] val stageIdToMetricsResult = HashMap.empty[Int, MetricsResult]
def reset(): Unit = {
stageIdToMetricsResult.clear()
}
/**
* Return a list of recorded metrics aggregated per stage.
*
* The list is sorted in the ascending order on the stageId.
* For each recorded stage, the following tuple is returned:
* - sum of inputMetrics.recordsRead for all the tasks in the stage
* - sum of shuffleReadMetrics.recordsRead for all the tasks in the stage
* - sum of the highest values of "number of output rows" metric for all the tasks in the stage
*/
def getResults(): List[(Long, Long, Long)] = {
stageIdToMetricsResult.keySet.toList.sorted.map { stageId =>
val res = stageIdToMetricsResult(stageId)
(res.recordsRead, res.shuffleRecordsRead, res.sumMaxOutputRows)
}
}
override def onTaskEnd(taskEnd: SparkListenerTaskEnd): Unit = synchronized {
val res = stageIdToMetricsResult.getOrElseUpdate(taskEnd.stageId, MetricsResult())
res.recordsRead += taskEnd.taskMetrics.inputMetrics.recordsRead
res.shuffleRecordsRead += taskEnd.taskMetrics.shuffleReadMetrics.recordsRead
var maxOutputRows = 0L
for (accum <- taskEnd.taskMetrics.externalAccums) {
val info = accum.toInfo(Some(accum.value), None)
if (info.name.toString.contains("number of output rows")) {
info.update match {
case Some(n: Number) =>
if (n.longValue() > maxOutputRows) {
maxOutputRows = n.longValue()
}
case _ => // Ignore.
}
}
}
res.sumMaxOutputRows += maxOutputRows
}
}
// Run df.collect() and return aggregated metrics for each stage.
def run(df: DataFrame): List[(Long, Long, Long)] = {
val spark = df.sparkSession
val sparkContext = spark.sparkContext
val listener = new InputOutputMetricsListener()
sparkContext.addSparkListener(listener)
try {
sparkContext.listenerBus.waitUntilEmpty(5000)
listener.reset()
df.collect()
sparkContext.listenerBus.waitUntilEmpty(5000)
} finally {
sparkContext.removeSparkListener(listener)
}
listener.getResults()
}
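  // Illustrative call from a test body (the query itself is hypothetical):
  //   val perStage = InputOutputMetricsHelper.run(spark.table("src").groupBy("key").count())
  //   // each tuple is (recordsRead, shuffleRecordsRead, sumMaxOutputRows) for one stage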
}
| caneGuy/spark | sql/core/src/test/scala/org/apache/spark/sql/execution/metric/SQLMetricsTestUtils.scala | Scala | apache-2.0 | 12,493 |
package ch.ninecode.cim.tool
trait ProgramaticName
{
def name: String
    def prefix (s: String): String = if (s.nonEmpty && s.charAt(0).isDigit) "_" else ""
def substitute (s: String): String = s.replace(" ", "_").replace("-", "_").replace("/", "_").replace(".", "_").replace(",", "_")
lazy val valid_class_name: String =
{
val identifier = s"${prefix(name)}${substitute(name)}"
name match
{
// ToDo: all keywords
case "type" => "`type`"
case "val" => "`val`"
case "class" => "`class`"
case "%" => "percent"
case "length" => "len"
case "size" => "size1"
case "lock" => "lock1"
case "switch" => "`switch`"
case "char" => "`char`"
case "default" => "`default`"
case "native" => "`native`"
case "Boolean" => "`Boolean`"
case "String" => "`String`"
case "Unit" => "Unit_"
case "" => "unknown" // ToDo: WTF?
case _ =>
if (identifier.endsWith("_")) s"${identifier}1" else identifier
}
}
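    // Illustrative expectations for the mapping above (assumed examples, not from the original source):
    //   "type"      -> "`type`"      (Scala keyword gets back-quoted)
    //   "3 phase"   -> "_3_phase"    (leading digit gets a '_' prefix, spaces become '_')
    //   "trailing_" -> "trailing_1"  (identifiers ending in '_' get a trailing '1')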
lazy val valid_attribute_name: String =
{
val identifier = s"${prefix(name)}${substitute(name)}"
name match
{
// ToDo: all keywords
case "type" => "`type`"
case "val" => "`val`"
case "class" => "`class`"
case "%" => "percent"
case "length" => "len"
case "size" => "size1"
case "lock" => "lock1"
case "switch" => "`switch`"
case "char" => "`char`"
case "default" => "`default`"
case "native" => "`native`"
case "" => "unknown" // ToDo: WTF?
case _ =>
if (identifier.endsWith("_")) s"${identifier}1" else identifier
}
}
}
| derrickoswald/CIMScala | CIMTool/src/main/scala/ch/ninecode/cim/tool/ProgramaticName.scala | Scala | mit | 1,893 |
package com.codahale.jerkson.util
import com.codahale.jerkson.util.scalax.rules.scalasig._
import com.fasterxml.jackson.databind.JavaType
import com.fasterxml.jackson.databind.`type`.TypeFactory
import scala.reflect.ScalaSignature
import scala.reflect.internal.pickling.ByteCodecs
class MissingPickledSig(clazz: Class[_]) extends Error("Failed to parse pickled Scala signature from: %s".format(clazz))
class MissingExpectedType(clazz: Class[_]) extends Error(
"Parsed pickled Scala signature, but no expected type found: %s"
.format(clazz)
)
object CaseClassSigParser {
val SCALA_SIG = "ScalaSig"
val SCALA_SIG_ANNOTATION = "Lscala/reflect/ScalaSignature;"
val BYTES_VALUE = "bytes"
private def parseClassFileFromByteCode(clazz: Class[_]): Option[ClassFile] = try {
// taken from ScalaSigParser parse method with the explicit purpose of walking away from NPE
val byteCode = ByteCode.forClass(clazz)
Option(ClassFileParser.parse(byteCode))
}
catch {
case e: NullPointerException => None // yes, this is the exception, but it is totally unhelpful to the end user
}
private def parseByteCodeFromAnnotation(clazz: Class[_]): Option[ByteCode] = {
if (clazz.isAnnotationPresent(classOf[ScalaSignature])) {
val sig = clazz.getAnnotation(classOf[ScalaSignature])
val bytes = sig.bytes.getBytes("UTF-8")
val len = ByteCodecs.decode(bytes)
Option(ByteCode(bytes.take(len)))
} else {
None
}
}
private def parseScalaSig(_clazz: Class[_], classLoader: ClassLoader): Option[ScalaSig] = {
val clazz = findRootClass(_clazz, classLoader)
parseClassFileFromByteCode(clazz).map(ScalaSigParser.parse(_)).getOrElse(None) orElse
parseByteCodeFromAnnotation(clazz).map(ScalaSigAttributeParsers.parse(_)) orElse
None
}
protected def findRootClass(klass: Class[_], classLoader: ClassLoader) =
    loadClass(klass.getName.split("\\$").head, classLoader)
protected def simpleName(klass: Class[_]) =
    klass.getName.split("\\$").last
protected def findSym[A](clazz: Class[A], classLoader: ClassLoader) = {
val name = simpleName(clazz)
val pss = parseScalaSig(clazz, classLoader)
pss match {
case Some(x) => {
val topLevelClasses = x.topLevelClasses
topLevelClasses.headOption match {
case Some(tlc) => {
tlc
}
case None => {
val topLevelObjects = x.topLevelObjects
topLevelObjects.headOption match {
case Some(tlo) => {
x.symbols.find { s => !s.isModule && s.name == name } match {
case Some(s) => s.asInstanceOf[ClassSymbol]
case None => throw new MissingExpectedType(clazz)
}
}
case _ => throw new MissingExpectedType(clazz)
}
}
}
}
case None => throw new MissingPickledSig(clazz)
}
}
def parse[A](clazz: Class[A], factory: TypeFactory, classLoader: ClassLoader) = {
findSym(clazz, classLoader).children.filter(c => c.isCaseAccessor && !c.isPrivate)
.flatMap { ms =>
ms.asInstanceOf[MethodSymbol].infoType match {
case NullaryMethodType(t: TypeRefType) => ms.name -> typeRef2JavaType(t, factory, classLoader) :: Nil
case _ => Nil
}
}
}
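  // Illustrative call (the case class is hypothetical, not part of this file):
  //   case class Person(name: String, age: Int)
  //   parse(classOf[Person], TypeFactory.defaultInstance(), getClass.getClassLoader)
  //   // => Seq(("name", <JavaType of String>), ("age", <JavaType of Int>))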
protected def typeRef2JavaType(ref: TypeRefType, factory: TypeFactory, classLoader: ClassLoader): JavaType = {
try {
if (ref.symbol.path == "scala.Array") {
val elementType = typeRef2JavaType(ref.typeArgs.head.asInstanceOf[TypeRefType], factory, classLoader)
val realElementType = elementType.getRawClass.getName match {
case "java.lang.Boolean" => classOf[Boolean]
case "java.lang.Byte" => classOf[Byte]
case "java.lang.Character" => classOf[Char]
case "java.lang.Double" => classOf[Double]
case "java.lang.Float" => classOf[Float]
case "java.lang.Integer" => classOf[Int]
case "java.lang.Long" => classOf[Long]
case "java.lang.Short" => classOf[Short]
case _ => elementType.getRawClass
}
val array = java.lang.reflect.Array.newInstance(realElementType, 0)
factory.constructType(array.getClass)
} else {
val klass = loadClass(ref.symbol.path, classLoader)
factory.constructParametricType(
klass, ref.typeArgs.map {
t => typeRef2JavaType(t.asInstanceOf[TypeRefType], factory, classLoader)
}: _*
)
}
} catch {
case e: Throwable => {
e.printStackTrace()
null
}
}
}
protected def loadClass(path: String, classLoader: ClassLoader) = path match {
case "scala.Predef.Map" => classOf[Map[_, _]]
case "scala.Predef.Set" => classOf[Set[_]]
case "scala.Predef.String" => classOf[String]
case "scala.package.List" => classOf[List[_]]
case "scala.package.Seq" => classOf[Seq[_]]
case "scala.package.Sequence" => classOf[Seq[_]]
case "scala.package.Collection" => classOf[Seq[_]]
case "scala.package.IndexedSeq" => classOf[IndexedSeq[_]]
case "scala.package.RandomAccessSeq" => classOf[IndexedSeq[_]]
case "scala.package.Iterable" => classOf[Iterable[_]]
case "scala.package.Iterator" => classOf[Iterator[_]]
case "scala.package.Vector" => classOf[Vector[_]]
case "scala.package.BigDecimal" => classOf[BigDecimal]
case "scala.package.BigInt" => classOf[BigInt]
case "scala.package.Integer" => classOf[java.lang.Integer]
case "scala.package.Character" => classOf[java.lang.Character]
case "scala.Long" => classOf[java.lang.Long]
case "scala.Int" => classOf[java.lang.Integer]
case "scala.Boolean" => classOf[java.lang.Boolean]
case "scala.Short" => classOf[java.lang.Short]
case "scala.Byte" => classOf[java.lang.Byte]
case "scala.Float" => classOf[java.lang.Float]
case "scala.Double" => classOf[java.lang.Double]
case "scala.Char" => classOf[java.lang.Character]
case "scala.Any" => classOf[Any]
case "scala.AnyRef" => classOf[AnyRef]
case name => classLoader.loadClass(name)
}
}
| gilt/jerkson | src/main/scala/com/codahale/jerkson/util/CaseClassSigParser.scala | Scala | mit | 6,178 |
/**
* Copyright (C) 2010-2011 LShift Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.lshift.diffa.kernel.notifications
import net.lshift.diffa.kernel.config.User
/**
* Provides an abstraction for different types of notification appenders
*/
trait NotificationProvider {
/**
* Emit an event that should get relayed to a downstream recipient
*/
def notify(event:NotificationEvent, user:String) : Unit
} | lshift/diffa | kernel/src/main/scala/net/lshift/diffa/kernel/notifications/NotificationProvider.scala | Scala | apache-2.0 | 981 |
//
// Game Gardens - a platform for hosting simple multiplayer Java games
// Copyright (c) 2005-2013, Three Rings Design, Inc. - All rights reserved.
// https://github.com/threerings/game-gardens/blob/master/LICENSE
package com.threerings.gardens.web.logic
import java.io.File
import java.security.MessageDigest
import java.sql.Date
import java.util.Iterator
import javax.servlet.http.HttpServletRequest
import com.google.inject.Inject
import com.samskivert.servlet.user.User
import com.samskivert.servlet.util.FriendlyException
import com.samskivert.velocity.InvocationContext
import org.apache.commons.fileupload.FileItem
import org.apache.commons.fileupload.disk.DiskFileItemFactory
import org.apache.commons.fileupload.servlet.ServletFileUpload
import com.threerings.getdown.data.Resource
import com.threerings.toybox.data.GameDefinition
import com.threerings.toybox.server.ToyBoxConfig
import com.threerings.toybox.server.persist.GameRecord
import com.threerings.gardens.web.GardensApp
/** Handles the updating of a game's jar file. */
class upload_jar @Inject() (config :ToyBoxConfig) extends UserLogic {
override def invoke (ctx :InvocationContext, app :GardensApp, user :User) {
val req = ctx.getRequest
// TODO: check disk usage, set max size to current quota
val fact = new DiskFileItemFactory(4096, new File("/tmp"))
val fu = new ServletFileUpload(fact)
fu.setSizeMax(MAX_GAME_JAR_SIZE)
val iter = fu.parseRequest(req).iterator
// the first item should be the gameid
val item = iter.next.asInstanceOf[FileItem]
if (item == null || !item.getFieldName.equals("gameid")) {
_log.warning(s"upload_jar: Invalid first item: ${toString(item)}.")
throw new FriendlyException("error.internal_error")
}
val gameId = try {
Integer.parseInt(item.getString)
} catch {
case e :Exception => throw new FriendlyException("error.invalid_gameid")
}
// now load up the associated game record
val game = if (gameId == 0) null else app.toyBoxRepo.loadGame(gameId)
if (game == null) {
throw new FriendlyException("error.no_such_game")
}
// we'll use this later
ctx.put("gameid", gameId)
// get a handle on the game definition
val gamedef = game.parseGameDefinition
val md = MessageDigest.getInstance("MD5")
// TODO: put game jars in gameId subdirectories
// determine where we will be uploading the jar file
val gdir = config.getResourceDir
_log.info(s"Uploading jar for '${gamedef.ident}'.")
// the next item should be the jar file itself
val jitem = iter.next.asInstanceOf[FileItem]
if (jitem == null || !jitem.getFieldName.equals("jar")) {
_log.warning(s"upload_jar: Invalid second item: ${toString(item)}.")
throw new FriendlyException("error.internal_error")
}
    if (jitem.getSize == 0) {
throw new FriendlyException("upload_jar.error.missing_jar")
}
val jar = new File(gdir, gamedef.getMediaPath(gameId))
jitem.write(jar)
_log.info(s"Wrote $jar.")
// compute the digest
val digest = Resource.computeDigest(jar, md, null)
if (!digest.equals(game.digest)) {
game.digest = digest
game.lastUpdated = new Date(System.currentTimeMillis)
// if the game was pending upgrade it to ready now that it has
// a jar file
if (game.getStatus == GameRecord.Status.PENDING) {
game.setStatus(GameRecord.Status.READY)
}
// finally update the game record
app.toyBoxRepo.updateGame(game)
}
ctx.put("status", "upload_jar.updated")
}
protected def toString (item :FileItem) = item match {
case null => "null"
case _ => s"[field=${item.getFieldName}, size=${item.getSize}, type=${item.getContentType}]"
}
/** TODO: move this into the config. */
protected final val MAX_GAME_JAR_SIZE = 1024 * 1024 * 1024
private val _log = java.util.logging.Logger.getLogger("gardens")
}
| threerings/game-gardens | server/src/main/scala/com/threerings/gardens/web/logic/upload_jar.scala | Scala | bsd-3-clause | 3,948 |
package sangria.parser
/** Lexical analysis tools described in the GraphQL specification. */
private[parser] object Lexical {
/** Regex that matches line termination sequences. */
  private[this] val newlineRegex = """\r\n|[\n\r]""".r
/** Produces the value of a block string from its parsed raw value, similar to Coffeescript's
* block string, Python's docstring trim or Ruby's strip_heredoc.
*
* This implements the GraphQL spec's BlockStringValue() static algorithm.
*
* @see
* [[https://spec.graphql.org/October2021/#BlockStringValue()]]
*/
def blockStringValue(rawValue: String): String = {
val lines = newlineRegex.split(rawValue)
val lineSizes = lines.map(l => l -> leadingWhitespace(l))
val commonIndentLines =
lineSizes.drop(1).collect { case (line, size) if size != line.length => size }
val strippedLines = if (commonIndentLines.nonEmpty) {
val commonIndent = commonIndentLines.min
lines.take(1) ++ lines.drop(1).map(_.drop(commonIndent))
} else lines
val trimmedLines = strippedLines.reverse.dropWhile(isBlank).reverse.dropWhile(isBlank)
    trimmedLines.mkString("\n")
}
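  // Rough illustration using the canonical example from the GraphQL spec (assumed here, not part
  // of the original file):
  //   blockStringValue("\n    Hello,\n      World!\n\n    Yours,\n      GraphQL.\n")
  //   // => "Hello,\n  World!\n\nYours,\n  GraphQL."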
private[this] def leadingWhitespace(str: String) = {
var i = 0
    while (i < str.length && (str.charAt(i) == ' ' || str.charAt(i) == '\t')) i += 1
i
}
private[this] def isBlank(str: String) = leadingWhitespace(str) == str.length
}
| sangria-graphql/sangria | modules/parser/src/main/scala/sangria/parser/Lexical.scala | Scala | apache-2.0 | 1,416 |
package polyite.fitness.classifier
import polyite.ScopInfo
import polyite.schedule.Dependence
import polyite.schedule.DomainCoeffInfo
import polyite.schedule.schedule_tree.util.SchedTreeUtil
import polyite.schedule.schedule_tree.ScheduleNode
import polyite.schedule.ScheduleSpaceUtils
import isl.Isl
import org.junit.Assert._
import polyite.schedule.schedule_tree.ScheduleTreeConstruction
import org.junit.Test
import polyite.util.SCoPMetrics
import polyite.AbstractTest
import polyite.fitness.ParallelLoops
class TestParallelLoops extends AbstractTest {
@Test
def testSeq() {
val scop : ScopInfo = new ScopInfo().setParams(isl.Set.readFromStr(Isl.ctx, "[n] -> { : 0 < n }"))
.addDomain(isl.Set.readFromStr(Isl.ctx, "[n] -> { S[i, j] : 0 <=i <= n and 0 <= j <= n }"))
.addSchedule(isl.Map.readFromStr(Isl.ctx, "[n] -> { S[i, j] -> [i, j] }"))
.addWrs(isl.Map.readFromStr(Isl.ctx, "[n] -> { S[i,j] -> A[i, j] }"))
.addRds(isl.Map.readFromStr(Isl.ctx, "[n] -> { S[i,j] -> A[i, j - 1] }"))
.addRds(isl.Map.readFromStr(Isl.ctx, "[n] -> { S[i,j] -> A[i - 1, j] }"))
val (deps : Set[Dependence], domInfo : DomainCoeffInfo) = ScheduleSpaceUtils.calcDepsAndDomInfo(scop)
val sched : ScheduleNode = SchedTreeUtil.markLoops(SchedTreeUtil.simplifySchedTree(ScheduleTreeConstruction.islUnionMap2BasicScheduleTree(scop.getSched, domInfo, scop, deps, false, true), deps))
val scopMetrics : SCoPMetrics = SCoPMetrics.apply(deps.size, 1, 0, 1, 2)
val fVal : Double = ParallelLoops.calc(sched, super.createTestConfig().get, scop, scopMetrics, domInfo, deps)
println(sched)
println("no parallelism: " + fVal)
assertEquals(0.0, fVal, 0)
}
@Test
def testMiddleParMustSplit() {
val scop : ScopInfo = new ScopInfo().setParams(isl.Set.readFromStr(Isl.ctx, "[n] -> { : 0 < n }"))
.addDomain(isl.Set.readFromStr(Isl.ctx, "[n] -> { S[i, j, k] : 0 <=i <= n and 0 <= j <= n and 0 <= k <= n }"))
.addSchedule(isl.Map.readFromStr(Isl.ctx, "[n] -> { S[i, j, k] -> [i, j, k] }"))
.addWrs(isl.Map.readFromStr(Isl.ctx, "[n] -> { S[i,j, k] -> A[0, j, 0] : i = 0 and k = 0 }"))
.addRds(isl.Map.readFromStr(Isl.ctx, "[n] -> { S[i,j, k] -> A[0, j, 0] }"))
val (deps : Set[Dependence], domInfo : DomainCoeffInfo) = ScheduleSpaceUtils.calcDepsAndDomInfo(scop)
val sched : ScheduleNode = SchedTreeUtil.markLoops(SchedTreeUtil.simplifySchedTree(ScheduleTreeConstruction.islUnionMap2BasicScheduleTree(scop.getSched, domInfo, scop, deps, false, true), deps))
val scopMetrics : SCoPMetrics = SCoPMetrics.apply(deps.size, 1, 0, 1, 2)
val fVal : Double = ParallelLoops.calc(sched, super.createTestConfig().get, scop, scopMetrics, domInfo, deps)
println(sched)
println("middle par must split: " + fVal)
assertEquals(2.0 / 3.0, fVal, 0)
}
@Test
def testInnerPar() {
val scop : ScopInfo = new ScopInfo().setParams(isl.Set.readFromStr(Isl.ctx, "[n] -> { : 0 < n }"))
.addDomain(isl.Set.readFromStr(Isl.ctx, "[n] -> { S[i, j] : 0 <=i <= n and 0 <= j <= n }"))
.addSchedule(isl.Map.readFromStr(Isl.ctx, "[n] -> { S[i, j] -> [i + j, j] }"))
.addWrs(isl.Map.readFromStr(Isl.ctx, "[n] -> { S[i,j] -> A[i, j] }"))
.addRds(isl.Map.readFromStr(Isl.ctx, "[n] -> { S[i,j] -> A[i, j - 1] }"))
.addRds(isl.Map.readFromStr(Isl.ctx, "[n] -> { S[i,j] -> A[i - 1, j] }"))
val (deps : Set[Dependence], domInfo : DomainCoeffInfo) = ScheduleSpaceUtils.calcDepsAndDomInfo(scop)
val sched : ScheduleNode = SchedTreeUtil.markLoops(SchedTreeUtil.simplifySchedTree(ScheduleTreeConstruction.islUnionMap2BasicScheduleTree(scop.getSched, domInfo, scop, deps, false, true), deps))
val scopMetrics : SCoPMetrics = SCoPMetrics.apply(deps.size, 1, 0, 1, 2)
val fVal : Double = ParallelLoops.calc(sched, super.createTestConfig().get, scop, scopMetrics, domInfo, deps)
println(sched)
println("inner parallel: " + fVal)
assertEquals(0.5, fVal, 0)
}
@Test
def testInnerParSkewed() {
val scop : ScopInfo = new ScopInfo().setParams(isl.Set.readFromStr(Isl.ctx, "[n] -> { : 0 < n }"))
.addDomain(isl.Set.readFromStr(Isl.ctx, "[n] -> { S[i, j] : 0 <=i <= n and 0 <= j <= n }"))
.addSchedule(isl.Map.readFromStr(Isl.ctx, "[n] -> { S[i, j] -> [i, j] }"))
.addWrs(isl.Map.readFromStr(Isl.ctx, "[n] -> { S[i,j] -> A[i, j] }"))
.addRds(isl.Map.readFromStr(Isl.ctx, "[n] -> { S[i,j] -> A[i - 1, j - 1] }"))
val (deps : Set[Dependence], domInfo : DomainCoeffInfo) = ScheduleSpaceUtils.calcDepsAndDomInfo(scop)
val sched : ScheduleNode = SchedTreeUtil.markLoops(SchedTreeUtil.simplifySchedTree(ScheduleTreeConstruction.islUnionMap2BasicScheduleTree(scop.getSched, domInfo, scop, deps, false, true), deps))
val scopMetrics : SCoPMetrics = SCoPMetrics.apply(deps.size, 1, 0, 1, 2)
val fVal : Double = ParallelLoops.calc(sched, super.createTestConfig().get, scop, scopMetrics, domInfo, deps)
println(sched)
println("inner parallel skewed: " + fVal)
assertEquals(0.5, fVal, 0)
}
@Test
def testOuterPar() {
val scop : ScopInfo = new ScopInfo().setParams(isl.Set.readFromStr(Isl.ctx, "[n] -> { : 0 < n }"))
.addDomain(isl.Set.readFromStr(Isl.ctx, "[n] -> { S[i, j] : 0 <=i <= n and 0 <= j <= n }"))
.addSchedule(isl.Map.readFromStr(Isl.ctx, "[n] -> { S[i, j] -> [i - j, j] }"))
.addWrs(isl.Map.readFromStr(Isl.ctx, "[n] -> { S[i,j] -> A[i, j] }"))
.addRds(isl.Map.readFromStr(Isl.ctx, "[n] -> { S[i,j] -> A[i - 1, j - 1] }"))
val (deps : Set[Dependence], domInfo : DomainCoeffInfo) = ScheduleSpaceUtils.calcDepsAndDomInfo(scop)
val sched : ScheduleNode = SchedTreeUtil.markLoops(SchedTreeUtil.simplifySchedTree(ScheduleTreeConstruction.islUnionMap2BasicScheduleTree(scop.getSched, domInfo, scop, deps, false, true), deps))
val scopMetrics : SCoPMetrics = SCoPMetrics.apply(deps.size, 1, 0, 1, 2)
val fVal : Double = ParallelLoops.calc(sched, super.createTestConfig().get, scop, scopMetrics, domInfo, deps)
println(sched)
println("inner parallel: " + fVal)
assertEquals(1, fVal, 0)
}
@Test
def testWithSeqOneInnerPar() {
val scop : ScopInfo = new ScopInfo().setParams(isl.Set.readFromStr(Isl.ctx, "[n] -> { : 0 < n }"))
.addDomain(isl.Set.readFromStr(Isl.ctx, "[n] -> { S[i, j] : 0 <=i <= n and 0 <= j <= n }"))
.addSchedule(isl.Map.readFromStr(Isl.ctx, "[n] -> { S[i, j] -> [i, 0, j] }"))
.addWrs(isl.Map.readFromStr(Isl.ctx, "[n] -> { S[i,j] -> A[i, j] }"))
.addRds(isl.Map.readFromStr(Isl.ctx, "[n] -> { S[i,j] -> A[i - 1, j] }"))
.addDomain(isl.Set.readFromStr(Isl.ctx, "[n] -> { T[i, j] : 0 <=i <= n and 0 <= j <= n }"))
.addSchedule(isl.Map.readFromStr(Isl.ctx, "[n] -> { T[i, j] -> [i, 1, j] }"))
.addWrs(isl.Map.readFromStr(Isl.ctx, "[n] -> { T[i,j] -> B[i, j] }"))
.addRds(isl.Map.readFromStr(Isl.ctx, "[n] -> { T[i,j] -> B[i, j - 1] }"))
val (deps : Set[Dependence], domInfo : DomainCoeffInfo) = ScheduleSpaceUtils.calcDepsAndDomInfo(scop)
val sched : ScheduleNode = SchedTreeUtil.markLoops(SchedTreeUtil.simplifySchedTree(ScheduleTreeConstruction.islUnionMap2BasicScheduleTree(scop.getSched, domInfo, scop, deps, false, true), deps))
val scopMetrics : SCoPMetrics = SCoPMetrics.apply(deps.size, 2, 0, 1, 2)
val fVal : Double = ParallelLoops.calc(sched, super.createTestConfig().get, scop, scopMetrics, domInfo, deps)
println(sched)
println("with seq, one inner par: " + fVal)
assertEquals(0.25, fVal, 0)
}
@Test
def testWithSeqTwoInnerPar() {
val scop : ScopInfo = new ScopInfo().setParams(isl.Set.readFromStr(Isl.ctx, "[n] -> { : 0 < n }"))
.addDomain(isl.Set.readFromStr(Isl.ctx, "[n] -> { S[i, j] : 0 <=i <= n and 0 <= j <= n }"))
.addSchedule(isl.Map.readFromStr(Isl.ctx, "[n] -> { S[i, j] -> [i, 0, j] }"))
.addWrs(isl.Map.readFromStr(Isl.ctx, "[n] -> { S[i,j] -> A[i, j] }"))
.addRds(isl.Map.readFromStr(Isl.ctx, "[n] -> { S[i,j] -> A[i - 1, j] }"))
.addDomain(isl.Set.readFromStr(Isl.ctx, "[n] -> { T[i, j] : 0 <=i <= n and 0 <= j <= n }"))
.addSchedule(isl.Map.readFromStr(Isl.ctx, "[n] -> { T[i, j] -> [i, 1, j] }"))
.addWrs(isl.Map.readFromStr(Isl.ctx, "[n] -> { T[i,j] -> B[i] }"))
.addRds(isl.Map.readFromStr(Isl.ctx, "[n] -> { T[i,j] -> B[i] }"))
val (deps : Set[Dependence], domInfo : DomainCoeffInfo) = ScheduleSpaceUtils.calcDepsAndDomInfo(scop)
val sched : ScheduleNode = SchedTreeUtil.markLoops(SchedTreeUtil.simplifySchedTree(ScheduleTreeConstruction.islUnionMap2BasicScheduleTree(scop.getSched, domInfo, scop, deps, false, true), deps))
val scopMetrics : SCoPMetrics = SCoPMetrics.apply(deps.size, 2, 0, 1, 2)
val fVal : Double = ParallelLoops.calc(sched, super.createTestConfig().get, scop, scopMetrics, domInfo, deps)
println(sched)
println("with seq, two inner par: " + fVal)
assertEquals(0.25, fVal, 0)
}
} | stganser/polyite | test/polyite/fitness/classifier/TestParallelLoops.scala | Scala | mit | 8,939 |
package forcomp
import common._
object Anagrams {
/** A word is simply a `String`. */
type Word = String
/** A sentence is a `List` of words. */
type Sentence = List[Word]
/** `Occurrences` is a `List` of pairs of characters and positive integers saying
* how often the character appears.
* This list is sorted alphabetically w.r.t. to the character in each pair.
* All characters in the occurrence list are lowercase.
*
* Any list of pairs of lowercase characters and their frequency which is not sorted
* is **not** an occurrence list.
*
* Note: If the frequency of some character is zero, then that character should not be
* in the list.
*/
type Occurrences = List[(Char, Int)]
/** The dictionary is simply a sequence of words.
* It is predefined and obtained as a sequence using the utility method `loadDictionary`.
*/
val dictionary: List[Word] = loadDictionary
/** Converts the word into its character occurence list.
*
* Note: the uppercase and lowercase version of the character are treated as the
* same character, and are represented as a lowercase character in the occurrence list.
*/
def wordOccurrences(w: Word): Occurrences = {
val lower_w = w.toLowerCase
for (i <- lower_w.distinct) yield (i, lower_w.count(_ == i))
}.toList.sorted
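  // For instance (illustrative only):
  //   wordOccurrences("Robert") == List(('b', 1), ('e', 1), ('o', 1), ('r', 2), ('t', 1))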
/** Converts a sentence into its character occurrence list. */
def sentenceOccurrences(s: Sentence): Occurrences =
wordOccurrences(s.mkString(""))
/** The `dictionaryByOccurrences` is a `Map` from different occurrences to a sequence of all
* the words that have that occurrence count.
* This map serves as an easy way to obtain all the anagrams of a word given its occurrence list.
*
* For example, the word "eat" has the following character occurrence list:
*
* `List(('a', 1), ('e', 1), ('t', 1))`
*
* Incidentally, so do the words "ate" and "tea".
*
* This means that the `dictionaryByOccurrences` map will contain an entry:
*
* List(('a', 1), ('e', 1), ('t', 1)) -> Seq("ate", "eat", "tea")
*
*/
lazy val dictionaryByOccurrences: Map[Occurrences, List[Word]] =
dictionary.groupBy(wordOccurrences(_)) // 1.289s
/** Returns all the anagrams of a given word. */
def wordAnagrams(word: Word): List[Word] = dictionaryByOccurrences
.get(wordOccurrences(word)) match {
case Some(x) => x
case None => Nil
}
/** Returns the list of all subsets of the occurrence list.
* This includes the occurrence itself, i.e. `List(('k', 1), ('o', 1))`
* is a subset of `List(('k', 1), ('o', 1))`.
* It also include the empty subset `List()`.
*
* Example: the subsets of the occurrence list `List(('a', 2), ('b', 2))` are:
*
* List(
* List(),
* List(('a', 1)),
* List(('a', 2)),
* List(('b', 1)),
* List(('a', 1), ('b', 1)),
* List(('a', 2), ('b', 1)),
* List(('b', 2)),
* List(('a', 1), ('b', 2)),
* List(('a', 2), ('b', 2))
* )
*
* Note that the order of the occurrence list subsets does not matter -- the subsets
* in the example above could have been displayed in some other order.
*/
def combinations(occurrences: Occurrences): List[Occurrences] = {
val s = occurrences
.map(x => (1 to x._2).map(y => (x._1, y)).toList)
val s2 = s.map(x => x.toSet.subsets.filter(_.size <= 1).toSet).toSet
s2.size match {
case 0 => List(Nil)
case _ =>
s2.reduceLeft((a, b) => {
for (i <- a; j <- b) yield Set(i, j).flatten
}).map(s => s.toList.sorted).toList
}
}.toList
/** Subtracts occurrence list `y` from occurrence list `x`.
*
* The precondition is that the occurrence list `y` is a subset of
* the occurrence list `x` -- any character appearing in `y` must
* appear in `x`, and its frequency in `y` must be smaller or equal
* than its frequency in `x`.
*
* Note: the resulting value is an occurrence - meaning it is sorted
* and has no zero-entries.
*/
def occurrencesToString(n: Occurrences): String =
n.foldLeft("")((a,b) => a + b._1.toString * b._2)
def subtract(x: Occurrences, y: Occurrences): Occurrences =
y.foldLeft(x.toMap) { (last_xmap, y_occurrence) =>
val (y_k, y_v) = y_occurrence
if (last_xmap.contains(y_k)) {
val x_v = last_xmap.apply(y_k)
if (x_v - y_v > 0)
last_xmap.updated(y_k, x_v - y_v)
else
last_xmap - y_k
} else
last_xmap
}.toList.sorted
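  // For instance (illustrative only):
  //   subtract(List(('a', 2), ('b', 1)), List(('a', 1))) == List(('a', 1), ('b', 1))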
/** Returns a list of all anagram sentences of the given sentence.
*
* An anagram of a sentence is formed by taking the occurrences of all the characters of
* all the words in the sentence, and producing all possible combinations of words with those characters,
* such that the words have to be from the dictionary.
*
* The number of words in the sentence and its anagrams does not have to correspond.
* For example, the sentence `List("I", "love", "you")` is an anagram of the sentence `List("You", "olive")`.
*
* Also, two sentences with the same words but in a different order are considered two different anagrams.
* For example, sentences `List("You", "olive")` and `List("olive", "you")` are different anagrams of
* `List("I", "love", "you")`.
*
* Here is a full example of a sentence `List("Yes", "man")` and its anagrams for our dictionary:
*
* List(
* List(en, as, my),
* List(en, my, as),
* List(man, yes),
* List(men, say),
* List(as, en, my),
* List(as, my, en),
* List(sane, my),
* List(Sean, my),
* List(my, en, as),
* List(my, as, en),
* List(my, sane),
* List(my, Sean),
* List(say, men),
* List(yes, man)
* )
*
* The different sentences do not have to be output in the order shown above - any order is fine as long as
* all the anagrams are there. Every returned word has to exist in the dictionary.
*
* Note: in case that the words of the sentence are in the dictionary, then the sentence is the anagram of itself,
* so it has to be returned in this list.
*
* Note: There is only one anagram of an empty sentence.
*/
  def sentenceAnagrams(sentence: Sentence): List[Sentence] = sentence match {
    case Nil => List(Nil)
    case _ => {
      def helper(sentenceOccurrence: Occurrences): List[Sentence] =
        if (sentenceOccurrence.isEmpty)
          List(Nil)
        else
          for {
            prefix <- combinations(sentenceOccurrence)
            wordA <- wordAnagrams(occurrencesToString(prefix))
            rest <- helper(subtract(sentenceOccurrence, prefix))
          } yield wordA :: rest
      helper(sentenceOccurrences(sentence))
    }
  }
}
| au9ustine/org.coursera.progfun-005 | assignments/forcomp/src/main/scala/forcomp/Anagrams.scala | Scala | mit | 6,964 |
package bio4j.data.uniprot.flat
import bio4j.data.uniprot._
case class CC(val lines: Seq[String]) {
def comments: Seq[Comment] =
commentBlocks(lines) flatMap commentFromBlock
private def commentFromBlock(blockLines: Seq[String]): Seq[Comment] = {
val (topic, _headContent) = blockLines.head.span(_!=':')
val contents: Seq[String] = _headContent.stripPrefix(":").trim +: blockLines.tail
topic match {
case "ALLERGEN" => List( Allergen(contents.mkString(" ")) )
case "ALTERNATIVE PRODUCTS" => isoformBlocks(contents.tail) map isoformFromBlock
case "BIOPHYSICOCHEMICAL PROPERTIES" => List( BiophysicochemicalProperties(contents.mkString(" ")) )
case "BIOTECHNOLOGY" => List( Biotechnology(contents.mkString(" ")) )
case "CATALYTIC ACTIVITY" => List( CatalyticActivity(contents.mkString(" ")) )
case "CAUTION" => List( Caution(contents.mkString(" ")) )
case "COFACTOR" => List( Cofactor(contents.mkString(" ")) )
case "DEVELOPMENTAL STAGE" => List( DevelopmentalStage(contents.mkString(" ")) )
case "DISEASE" => List( Disease(contents.mkString(" ")) )
case "DISRUPTION PHENOTYPE" => List( DisruptionPhenotype(contents.mkString(" ")) )
case "DOMAIN" => List( Domain(contents.mkString(" ")) )
case "ENZYME REGULATION" => List( EnzymeRegulation(contents.mkString(" ")) )
case "FUNCTION" => List( Function(contents.mkString(" ")) )
case "INDUCTION" => List( Induction(contents.mkString(" ")) )
case "INTERACTION" => List( Interaction(contents.mkString(" ")) )
case "MASS SPECTROMETRY" => List( MassSpectrometry(contents.mkString(" ")) )
case "MISCELLANEOUS" => List( Miscellaneous(contents.mkString(" ")) )
case "PATHWAY" => List( Pathway(contents.mkString(" ")) )
case "PHARMACEUTICAL" => List( Pharmaceutical(contents.mkString(" ")) )
case "POLYMORPHISM" => List( Polymorphism(contents.mkString(" ")) )
case "PTM" => List( PTM(contents.mkString(" ")) )
case "RNA EDITING" => List( RNAEditing(contents.mkString(" ")) )
case "SEQUENCE CAUTION" => List( SequenceCaution(contents.mkString(" ")) )
case "SIMILARITY" => List( Similarity(contents.mkString(" ")) )
case "SUBCELLULAR LOCATION" => List( SubcellularLocation(contents.mkString(" ")) )
case "SUBUNIT" => List( Subunit(contents.mkString(" ")) )
case "TISSUE SPECIFICITY" => List( TissueSpecificity(contents.mkString(" ")) )
case "TOXIC DOSE" => List( ToxicDose(contents.mkString(" ")) )
case "WEB RESOURCE" => List( WebResource(contents.mkString(" ")) )
}
}
private def commentBlocks(commentLines: Seq[String]): Seq[Seq[String]] =
commentLines.foldLeft[collection.mutable.Buffer[Seq[String]]](new collection.mutable.UnrolledBuffer[Seq[String]]){ (acc: collection.mutable.Buffer[Seq[String]], line: String) =>
// extra lines for a comment
if(line startsWith " ") {
acc.updated(acc.length - 1, acc.last :+ line.trim)
}
else {
acc += List(line.stripPrefix("-!-").trim)
}
}
/*
Isoforms sample
```
-!- ALTERNATIVE PRODUCTS:
Event=Alternative splicing, Alternative initiation; Named isoforms=8;
Comment=Additional isoforms seem to exist;
Name=1; Synonyms=Non-muscle isozyme;
IsoId=Q15746-1; Sequence=Displayed;
Name=2;
IsoId=Q15746-2; Sequence=VSP_004791;
Name=3A;
IsoId=Q15746-3; Sequence=VSP_004792, VSP_004794;
Name=3B;
IsoId=Q15746-4; Sequence=VSP_004791, VSP_004792, VSP_004794;
Name=4;
IsoId=Q15746-5; Sequence=VSP_004792, VSP_004793;
```
The input here has lines **already trimmed**.
*/
private def isoformBlocks(altProdLines: Seq[String]): Seq[Seq[String]] =
altProdLines
.dropWhile(altProdLine => !altProdLine.startsWith("Name="))
.foldLeft(new collection.mutable.UnrolledBuffer[Seq[String]]()){ (acc: collection.mutable.UnrolledBuffer[Seq[String]], line: String) =>
// same iso
if(!(line startsWith "Name="))
acc.updated(acc.length - 1, acc.last :+ line.trim)
else
acc += List(line.trim)
}
/*
The input here is assumed to be
```
Name=1; Synonyms=Non-muscle isozyme;
IsoId=Q15746-1; Sequence=Displayed;
Name=2;
IsoId=Q15746-2; Sequence=VSP_004791;
Name=3A;
IsoId=Q15746-3; Sequence=VSP_004792, VSP_004794;
```
*/
private def isoformFromBlock(isoLines: Seq[String]): Isoform =
Isoform(
name = isoLines.head.stripPrefix("Name=").takeWhile(_!=';'),
id = isoLines.tail.head.stripPrefix("IsoId=").takeWhile(_!=';'),
isEntry = isoLines.tail.head.containsSlice("Sequence=Displayed")
)
}
| bio4j/data.uniprot | src/main/scala/flat/CC.scala | Scala | agpl-3.0 | 5,268 |
import shapeless._
import shapeless.poly._
import shapeless.ops.hlist._
import scala.util.{Success, Try}
object Day1 extends App {
{
val xs = 32 :: "test" :: 66 :: HNil
object m extends Poly1 {
implicit def caseString = at[String](_ + "mapped")
implicit def caseInt = at[Int](_ + 100)
}
val xsm: Int :: String :: Int :: HNil = xs.map(m)
println(xsm) // 132 :: testmapped :: 166 :: HNil
}
{
val xs = 32 :: "test" ::(150, "abcd") :: HNil
object size extends Poly1 {
      implicit def caseInt = at[Int](x ⇒ x)
implicit def caseString = at[String](_.length)
implicit def caseTuple[A, B](
implicit ca: Case.Aux[A, Int],
                                    cb: Case.Aux[B, Int]) = at[(A, B)](x ⇒ ca(x._1) + cb(x._2))
}
val xsm: Int :: Int :: Int :: HNil = xs.map(size)
println(xsm) // 32 :: 4 :: 154 :: HNil
}
{
val xs = 32 ::("single", 12) :: "abc" :: (150 :: "abcd" :: "e" :: HNil) :: HNil
object size extends Poly1 {
      implicit def caseInt = at[Int](x ⇒ x)
implicit def caseString = at[String](_.length)
implicit def caseHList[A <: HList](implicit m: Mapper[this.type, A]) = at[A](_.map(this))
implicit def caseTuple[A <: Product, L <: HList](implicit g: Generic.Aux[A, L],
m: Mapper[this.type, L]) =
        at[A](a ⇒ g.to(a).map(this))
}
val xsm = xs.map(size)
println(xsm) // 32 :: 6 :: 12 :: HNil :: 3 :: 1 :: HNil :: HNil
val ys = 1 :: (2 :: ("abc" :: HNil) :: HNil) :: HNil
val ysm = ys.map(size)
println(ysm) // 1 :: 2 :: 3 :: HNil :: HNil :: HNil
}
{
println(1 :: (2 :: HNil) :: ("a" :: HNil) :: HNil)
//1 :: 2 :: HNil :: a :: HNil :: HNil
}
{
val xs = 32 :: "abc" :: HNil
    object const extends (Id ~> Const[String]#λ) {
      def apply[T](f: Id[T]): Const[String]#λ[T] = "c"
}
val xsm: String :: String :: HNil = xs.map(const)
println(xsm) // c :: c :: HNil
val xsm2: String :: String :: HNil = xs.mapConst("c")
println(xsm2) // c :: c :: HNil
}
{
val xs = 1 :: "some" :: HNil
object lift extends (Id ~> Try) {
override def apply[T](f: Id[T]): Try[T] = Success(f)
}
val xsl: Try[Int] :: Try[String] :: HNil = xs.map(lift)
object change extends (Try ~> Option) {
override def apply[T](f: Try[T]): Option[T] = f.toOption
}
val xso: Option[Int] :: Option[String] :: HNil = xsl.map(change)
object extract extends (Option ~> Id) {
override def apply[T](f: Option[T]): Id[T] = f.get
}
val xse: Int :: String :: HNil = xso.map(extract)
println(xse) // 1 :: some :: HNil
}
{
val xs = 1 :: "some" :: HNil
object size extends Poly1 {
      implicit def caseInt = at[Int](x ⇒ x)
implicit def caseString = at[String](_.length)
}
def fn[L <: HList](ss: L)(p: Poly1)
(implicit m: Mapper[p.type, L]): m.Out =
ss.map(p)
val xsm = fn(xs)(size)
println(xsm) // 1 :: 4 :: HNil
}
{
val xs = 1 :: "some" :: HNil
object size extends Poly1 {
      implicit def caseInt = at[Int](x ⇒ x)
implicit def caseString = at[String](_.length)
}
def fn[L <: HList, R <: HList](ss: L)(p: Poly1)
(implicit m1: Mapper.Aux[p.type, L, R],
m2: Mapper[p.type, R]) =
ss.map(p).map(p)
val xsm = fn(xs)(size)
println(xsm) // 1 :: 4 :: HNil
}
{
val xs = 1 :: "some" :: HNil
object size extends Poly1 {
      implicit def caseInt = at[Int](x ⇒ 1)
implicit def caseString = at[String](_.length)
}
def fn[L <: HList, R <: HList](ss: L)(p: Poly1)
(implicit m1: Mapper.Aux[p.type, L, R],
m2: Mapper[p.type, R]) =
ss.map(p).map(p)
val xsm = fn(xs)(size)
println(xsm) // 1 :: 1 :: HNil
}
{
val xs = 1 :: "other" :: 2 :: HNil
val ys = 1 :: "other" :: "ye" :: HNil
object fold extends Poly2 {
implicit def caseIntOnInt = at[Int, Int]
{ case (z, x) => z + x }
implicit def caseStringOnInt = at[Int, String]
{ case (z, x) => (z + x.length).toString }
implicit def caseIntOnString = at[String, Int]
{ case (z, x) => z.toInt + x }
implicit def caseStringOnString = at[String, String]
{ case (z, x) => (z.toInt + x.length).toString }
}
val xsm = xs.foldLeft(0)(fold)
println(xsm, xsm.getClass) // 8, int
val ysm = ys.foldLeft(0)(fold)
println(ysm, ysm.getClass) // 8, class java.lang.String
}
{
val xs = 1 :: "other" :: 1.23 :: HNil
val xsr = xs.reverse
val xsr2 = 1.23 :: "other" :: 1 :: HNil
def fn[L <: HList, R <: HList](list: L, listRev: R)
(implicit ev: Reverse.Aux[L, R],
z: Zip[L :: R :: HNil]) =
list.zip(listRev)
val xsm = fn(xs, xsr)
val xsm2 = fn(xs, xsr2)
println(xsm) //(1,1.23) :: (other,other) :: (1.23,1) :: HNil
println(xsm2) //(1,1.23) :: (other,other) :: (1.23,1) :: HNil
val xsrInvalid1 = xs
val xsrInvalid2 = xsr :: "add" :: HNil
//fn(xs, xsrInvalid1)
//fn(xs, xsrInvalid2)
}
}
| KadekM/Ndays-of-shapless | src/main/scala/Day1.scala | Scala | apache-2.0 | 5,284 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.plan.stream.table.validation
import org.apache.flink.api.scala._
import org.apache.flink.table.api._
import org.apache.flink.table.api.internal.TableEnvironmentImpl
import org.apache.flink.table.planner.delegation.PlannerBase
import org.apache.flink.table.planner.plan.utils.JavaUserDefinedAggFunctions.WeightedAvgWithRetract
import org.apache.flink.table.planner.runtime.utils.JavaUserDefinedAggFunctions.OverAgg0
import org.apache.flink.table.planner.utils.{StreamTableTestUtil, TableTestBase, TableTestUtil}
import org.apache.calcite.rel.RelNode
import org.junit.Test
class OverWindowValidationTest extends TableTestBase {
private val streamUtil: StreamTableTestUtil = streamTestUtil()
val table: Table = streamUtil.addDataStream[(Int, String, Long)](
"MyTable", 'a, 'b, 'c, 'proctime.proctime, 'rowtime.rowtime)
/**
* OVER clause is necessary for [[OverAgg0]] window function.
*/
@Test(expected = classOf[ValidationException])
def testInvalidOverAggregation(): Unit = {
val util = streamTestUtil()
val table = util.addDataStream[(Long, Int, String)]("T1", 'a, 'b, 'c)
val overAgg = new OverAgg0
table.select(overAgg('a, 'b))
}
/**
* OVER clause is necessary for [[OverAgg0]] window function.
*/
@Test(expected = classOf[ValidationException])
def testInvalidOverAggregation2(): Unit = {
val util = streamTestUtil()
val table = util.addDataStream[(Long, Int, String)]("T1", 'long, 'int, 'string, 'proctime)
val overAgg = new OverAgg0
table
.window(Tumble over 2.rows on 'proctime as 'w)
.groupBy('w, 'string)
.select(overAgg('long, 'int))
}
@Test(expected = classOf[ValidationException])
def testInvalidWindowAlias(): Unit = {
val result = table
.window(Over partitionBy 'c orderBy 'rowtime preceding 2.rows as 'w)
.select('c, 'b.count over 'x)
optimize(TableTestUtil.toRelNode(result))
}
@Test(expected = classOf[ValidationException])
def testOrderBy(): Unit = {
val result = table
.window(Over partitionBy 'c orderBy 'abc preceding 2.rows as 'w)
.select('c, 'b.count over 'w)
optimize(TableTestUtil.toRelNode(result))
}
@Test(expected = classOf[ValidationException])
def testPrecedingAndFollowingUsingIsLiteral(): Unit = {
val result = table
.window(Over partitionBy 'c orderBy 'rowtime preceding 2 following "xx" as 'w)
.select('c, 'b.count over 'w)
optimize(TableTestUtil.toRelNode(result))
}
@Test(expected = classOf[ValidationException])
def testPrecedingAndFollowingUsingSameType(): Unit = {
val result = table
.window(Over partitionBy 'c orderBy 'rowtime preceding 2.rows following CURRENT_RANGE as 'w)
.select('c, 'b.count over 'w)
optimize(TableTestUtil.toRelNode(result))
}
@Test(expected = classOf[ValidationException])
def testPartitionByWithUnresolved(): Unit = {
val result = table
.window(Over partitionBy 'a + 'b orderBy 'rowtime preceding 2.rows as 'w)
.select('c, 'b.count over 'w)
optimize(TableTestUtil.toRelNode(result))
}
@Test(expected = classOf[ValidationException])
def testPartitionByWithNotKeyType(): Unit = {
val table2 = streamUtil.addTableSource[(Int, String, Either[Long, String])](
"MyTable2", 'a, 'b, 'c)
val result = table2
.window(Over partitionBy 'c orderBy 'rowtime preceding 2.rows as 'w)
.select('c, 'b.count over 'w)
optimize(TableTestUtil.toRelNode(result))
}
@Test(expected = classOf[ValidationException])
def testPrecedingValue(): Unit = {
val result = table
.window(Over orderBy 'rowtime preceding -1.rows as 'w)
.select('c, 'b.count over 'w)
optimize(TableTestUtil.toRelNode(result))
}
@Test(expected = classOf[ValidationException])
def testFollowingValue(): Unit = {
val result = table
.window(Over orderBy 'rowtime preceding 1.rows following -2.rows as 'w)
.select('c, 'b.count over 'w)
optimize(TableTestUtil.toRelNode(result))
}
@Test(expected = classOf[ValidationException])
def testUdAggWithInvalidArgs(): Unit = {
val weightedAvg = new WeightedAvgWithRetract
val result = table
.window(Over orderBy 'rowtime preceding 1.minutes as 'w)
.select('c, weightedAvg('b, 'a) over 'w)
optimize(TableTestUtil.toRelNode(result))
}
@Test
def testAccessesWindowProperties(): Unit = {
thrown.expect(classOf[ValidationException])
thrown.expectMessage(
"Window start and end properties are not available for Over windows.")
table
.window(Over orderBy 'rowtime preceding 1.minutes as 'w)
.select('c, 'a.count over 'w, 'w.start + 1, 'w.end)
}
private def optimize(rel: RelNode): RelNode = {
val planner = streamUtil.tableEnv.asInstanceOf[TableEnvironmentImpl].getPlanner
planner.asInstanceOf[PlannerBase].optimize(Seq(rel)).head
}
}
| tillrohrmann/flink | flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/planner/plan/stream/table/validation/OverWindowValidationTest.scala | Scala | apache-2.0 | 5,718 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ai.h2o.sparkling.ml.features
import ai.h2o.sparkling.ml.algos.H2OGBM
import ai.h2o.sparkling.ml.metrics.{H2OPCAMetrics, MetricsAssertions}
import ai.h2o.sparkling.ml.models.H2OPCAMOJOModel
import ai.h2o.sparkling.{SharedH2OTestContext, TestUtils}
import org.apache.spark.ml.linalg.DenseVector
import org.apache.spark.ml.{Pipeline, PipelineModel}
import org.apache.spark.sql.SparkSession
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import org.scalatest.{FunSuite, Matchers}
@RunWith(classOf[JUnitRunner])
class H2OPCATestSuite extends FunSuite with Matchers with SharedH2OTestContext {
override def createSparkSession(): SparkSession = sparkSession("local[*]")
import spark.implicits._
private lazy val dataset = spark.read
.option("header", "true")
.option("inferSchema", "true")
.csv(TestUtils.locate("smalldata/prostate/prostate.csv"))
.withColumn("CAPSULE", 'CAPSULE.cast("string"))
.withColumn("RACE", 'RACE.cast("string"))
private lazy val trainingDataset = dataset.limit(300).cache()
private lazy val testingDataset = dataset.except(trainingDataset).cache()
private lazy val standaloneModel = {
val algo = new H2OPCA()
.setSeed(1)
.setInputCols("RACE", "DPROS", "DCAPS", "PSA", "VOL", "GLEASON")
.setOutputCol("Output")
.setSplitRatio(0.8)
.setImputeMissing(true)
.setPcaMethod("Power")
.setK(3)
algo.fit(trainingDataset)
}
// Support for Spark 2.1 will be removed in SW 3.34. Tests are ignored due to a bug in Vector comparison in Spark 2.1:
// https://issues.apache.org/jira/browse/SPARK-19425
if (!createSparkSession().version.startsWith("2.1")) {
test("The standalone PCA model produces different results for various input rows.") {
val scored = standaloneModel.transform(testingDataset)
val rows = scored.take(2)
val first = rows(0).getAs[DenseVector]("Output").values.toSeq
val second = rows(1).getAs[DenseVector]("Output").values.toSeq
first.length should be(3)
second.length should be(3)
first should not equal second
}
test("The standalone PCA model can provide scoring history") {
val expectedColumns = Array("Timestamp", "Duration", "Iterations", "err", "Principal Component #")
val scoringHistoryDF = standaloneModel.getScoringHistory()
scoringHistoryDF.count() shouldBe >(10L)
scoringHistoryDF.columns shouldEqual expectedColumns
}
test("PCA model can be loaded from a file") {
val mojoName: String = "pca_prostate.mojo"
val mojoStream = this.getClass.getClassLoader.getResourceAsStream(mojoName)
val mojo = H2OPCAMOJOModel.createFromMojo(mojoStream, mojoName)
mojo.setOutputCol("Output")
val expected = standaloneModel.transform(testingDataset)
val result = mojo.transform(testingDataset)
TestUtils.assertDataFramesAreIdentical(expected, result)
}
test("The PCA model is able to transform dataset after it's saved and loaded") {
val pca = new H2OPCA()
.setSeed(1)
.setInputCols("RACE", "DPROS", "DCAPS", "PSA", "VOL", "GLEASON")
.setK(5)
.setImputeMissing(true)
.setSplitRatio(0.8)
val pipeline = new Pipeline().setStages(Array(pca))
val model = pipeline.fit(trainingDataset)
val expectedTestingDataset = model.transform(testingDataset)
val path = "build/ml/pca_save_load"
model.write.overwrite().save(path)
val loadedModel = PipelineModel.load(path)
val transformedTestingDataset = loadedModel.transform(testingDataset)
TestUtils.assertDataFramesAreIdentical(expectedTestingDataset, transformedTestingDataset)
}
test(
"A pipeline with a PCA model sourcing data from multiple columns transforms testing dataset without an exception") {
val pca = new H2OPCA()
.setInputCols("RACE", "DPROS", "DCAPS", "PSA", "VOL", "GLEASON")
.setK(4)
.setImputeMissing(true)
.setSeed(42)
val gbm = new H2OGBM()
.setFeaturesCol(pca.getOutputCol())
.setLabelCol("CAPSULE")
.setSeed(42)
val pipeline = new Pipeline().setStages(Array(pca, gbm))
val model = pipeline.fit(trainingDataset)
val numberOfPredictionsDF = model.transform(testingDataset).groupBy("prediction").count()
val rows = numberOfPredictionsDF.collect()
numberOfPredictionsDF.count() shouldBe >=(2L)
rows.foreach { row =>
assert(row.getAs[Long]("count") > 0, s"No predictions of class '${row.getAs[Int]("prediction")}'")
}
}
test("A pipeline with a PCA model sourcing data from vector column transforms testing dataset without an exception") {
val autoEncoder = new H2OAutoEncoder()
.setInputCols("RACE", "DPROS", "DCAPS", "PSA", "VOL", "GLEASON")
.setHidden(Array(100))
.setSeed(42)
val pca = new H2OPCA()
.setInputCols(autoEncoder.getOutputCol())
.setK(3)
.setImputeMissing(true)
.setSeed(42)
val gbm = new H2OGBM()
.setFeaturesCol(pca.getOutputCol())
.setLabelCol("CAPSULE")
.setSeed(42)
val pipeline = new Pipeline().setStages(Array(autoEncoder, pca, gbm))
val model = pipeline.fit(trainingDataset)
val numberOfPredictionsDF = model.transform(testingDataset).groupBy("prediction").count()
val rows = numberOfPredictionsDF.collect()
numberOfPredictionsDF.count() shouldBe >=(2L)
rows.foreach { row =>
assert(row.getAs[Long]("count") > 0, s"No predictions of class '${row.getAs[Int]("prediction")}'")
}
}
}
private def assertMetrics(model: H2OPCAMOJOModel): Unit = {
assertMetrics(model.getTrainingMetricsObject(), model.getTrainingMetrics())
assertMetrics(model.getValidationMetricsObject(), model.getValidationMetrics())
assert(model.getCrossValidationMetricsObject() == null)
assert(model.getCrossValidationMetrics() == Map())
}
private def assertMetrics(metricsObject: H2OPCAMetrics, metrics: Map[String, Double]): Unit = {
MetricsAssertions.assertMetricsObjectAgainstMetricsMap(metricsObject, metrics)
}
test("test metric objects") {
assertMetrics(standaloneModel)
standaloneModel.write.overwrite().save("ml/build/pca_model_metrics")
val loadedModel = H2OPCAMOJOModel.load("ml/build/pca_model_metrics")
assertMetrics(loadedModel)
}
}
| h2oai/sparkling-water | ml/src/test/scala/ai/h2o/sparkling/ml/features/H2OPCATestSuite.scala | Scala | apache-2.0 | 7,239 |
package ee.cone.c4actor
import ee.cone.c4actor.Types.NextOffset
trait Consuming {
  def process[R](from: NextOffset, body: Consumer⇒R): R
}
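// Hypothetical call-site sketch (names are illustrative only):
//   consuming.process(startOffset, consumer => consumer.poll().foreach(handleEvent))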
trait Consumer {
def poll(): List[RawEvent]
def endOffset: NextOffset
} | wregs/c4proto | c4actor-base/src/main/scala/ee/cone/c4actor/ConsumerApi.scala | Scala | apache-2.0 | 219 |
package com.productfoundry.akka.cqrs
import java.util.UUID
import play.api.libs.json.{Reads, Writes, Format}
import scala.reflect.ClassTag
import scala.util.Try
/**
* Identifier backed by uuid.
*/
trait Identifier {
def uuid: Uuid
override def toString: String = uuid.toString
}
/**
* Identifier Companion.
*/
abstract class IdentifierCompanion[I <: Identifier: ClassTag] {
val prefix = implicitly[ClassTag[I]].runtimeClass.getSimpleName
def apply(uuid: Uuid): I
def apply(s: String): I = fromString(s).getOrElse(throw new IllegalArgumentException(s))
def apply(identifier: Identifier): I = apply(identifier.uuid)
def generate(): I = apply(UUID.randomUUID)
def fromString(s: String): Option[I] = s match {
case IdentifierRegex(uuid) => Try(apply(UUID.fromString(uuid))).toOption
case _ => None
}
implicit val IdentifierFormat: Format[I] = Format(Reads.of[Uuid].map(apply), Writes(a => Writes.of[Uuid].writes(a.uuid)))
implicit val IdentifierCompanionObject: IdentifierCompanion[I] = this
private val IdentifierRegex = """([a-fA-F0-9-]{36})""".r
}
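// Hypothetical usage sketch (OrderId is illustrative, not part of this file):
//   case class OrderId(uuid: Uuid) extends Identifier
//   object OrderId extends IdentifierCompanion[OrderId]
//
//   val id = OrderId.generate()
//   OrderId.fromString(id.toString) // == Some(id)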
| odd/akka-cqrs | core/src/main/scala/com/productfoundry/akka/cqrs/Identifier.scala | Scala | apache-2.0 | 1,098 |
package Server.Actors
import Objects._
import Objects.ObjectTypes._
import ObjectJsonSupport._
import Server.Messages._
import Utils.{Constants, Crypto, DebugInfo}
import spray.json._
import spray.routing.RequestContext
class PageActor(var page: SecureObject, debugInfo: DebugInfo)
extends ProfileActor(page.baseObj.id, debugInfo: DebugInfo) {
def baseObject = page.baseObj
def pageReceive: Receive = {
case GetFeedMsg(rc, SecureRequest(from, _, _, _)) if baseObject.deleted => handlePageDeleted(rc, from)
case LikeMsg(rc, SecureRequest(from, _, _, _)) if baseObject.deleted => handlePageDeleted(rc, from)
case PostSecureObjMsg(rc, nPage@SecureObject(_, from, _, _, _, _)) if baseObject.deleted =>
handlePageDeleted(rc, from)
case DeleteSecureObjMsg(rc, SecureRequest(from, _, _, _)) if baseObject.deleted => handlePageDeleted(rc, from)
case GetSecureObjMsg(rc, SecureRequest(from, _, _, _)) if baseObject.deleted => handlePageDeleted(rc, from)
case DeleteSecureObjMsg(rc, SecureRequest(from, to, id, _)) if id == ObjectType.page.id =>
if (from == to) {
baseObject.delete()
handlePageDeleted(rc, from)
} else {
handleUnauthorizedRequest(rc, from)
}
case GetSecureObjMsg(rc, SecureRequest(from, to, id, _)) if id == ObjectType.page.id => rc.complete(
Crypto.constructSecureMessage(
Constants.serverId,
page.toJson.compactPrint,
Constants.userPublicKeys(from),
Constants.serverPrivateKey
)
)
case PostSecureObjMsg(rc, nPage@SecureObject(_, from, to, id, _, _)) if id == ObjectType.page.id =>
if (from == to) {
page = nPage
rc.complete(Crypto.constructSecureMessage(
Constants.serverId,
"Page Updated",
Constants.userPublicKeys(from),
Constants.serverPrivateKey
))
} else {
handleUnauthorizedRequest(rc, from)
}
}
override def receive = pageReceive orElse super.receive
def handlePageDeleted(rc: RequestContext, from: Int) = rc.complete(
Crypto.constructSecureMessage(
Constants.serverId,
"Page Deleted!",
Constants.userPublicKeys(from),
Constants.serverPrivateKey
)
)
def handleUnauthorizedRequest(rc: RequestContext, from: Int) = rc.complete(
Crypto.constructSecureMessage(
Constants.serverId,
"Unauthorized Request! Not Request!",
Constants.userPublicKeys(from),
Constants.serverPrivateKey
)
)
} | Nirespire/SecureFacebookAPI | src/main/scala/Server/Actors/PageActor.scala | Scala | mit | 2,503 |
package tree
import org.scalatest._
import java.io.File
import java.nio.file.Path
// TODO: This test doesnt cover a symlink to a file
class CRUDSpec
extends FunSpec
with Fixture
{
describe("A DemonstrationBPlusTree"){
val t = bPlusTree
it("should load 12 elements, then be size 12") {
data.slice(0, 12).foreach (t += _)
assert(t.size === 12)
}
it("element 9 (using get()) should be 'Barbara Ann'") {
assert(t.get(9).get === "Barbara Ann")
}
it("after using update(), element 9 (using get()) should be 'Help Me, Rhonda'") {
t.update(9, "Help Me, Rhonda")
assert(t.get(9).get === "Help Me, Rhonda")
}
it("after deleting three elements, size should be 9") {
t --= Seq(4, 7, 12)
assert(t.size === 9)
}
it("should delete down to zero with a tree of three elements (can cause problems!)") {
t.clear()
data.slice(0, 3).foreach (t += _)
t -= 1
t -= 2
t -= 3
assert(t.size === 0)
}
it("should robustly refuse to delete from an empty tree") {
t -= 4
assert(t.size === 0)
}
}//describe
}//CRUDSpec
| rcrowther/DemonstrationBPlusTree | test/CRUDSpec.scala | Scala | gpl-3.0 | 1,154 |
package com.joescii.sbtjs
import sbt._
import Keys._
sealed trait Framework // Mostly placeholder for now
sealed trait Browser
object SbtJsTestPlugin extends AutoPlugin with SbtJsTestKeys {
import SbtJsTestTasks._
object autoImport extends SbtJsTestKeys {
object JsTestBrowsers {
case object Firefox38 extends Browser
// case object InternetExplorer8 extends Browser
case object InternetExplorer11 extends Browser
case object Chrome extends Browser
// case object Edge extends Browser
}
object JsTestFrameworks {
case object Jasmine2 extends Framework
}
}
override def trigger = allRequirements
override lazy val projectSettings = sbtJsTestSettings
lazy val sbtJsTestSettings:Seq[Def.Setting[_]] = List(
jsResources <<= (sourceDirectory in Compile, unmanagedResourceDirectories in Compile) { (main, rsrc) =>
((main / "js") +: (main / "javascript") +: rsrc).flatMap(r => (r ** "*.js").get)
},
watchSources <++= jsResources.map(identity),
jsTestResources <<= (sourceDirectory in Test, unmanagedResourceDirectories in Test) { (test, rsrc) =>
((test / "js") +: (test / "javascript") +: rsrc).flatMap(r => (r ** "*.js").get)
},
watchSources <++= jsTestResources.map(identity),
jsTestColor := true,
jsTestBrowsers := Seq(autoImport.JsTestBrowsers.Chrome),
jsFrameworks := Seq(autoImport.JsTestFrameworks.Jasmine2),
jsTestTargetDir <<= (target in Test) (_ / "sbt-js-test"),
jsAsyncWait := false,
jsAsyncWaitTimeout := None,
jsTest <<= jsTestTask,
jsTestOnly <<= jsTestOnlyTask,
jsLs <<= jsLsTask
)
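  // Illustrative usage (a sketch, not part of this plugin's sources): a project's build.sbt
  // could override the auto-imported keys using the enums defined above, for example:
  //   jsTestBrowsers := Seq(JsTestBrowsers.Firefox38, JsTestBrowsers.Chrome)
  //   jsTestColor    := false
  //   jsAsyncWait    := true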
}
| joescii/sbt-js-test | src/main/scala/com/joescii/sbtjs/SbtJsTestPlugin.scala | Scala | apache-2.0 | 1,640 |
/*
* Copyright (C) 2014 HMPerson1 <[email protected]>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package hmperson1.apps.superdoughnuts
import hmperson1.apps.superdoughnuts.gui.GuiManager
import hmperson1.apps.superdoughnuts.logic.GameManager
import javax.swing.JPanel
object SuperDoughnuts {
final val KEY_PLAYERNAME = "playerName"
final val KEY_DIFFICLUTY = "difficluty"
val delegate = new KeyListenerDelegate(null)
var startPanel: JPanel = null
var gamePanel: JPanel = null
var gameManager: GameManager = null
/**
* @param args the command line arguments
*/
def main(args: Array[String]): Unit = {
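    // Wiring, as implemented below: the first callback given to createStartPanel builds a
    // GameManager from the submitted difficulty, points the shared KeyListenerDelegate at
    // its key listener and shows the game panel; the second callback exits the app. The
    // first callback given to createGamePanel returns to the start panel and stops the game.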
GuiManager.doOnEdt(() => {
startPanel = GuiManager.createStartPanel(
(map: Map[String, String]) => {
gameManager = GameManager create map(KEY_DIFFICLUTY).toInt
delegate.delegatee = gameManager.keyListener
GuiManager showPanel gamePanel
},
() => System.exit(0))
gamePanel = GuiManager.createGamePanel(
() => {
GuiManager showPanel startPanel
gameManager.stop()
},
() => gameManager.state, delegate)
GuiManager renameFrame "Doughnuts"
GuiManager showPanel startPanel
})
}
}
| HMPerson1/superdoughnuts | src/hmperson1/apps/superdoughnuts/SuperDoughnuts.scala | Scala | gpl-3.0 | 1,846 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.adaptive
import scala.collection.mutable
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.catalyst.plans.physical.SinglePartition
import org.apache.spark.sql.execution.{ShufflePartitionSpec, SparkPlan, UnaryExecNode, UnionExec}
import org.apache.spark.sql.execution.exchange.{ENSURE_REQUIREMENTS, REBALANCE_PARTITIONS_BY_COL, REBALANCE_PARTITIONS_BY_NONE, REPARTITION_BY_COL, ShuffleExchangeLike, ShuffleOrigin}
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.util.Utils
/**
* A rule to coalesce the shuffle partitions based on the map output statistics, which can
* avoid many small reduce tasks that hurt performance.
*/
case class CoalesceShufflePartitions(session: SparkSession) extends AQEShuffleReadRule {
override val supportedShuffleOrigins: Seq[ShuffleOrigin] =
Seq(ENSURE_REQUIREMENTS, REPARTITION_BY_COL, REBALANCE_PARTITIONS_BY_NONE,
REBALANCE_PARTITIONS_BY_COL)
override def isSupported(shuffle: ShuffleExchangeLike): Boolean = {
shuffle.outputPartitioning != SinglePartition && super.isSupported(shuffle)
}
override def apply(plan: SparkPlan): SparkPlan = {
if (!conf.coalesceShufflePartitionsEnabled) {
return plan
}
    // Ideally, this rule should simply coalesce partitions w.r.t. the target size specified by
    // ADVISORY_PARTITION_SIZE_IN_BYTES (default 64MB). To avoid perf regressions in AQE, this
    // rule by default tries to maximize the parallelism and sets the target size to
    // `total shuffle size / Spark default parallelism`. In case the `Spark default parallelism`
    // is too big, this rule also respects the minimum partition size specified by
    // COALESCE_PARTITIONS_MIN_PARTITION_SIZE (default 1MB).
    // For historical reasons, this rule also needs to support the config
    // COALESCE_PARTITIONS_MIN_PARTITION_NUM. We should remove this config in the future.
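    // For example (illustrative numbers only): with 1280MB of total shuffle output and a
    // default parallelism of 200, maximizing parallelism gives a target of roughly 6.4MB
    // per partition, well below the 64MB advisory size, while the 1MB minimum partition
    // size still guards against producing lots of tiny partitions.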
val minNumPartitions = conf.getConf(SQLConf.COALESCE_PARTITIONS_MIN_PARTITION_NUM).getOrElse {
if (conf.getConf(SQLConf.COALESCE_PARTITIONS_PARALLELISM_FIRST)) {
        // We fall back to Spark default parallelism if the minimum number of coalesced partitions
        // is not set, so as to avoid perf regressions compared to no coalescing.
session.sparkContext.defaultParallelism
} else {
// If we don't need to maximize the parallelism, we set `minPartitionNum` to 1, so that
// the specified advisory partition size will be respected.
1
}
}
val advisoryTargetSize = conf.getConf(SQLConf.ADVISORY_PARTITION_SIZE_IN_BYTES)
val minPartitionSize = if (Utils.isTesting) {
// In the tests, we usually set the target size to a very small value that is even smaller
// than the default value of the min partition size. Here we also adjust the min partition
// size to be not larger than 20% of the target size, so that the tests don't need to set
// both configs all the time to check the coalescing behavior.
conf.getConf(SQLConf.COALESCE_PARTITIONS_MIN_PARTITION_SIZE).min(advisoryTargetSize / 5)
} else {
conf.getConf(SQLConf.COALESCE_PARTITIONS_MIN_PARTITION_SIZE)
}
// Sub-plans under the Union operator can be coalesced independently, so we can divide them
// into independent "coalesce groups", and all shuffle stages within each group have to be
// coalesced together.
val coalesceGroups = collectCoalesceGroups(plan)
// Divide minimum task parallelism among coalesce groups according to their data sizes.
val minNumPartitionsByGroup = if (coalesceGroups.length == 1) {
Seq(math.max(minNumPartitions, 1))
} else {
val sizes =
coalesceGroups.map(_.flatMap(_.shuffleStage.mapStats.map(_.bytesByPartitionId.sum)).sum)
val totalSize = sizes.sum
sizes.map { size =>
val num = if (totalSize > 0) {
math.round(minNumPartitions * 1.0 * size / totalSize)
} else {
minNumPartitions
}
math.max(num.toInt, 1)
}
}
val specsMap = mutable.HashMap.empty[Int, Seq[ShufflePartitionSpec]]
// Coalesce partitions for each coalesce group independently.
coalesceGroups.zip(minNumPartitionsByGroup).foreach { case (shuffleStages, minNumPartitions) =>
val newPartitionSpecs = ShufflePartitionsUtil.coalescePartitions(
shuffleStages.map(_.shuffleStage.mapStats),
shuffleStages.map(_.partitionSpecs),
advisoryTargetSize = advisoryTargetSize,
minNumPartitions = minNumPartitions,
minPartitionSize = minPartitionSize)
if (newPartitionSpecs.nonEmpty) {
shuffleStages.zip(newPartitionSpecs).map { case (stageInfo, partSpecs) =>
specsMap.put(stageInfo.shuffleStage.id, partSpecs)
}
}
}
if (specsMap.nonEmpty) {
updateShuffleReads(plan, specsMap.toMap)
} else {
plan
}
}
/**
* Gather all coalesce-able groups such that the shuffle stages in each child of a Union operator
* are in their independent groups if:
* 1) all leaf nodes of this child are shuffle stages; and
* 2) all these shuffle stages support coalescing.
*/
private def collectCoalesceGroups(plan: SparkPlan): Seq[Seq[ShuffleStageInfo]] = plan match {
case r @ AQEShuffleReadExec(q: ShuffleQueryStageExec, _) if isSupported(q.shuffle) =>
Seq(collectShuffleStageInfos(r))
case unary: UnaryExecNode => collectCoalesceGroups(unary.child)
case union: UnionExec => union.children.flatMap(collectCoalesceGroups)
    // If not all leaf nodes are query stages, it's not safe to reduce the number of shuffle
    // partitions, because we may break the assumption that all children of a Spark plan have
    // the same number of output partitions.
case p if p.collectLeaves().forall(_.isInstanceOf[QueryStageExec]) =>
val shuffleStages = collectShuffleStageInfos(p)
// ShuffleExchanges introduced by repartition do not support partition number change.
// We change the number of partitions only if all the ShuffleExchanges support it.
if (shuffleStages.forall(s => isSupported(s.shuffleStage.shuffle))) {
Seq(shuffleStages)
} else {
Seq.empty
}
case _ => Seq.empty
}
private def collectShuffleStageInfos(plan: SparkPlan): Seq[ShuffleStageInfo] = plan match {
case ShuffleStageInfo(stage, specs) => Seq(new ShuffleStageInfo(stage, specs))
case _ => plan.children.flatMap(collectShuffleStageInfos)
}
private def updateShuffleReads(
plan: SparkPlan, specsMap: Map[Int, Seq[ShufflePartitionSpec]]): SparkPlan = plan match {
    // Even for a shuffle exchange whose input RDD has 0 partitions, we should still update its
    // `partitionStartIndices`, so that all the leaf shuffles in a stage have the same
    // number of output partitions.
case ShuffleStageInfo(stage, _) =>
specsMap.get(stage.id).map { specs =>
AQEShuffleReadExec(stage, specs)
}.getOrElse(plan)
case other => other.mapChildren(updateShuffleReads(_, specsMap))
}
}
private class ShuffleStageInfo(
val shuffleStage: ShuffleQueryStageExec,
val partitionSpecs: Option[Seq[ShufflePartitionSpec]])
private object ShuffleStageInfo {
def unapply(plan: SparkPlan)
: Option[(ShuffleQueryStageExec, Option[Seq[ShufflePartitionSpec]])] = plan match {
case stage: ShuffleQueryStageExec =>
Some((stage, None))
case AQEShuffleReadExec(s: ShuffleQueryStageExec, partitionSpecs) =>
Some((s, Some(partitionSpecs)))
case _ => None
}
}
| ueshin/apache-spark | sql/core/src/main/scala/org/apache/spark/sql/execution/adaptive/CoalesceShufflePartitions.scala | Scala | apache-2.0 | 8,388 |
package org.jetbrains.plugins.scala
package lang
package psi
package stubs
package elements
import com.intellij.lang.ASTNode
import com.intellij.psi.PsiElement
import com.intellij.psi.stubs._
import org.jetbrains.plugins.scala.lang.psi.api.base.ScFieldId
import org.jetbrains.plugins.scala.lang.psi.impl.base.ScFieldIdImpl
import org.jetbrains.plugins.scala.lang.psi.stubs.impl.ScFieldIdStubImpl
/**
* User: Alexander Podkhalyuzin
* Date: 19.07.2009
*/
class ScFieldIdElementType extends ScStubElementType[ScFieldIdStub, ScFieldId]("field id") {
override def serialize(stub: ScFieldIdStub, dataStream: StubOutputStream): Unit = {
dataStream.writeName(stub.getName)
}
override def deserialize(dataStream: StubInputStream, parentStub: StubElement[_ <: PsiElement]): ScFieldIdStub =
new ScFieldIdStubImpl(parentStub, this, name = dataStream.readNameString())
override def createStubImpl(psi: ScFieldId, parentStub: StubElement[_ <: PsiElement]): ScFieldIdStub =
new ScFieldIdStubImpl(parentStub, this, name = psi.name)
override def createElement(node: ASTNode): ScFieldId = new ScFieldIdImpl(node)
override def createPsi(stub: ScFieldIdStub): ScFieldId = new ScFieldIdImpl(stub)
} | JetBrains/intellij-scala | scala/scala-impl/src/org/jetbrains/plugins/scala/lang/psi/stubs/elements/ScFieldIdElementType.scala | Scala | apache-2.0 | 1,213 |
package net.iakovlev.dynamo.generic.test
import com.amazonaws.services.dynamodbv2.{model => aws}
import net.iakovlev.dynamo.generic.AwsAttributeValueDecoder
import org.specs2.mutable.Specification
class DecodeCaseObjectsTest
extends Specification
with AwsAttributeValueDecoder {
"Decode case objects as strings" >> {
sealed trait ADT
case object A extends ADT
case object B extends ADT
case class O(a: ADT, b: ADT)
val res =
awsDecoder[O](
Map("a" -> new aws.AttributeValue("A"),
"b" -> new aws.AttributeValue("B")))
res must beRight(O(A, B))
}
}
| RomanIakovlev/easycodecs | aws-dynamodb-bindings/src/test/scala/net/iakovlev/dynamo/generic/test/DecodeCaseObjectsTest.scala | Scala | apache-2.0 | 610 |
package lila.coach
import org.joda.time.DateTime
import chess.Status
import lila.game.Pov
import lila.rating.PerfType
case class PerfResults(
base: Results,
bestRating: Option[PerfResults.BestRating],
winStreak: PerfResults.Streak, // nb games won in a row
awakeMinutesStreak: PerfResults.Streak, // minutes played without sleeping
dayStreak: PerfResults.Streak, // days played in a row
outcomeStatuses: PerfResults.OutcomeStatuses) {
def aggregate(p: RichPov) = copy(
bestRating = if (~p.pov.win) {
PerfResults.makeBestRating(p.pov).fold(bestRating) { newBest =>
bestRating.fold(newBest) { prev =>
if (newBest.rating > prev.rating) newBest else prev
}.some
}
}
else bestRating,
outcomeStatuses = outcomeStatuses.aggregate(p))
}
object PerfResults {
case class BestRating(id: String, userId: String, rating: Int)
def makeBestRating(pov: Pov): Option[BestRating] =
pov.opponent.userId |@| pov.player.ratingAfter apply {
case (opId, myRating) => BestRating(pov.gameId, opId, myRating)
}
case class PerfResultsMap(m: Map[PerfType, PerfResults]) {
def sorted: List[(PerfType, PerfResults)] = m.toList.sortBy(-_._2.base.nbGames)
}
val emptyPerfResultsMap = PerfResultsMap(Map.empty)
case class StatusScores(m: Map[Status, Int]) {
def add(s: Status) = copy(m = m + (s -> m.get(s).fold(1)(1+)))
def sorted: List[(Status, Int)] = m.toList.sortBy(-_._2)
lazy val sum: Int = m.foldLeft(0)(_ + _._2)
}
case class OutcomeStatuses(win: StatusScores, loss: StatusScores) {
def aggregate(p: RichPov) = copy(
win = if (~p.pov.win) win add p.pov.game.status else win,
loss = if (~p.pov.loss) loss add p.pov.game.status else loss)
}
val emptyOutcomeStatuses = OutcomeStatuses(StatusScores(Map.empty), StatusScores(Map.empty))
case class Streak(cur: Int, best: Int) {
def add(v: Int) = copy(cur = cur + v, best = best max (cur + v))
def reset = copy(cur = 0)
def set(v: Int) = copy(cur = v)
}
val emptyStreak = Streak(0, 0)
val empty = PerfResults(Results.empty, none, emptyStreak, emptyStreak, emptyStreak, emptyOutcomeStatuses)
case class Computation(
results: PerfResults,
base: Results.Computation,
previousEndDate: Option[DateTime],
previousWin: Boolean) {
def aggregate(p: RichPov) = copy(
results = results.aggregate(p).copy(
winStreak = if (~p.pov.win) {
if (previousWin) results.winStreak.add(1)
else results.winStreak.set(1)
}
else results.winStreak.reset,
awakeMinutesStreak = results.awakeMinutesStreak,
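      // dayStreak cases below: same calendar day as the previous game -> streak unchanged;
      // a new day reached within 24h of the previous game -> streak extended by 1;
      // otherwise (longer gap, or no previous end date) -> streak reset.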
dayStreak = (previousEndDate |@| p.pov.game.updatedAt) apply {
case (prev, next) if prev.getDayOfYear == next.getDayOfYear => results.dayStreak
case (prev, next) if next.minusDays(1).isBefore(prev) => results.dayStreak.add(1)
case _ => results.dayStreak.reset
} getOrElse results.dayStreak.reset
),
base = base.aggregate(p),
previousEndDate = p.pov.game.updatedAt,
previousWin = ~p.pov.win)
def run = results.copy(base = base.run)
}
val emptyComputation = Computation(empty, Results.emptyComputation, none, false)
}
| abougouffa/lila | modules/coach/src/main/PerfResults.scala | Scala | mit | 3,259 |
package mesosphere.marathon
import scala.util.{ Failure, Try }
class MarathonConfTest extends MarathonSpec {
private[this] val principal = "foo"
private[this] val secretFile = "/bar/baz"
test("MesosAuthenticationIsOptional") {
val conf = makeConfig(
"--master", "127.0.0.1:5050"
)
assert(conf.mesosAuthenticationPrincipal.isEmpty)
assert(conf.mesosAuthenticationSecretFile.isEmpty)
assert(conf.checkpoint.get == Some(true))
}
test("MesosAuthenticationPrincipal") {
val conf = makeConfig(
"--master", "127.0.0.1:5050",
"--mesos_authentication_principal", principal
)
assert(conf.mesosAuthenticationPrincipal.isDefined)
assert(conf.mesosAuthenticationPrincipal.get == Some(principal))
assert(conf.mesosAuthenticationSecretFile.isEmpty)
}
test("MesosAuthenticationSecretFile") {
val conf = makeConfig(
"--master", "127.0.0.1:5050",
"--mesos_authentication_principal", principal,
"--mesos_authentication_secret_file", secretFile
)
assert(conf.mesosAuthenticationPrincipal.isDefined)
assert(conf.mesosAuthenticationPrincipal.get == Some(principal))
assert(conf.mesosAuthenticationSecretFile.isDefined)
assert(conf.mesosAuthenticationSecretFile.get == Some(secretFile))
}
test("HA mode is enabled by default") {
val conf = defaultConfig()
assert(conf.highlyAvailable())
}
test("Disable HA mode") {
val conf = makeConfig(
"--master", "127.0.0.1:5050",
"--disable_ha"
)
assert(!conf.highlyAvailable())
}
test("Checkpointing is enabled by default") {
val conf = defaultConfig()
assert(conf.checkpoint())
}
test("Disable checkpointing") {
val conf = makeConfig(
"--master", "127.0.0.1:5050",
"--disable_checkpoint"
)
assert(!conf.checkpoint())
}
test("MarathonStoreTimeOut") {
val conf = makeConfig(
"--master", "127.0.0.1:5050",
"--marathon_store_timeout", "5000"
)
assert(conf.marathonStoreTimeout.isDefined)
assert(conf.marathonStoreTimeout.get == Some(5000))
}
test("--default_accepted_resource_roles *,marathon will fail without --mesos_role marathon") {
val triedConfig = Try(makeConfig(
"--master", "127.0.0.1:5050",
"--default_accepted_resource_roles", "*,marathon"
))
assert(triedConfig.isFailure)
triedConfig match {
case Failure(e) if e.getMessage ==
"requirement failed: " +
"--default_accepted_resource_roles contains roles for which we will not receive offers: marathon" =>
case other =>
fail(s"unexpected triedConfig: $other")
}
}
test("--default_accepted_resource_roles *,marathon with --mesos_role marathon") {
val conf = makeConfig(
"--master", "127.0.0.1:5050",
"--mesos_role", "marathon",
"--default_accepted_resource_roles", "*,marathon"
)
assert(conf.defaultAcceptedResourceRolesSet == Set("*", "marathon"))
}
test("--default_accepted_resource_roles *") {
val conf = makeConfig(
"--master", "127.0.0.1:5050",
"--default_accepted_resource_roles", "*"
)
assert(conf.defaultAcceptedResourceRolesSet == Set("*"))
}
test("--default_accepted_resource_roles default without --mesos_role") {
val conf = makeConfig(
"--master", "127.0.0.1:5050"
)
assert(conf.defaultAcceptedResourceRolesSet == Set("*"))
}
test("--default_accepted_resource_roles default with --mesos_role") {
val conf = makeConfig(
"--master", "127.0.0.1:5050",
"--mesos_role", "marathon"
)
assert(conf.defaultAcceptedResourceRolesSet == Set("*", "marathon"))
}
}
| sledigabel/marathon | src/test/scala/mesosphere/marathon/MarathonConfTest.scala | Scala | apache-2.0 | 3,664 |
// Generated by the Scala Plugin for the Protocol Buffer Compiler.
// Do not edit!
//
// Protofile syntax: PROTO3
package mytypes.duration
object DurationProto extends _root_.scalapb.GeneratedFileObject {
lazy val dependencies: Seq[_root_.scalapb.GeneratedFileObject] = Seq(
scalapb.options.ScalapbProto
)
lazy val messagesCompanions: Seq[_root_.scalapb.GeneratedMessageCompanion[_ <: _root_.scalapb.GeneratedMessage]] =
Seq[_root_.scalapb.GeneratedMessageCompanion[_ <: _root_.scalapb.GeneratedMessage]](
mytypes.duration.Duration
)
private lazy val ProtoBytes: _root_.scala.Array[Byte] =
scalapb.Encoding.fromBase64(scala.collection.immutable.Seq(
"""Cg5kdXJhdGlvbi5wcm90bxIHbXl0eXBlcxoVc2NhbGFwYi9zY2FsYXBiLnByb3RvIk8KCER1cmF0aW9uEiYKB3NlY29uZHMYA
SABKAVCDOI/CRIHc2Vjb25kc1IHc2Vjb25kczob4j8YIhZteXR5cGVzLk15RHVyYXRpb25UeXBlYgZwcm90bzM="""
).mkString)
lazy val scalaDescriptor: _root_.scalapb.descriptors.FileDescriptor = {
val scalaProto = com.google.protobuf.descriptor.FileDescriptorProto.parseFrom(ProtoBytes)
_root_.scalapb.descriptors.FileDescriptor.buildFrom(scalaProto, dependencies.map(_.scalaDescriptor))
}
lazy val javaDescriptor: com.google.protobuf.Descriptors.FileDescriptor = {
val javaProto = com.google.protobuf.DescriptorProtos.FileDescriptorProto.parseFrom(ProtoBytes)
com.google.protobuf.Descriptors.FileDescriptor.buildFrom(javaProto, _root_.scala.Array(
scalapb.options.ScalapbProto.javaDescriptor
))
}
@deprecated("Use javaDescriptor instead. In a future version this will refer to scalaDescriptor.", "ScalaPB 0.5.47")
def descriptor: com.google.protobuf.Descriptors.FileDescriptor = javaDescriptor
} | trueaccord/ScalaPB | docs/src/main/scala/mytypes/duration/DurationProto.scala | Scala | apache-2.0 | 1,707 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql
import java.io.{Externalizable, ObjectInput, ObjectOutput}
import java.sql.{Date, Timestamp}
import org.apache.spark.sql.catalyst.encoders.{OuterScopes, RowEncoder}
import org.apache.spark.sql.catalyst.plans.{LeftAnti, LeftSemi}
import org.apache.spark.sql.catalyst.util.sideBySide
import org.apache.spark.sql.execution.{LogicalRDD, RDDScanExec}
import org.apache.spark.sql.execution.exchange.{BroadcastExchangeExec, ShuffleExchange}
import org.apache.spark.sql.execution.streaming.MemoryStream
import org.apache.spark.sql.functions._
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.test.SharedSQLContext
import org.apache.spark.sql.types._
case class TestDataPoint(x: Int, y: Double, s: String, t: TestDataPoint2)
case class TestDataPoint2(x: Int, s: String)
object TestForTypeAlias {
type TwoInt = (Int, Int)
type ThreeInt = (TwoInt, Int)
type SeqOfTwoInt = Seq[TwoInt]
def tupleTypeAlias: TwoInt = (1, 1)
def nestedTupleTypeAlias: ThreeInt = ((1, 1), 2)
def seqOfTupleTypeAlias: SeqOfTwoInt = Seq((1, 1), (2, 2))
}
class DatasetSuite extends QueryTest with SharedSQLContext {
import testImplicits._
private implicit val ordering = Ordering.by((c: ClassData) => c.a -> c.b)
test("checkAnswer should compare map correctly") {
val data = Seq((1, "2", Map(1 -> 2, 2 -> 1)))
checkAnswer(
data.toDF(),
Seq(Row(1, "2", Map(2 -> 1, 1 -> 2))))
}
test("toDS") {
val data = Seq(("a", 1), ("b", 2), ("c", 3))
checkDataset(
data.toDS(),
data: _*)
}
test("toDS with RDD") {
val ds = sparkContext.makeRDD(Seq("a", "b", "c"), 3).toDS()
checkDataset(
ds.mapPartitions(_ => Iterator(1)),
1, 1, 1)
}
test("emptyDataset") {
val ds = spark.emptyDataset[Int]
assert(ds.count() == 0L)
assert(ds.collect() sameElements Array.empty[Int])
}
test("range") {
assert(spark.range(10).map(_ + 1).reduce(_ + _) == 55)
assert(spark.range(10).map{ case i: java.lang.Long => i + 1 }.reduce(_ + _) == 55)
assert(spark.range(0, 10).map(_ + 1).reduce(_ + _) == 55)
assert(spark.range(0, 10).map{ case i: java.lang.Long => i + 1 }.reduce(_ + _) == 55)
assert(spark.range(0, 10, 1, 2).map(_ + 1).reduce(_ + _) == 55)
assert(spark.range(0, 10, 1, 2).map{ case i: java.lang.Long => i + 1 }.reduce(_ + _) == 55)
}
test("SPARK-12404: Datatype Helper Serializability") {
val ds = sparkContext.parallelize((
new Timestamp(0),
new Date(0),
java.math.BigDecimal.valueOf(1),
scala.math.BigDecimal(1)) :: Nil).toDS()
ds.collect()
}
test("collect, first, and take should use encoders for serialization") {
val item = NonSerializableCaseClass("abcd")
val ds = Seq(item).toDS()
assert(ds.collect().head == item)
assert(ds.collectAsList().get(0) == item)
assert(ds.first() == item)
assert(ds.take(1).head == item)
assert(ds.takeAsList(1).get(0) == item)
assert(ds.toLocalIterator().next() === item)
}
test("coalesce, repartition") {
val data = (1 to 100).map(i => ClassData(i.toString, i))
val ds = data.toDS()
intercept[IllegalArgumentException] {
ds.coalesce(0)
}
intercept[IllegalArgumentException] {
ds.repartition(0)
}
assert(ds.repartition(10).rdd.partitions.length == 10)
checkDatasetUnorderly(
ds.repartition(10),
data: _*)
assert(ds.coalesce(1).rdd.partitions.length == 1)
checkDatasetUnorderly(
ds.coalesce(1),
data: _*)
}
test("as tuple") {
val data = Seq(("a", 1), ("b", 2)).toDF("a", "b")
checkDataset(
data.as[(String, Int)],
("a", 1), ("b", 2))
}
test("as case class / collect") {
val ds = Seq(("a", 1), ("b", 2), ("c", 3)).toDF("a", "b").as[ClassData]
checkDataset(
ds,
ClassData("a", 1), ClassData("b", 2), ClassData("c", 3))
assert(ds.collect().head == ClassData("a", 1))
}
test("as case class - reordered fields by name") {
val ds = Seq((1, "a"), (2, "b"), (3, "c")).toDF("b", "a").as[ClassData]
assert(ds.collect() === Array(ClassData("a", 1), ClassData("b", 2), ClassData("c", 3)))
}
test("as case class - take") {
val ds = Seq((1, "a"), (2, "b"), (3, "c")).toDF("b", "a").as[ClassData]
assert(ds.take(2) === Array(ClassData("a", 1), ClassData("b", 2)))
}
test("as seq of case class - reorder fields by name") {
val df = spark.range(3).select(array(struct($"id".cast("int").as("b"), lit("a").as("a"))))
val ds = df.as[Seq[ClassData]]
assert(ds.collect() === Array(
Seq(ClassData("a", 0)),
Seq(ClassData("a", 1)),
Seq(ClassData("a", 2))))
}
test("map") {
val ds = Seq(("a", 1), ("b", 2), ("c", 3)).toDS()
checkDataset(
ds.map(v => (v._1, v._2 + 1)),
("a", 2), ("b", 3), ("c", 4))
}
test("map with type change with the exact matched number of attributes") {
val ds = Seq(("a", 1), ("b", 2), ("c", 3)).toDS()
checkDataset(
ds.map(identity[(String, Int)])
.as[OtherTuple]
.map(identity[OtherTuple]),
OtherTuple("a", 1), OtherTuple("b", 2), OtherTuple("c", 3))
}
test("map with type change with less attributes") {
val ds = Seq(("a", 1, 3), ("b", 2, 4), ("c", 3, 5)).toDS()
checkDataset(
ds.as[OtherTuple]
.map(identity[OtherTuple]),
OtherTuple("a", 1), OtherTuple("b", 2), OtherTuple("c", 3))
}
test("map and group by with class data") {
// We inject a group by here to make sure this test case is future proof
// when we implement better pipelining and local execution mode.
val ds: Dataset[(ClassData, Long)] = Seq(ClassData("one", 1), ClassData("two", 2)).toDS()
.map(c => ClassData(c.a, c.b + 1))
.groupByKey(p => p).count()
checkDatasetUnorderly(
ds,
(ClassData("one", 2), 1L), (ClassData("two", 3), 1L))
}
test("select") {
val ds = Seq(("a", 1), ("b", 2), ("c", 3)).toDS()
checkDataset(
ds.select(expr("_2 + 1").as[Int]),
2, 3, 4)
}
test("SPARK-16853: select, case class and tuple") {
val ds = Seq(("a", 1), ("b", 2), ("c", 3)).toDS()
checkDataset(
ds.select(expr("struct(_2, _2)").as[(Int, Int)]): Dataset[(Int, Int)],
(1, 1), (2, 2), (3, 3))
checkDataset(
ds.select(expr("named_struct('a', _1, 'b', _2)").as[ClassData]): Dataset[ClassData],
ClassData("a", 1), ClassData("b", 2), ClassData("c", 3))
}
test("select 2") {
val ds = Seq(("a", 1), ("b", 2), ("c", 3)).toDS()
checkDataset(
ds.select(
expr("_1").as[String],
expr("_2").as[Int]) : Dataset[(String, Int)],
("a", 1), ("b", 2), ("c", 3))
}
test("select 2, primitive and tuple") {
val ds = Seq(("a", 1), ("b", 2), ("c", 3)).toDS()
checkDataset(
ds.select(
expr("_1").as[String],
expr("struct(_2, _2)").as[(Int, Int)]),
("a", (1, 1)), ("b", (2, 2)), ("c", (3, 3)))
}
test("select 2, primitive and class") {
val ds = Seq(("a", 1), ("b", 2), ("c", 3)).toDS()
checkDataset(
ds.select(
expr("_1").as[String],
expr("named_struct('a', _1, 'b', _2)").as[ClassData]),
("a", ClassData("a", 1)), ("b", ClassData("b", 2)), ("c", ClassData("c", 3)))
}
test("select 2, primitive and class, fields reordered") {
val ds = Seq(("a", 1), ("b", 2), ("c", 3)).toDS()
checkDataset(
ds.select(
expr("_1").as[String],
expr("named_struct('b', _2, 'a', _1)").as[ClassData]),
("a", ClassData("a", 1)), ("b", ClassData("b", 2)), ("c", ClassData("c", 3)))
}
test("REGEX column specification") {
val ds = Seq(("a", 1), ("b", 2), ("c", 3)).toDS()
withSQLConf(SQLConf.SUPPORT_QUOTED_REGEX_COLUMN_NAME.key -> "false") {
var e = intercept[AnalysisException] {
ds.select(expr("`(_1)?+.+`").as[Int])
}.getMessage
assert(e.contains("cannot resolve '`(_1)?+.+`'"))
e = intercept[AnalysisException] {
ds.select(expr("`(_1|_2)`").as[Int])
}.getMessage
assert(e.contains("cannot resolve '`(_1|_2)`'"))
e = intercept[AnalysisException] {
ds.select(ds("`(_1)?+.+`"))
}.getMessage
assert(e.contains("Cannot resolve column name \\"`(_1)?+.+`\\""))
e = intercept[AnalysisException] {
ds.select(ds("`(_1|_2)`"))
}.getMessage
assert(e.contains("Cannot resolve column name \\"`(_1|_2)`\\""))
}
withSQLConf(SQLConf.SUPPORT_QUOTED_REGEX_COLUMN_NAME.key -> "true") {
checkDataset(
ds.select(ds.col("_2")).as[Int],
1, 2, 3)
checkDataset(
ds.select(ds.colRegex("`(_1)?+.+`")).as[Int],
1, 2, 3)
checkDataset(
ds.select(ds("`(_1|_2)`"))
.select(expr("named_struct('a', _1, 'b', _2)").as[ClassData]),
ClassData("a", 1), ClassData("b", 2), ClassData("c", 3))
checkDataset(
ds.alias("g")
.select(ds("g.`(_1|_2)`"))
.select(expr("named_struct('a', _1, 'b', _2)").as[ClassData]),
ClassData("a", 1), ClassData("b", 2), ClassData("c", 3))
checkDataset(
ds.select(ds("`(_1)?+.+`"))
.select(expr("_2").as[Int]),
1, 2, 3)
checkDataset(
ds.alias("g")
.select(ds("g.`(_1)?+.+`"))
.select(expr("_2").as[Int]),
1, 2, 3)
checkDataset(
ds.select(expr("`(_1)?+.+`").as[Int]),
1, 2, 3)
val m = ds.select(expr("`(_1|_2)`"))
checkDataset(
ds.select(expr("`(_1|_2)`"))
.select(expr("named_struct('a', _1, 'b', _2)").as[ClassData]),
ClassData("a", 1), ClassData("b", 2), ClassData("c", 3))
checkDataset(
ds.alias("g")
.select(expr("g.`(_1)?+.+`").as[Int]),
1, 2, 3)
checkDataset(
ds.alias("g")
.select(expr("g.`(_1|_2)`"))
.select(expr("named_struct('a', _1, 'b', _2)").as[ClassData]),
ClassData("a", 1), ClassData("b", 2), ClassData("c", 3))
}
}
test("filter") {
val ds = Seq(("a", 1), ("b", 2), ("c", 3)).toDS()
checkDataset(
ds.filter(_._1 == "b"),
("b", 2))
}
test("filter and then select") {
val ds = Seq(("a", 1), ("b", 2), ("c", 3)).toDS()
checkDataset(
ds.filter(_._1 == "b").select(expr("_1").as[String]),
"b")
}
test("SPARK-15632: typed filter should preserve the underlying logical schema") {
val ds = spark.range(10)
val ds2 = ds.filter(_ > 3)
assert(ds.schema.equals(ds2.schema))
}
test("foreach") {
val ds = Seq(("a", 1), ("b", 2), ("c", 3)).toDS()
val acc = sparkContext.longAccumulator
ds.foreach(v => acc.add(v._2))
assert(acc.value == 6)
}
test("foreachPartition") {
val ds = Seq(("a", 1), ("b", 2), ("c", 3)).toDS()
val acc = sparkContext.longAccumulator
ds.foreachPartition((it: Iterator[(String, Int)]) => it.foreach(v => acc.add(v._2)))
assert(acc.value == 6)
}
test("reduce") {
val ds = Seq(("a", 1), ("b", 2), ("c", 3)).toDS()
assert(ds.reduce((a, b) => ("sum", a._2 + b._2)) == (("sum", 6)))
}
test("joinWith, flat schema") {
val ds1 = Seq(1, 2, 3).toDS().as("a")
val ds2 = Seq(1, 2).toDS().as("b")
checkDataset(
ds1.joinWith(ds2, $"a.value" === $"b.value", "inner"),
(1, 1), (2, 2))
}
test("joinWith tuple with primitive, expression") {
val ds1 = Seq(1, 1, 2).toDS()
val ds2 = Seq(("a", 1), ("b", 2)).toDS()
checkDataset(
ds1.joinWith(ds2, $"value" === $"_2"),
(1, ("a", 1)), (1, ("a", 1)), (2, ("b", 2)))
}
test("joinWith class with primitive, toDF") {
val ds1 = Seq(1, 1, 2).toDS()
val ds2 = Seq(ClassData("a", 1), ClassData("b", 2)).toDS()
checkAnswer(
ds1.joinWith(ds2, $"value" === $"b").toDF().select($"_1", $"_2.a", $"_2.b"),
Row(1, "a", 1) :: Row(1, "a", 1) :: Row(2, "b", 2) :: Nil)
}
test("multi-level joinWith") {
val ds1 = Seq(("a", 1), ("b", 2)).toDS().as("a")
val ds2 = Seq(("a", 1), ("b", 2)).toDS().as("b")
val ds3 = Seq(("a", 1), ("b", 2)).toDS().as("c")
checkDataset(
ds1.joinWith(ds2, $"a._2" === $"b._2").as("ab").joinWith(ds3, $"ab._1._2" === $"c._2"),
((("a", 1), ("a", 1)), ("a", 1)),
((("b", 2), ("b", 2)), ("b", 2)))
}
test("joinWith join types") {
val ds1 = Seq(1, 2, 3).toDS().as("a")
val ds2 = Seq(1, 2).toDS().as("b")
val e1 = intercept[AnalysisException] {
ds1.joinWith(ds2, $"a.value" === $"b.value", "left_semi")
}.getMessage
assert(e1.contains("Invalid join type in joinWith: " + LeftSemi.sql))
val e2 = intercept[AnalysisException] {
ds1.joinWith(ds2, $"a.value" === $"b.value", "left_anti")
}.getMessage
assert(e2.contains("Invalid join type in joinWith: " + LeftAnti.sql))
}
test("groupBy function, keys") {
val ds = Seq(("a", 1), ("b", 1)).toDS()
val grouped = ds.groupByKey(v => (1, v._2))
checkDatasetUnorderly(
grouped.keys,
(1, 1))
}
test("groupBy function, map") {
val ds = Seq(("a", 10), ("a", 20), ("b", 1), ("b", 2), ("c", 1)).toDS()
val grouped = ds.groupByKey(v => (v._1, "word"))
val agged = grouped.mapGroups { case (g, iter) => (g._1, iter.map(_._2).sum) }
checkDatasetUnorderly(
agged,
("a", 30), ("b", 3), ("c", 1))
}
test("groupBy function, flatMap") {
val ds = Seq(("a", 10), ("a", 20), ("b", 1), ("b", 2), ("c", 1)).toDS()
val grouped = ds.groupByKey(v => (v._1, "word"))
val agged = grouped.flatMapGroups { case (g, iter) =>
Iterator(g._1, iter.map(_._2).sum.toString)
}
checkDatasetUnorderly(
agged,
"a", "30", "b", "3", "c", "1")
}
test("groupBy function, mapValues, flatMap") {
val ds = Seq(("a", 10), ("a", 20), ("b", 1), ("b", 2), ("c", 1)).toDS()
val keyValue = ds.groupByKey(_._1).mapValues(_._2)
val agged = keyValue.mapGroups { case (g, iter) => (g, iter.sum) }
checkDataset(agged, ("a", 30), ("b", 3), ("c", 1))
val keyValue1 = ds.groupByKey(t => (t._1, "key")).mapValues(t => (t._2, "value"))
val agged1 = keyValue1.mapGroups { case (g, iter) => (g._1, iter.map(_._1).sum) }
checkDataset(agged, ("a", 30), ("b", 3), ("c", 1))
}
test("groupBy function, reduce") {
val ds = Seq("abc", "xyz", "hello").toDS()
val agged = ds.groupByKey(_.length).reduceGroups(_ + _)
checkDatasetUnorderly(
agged,
3 -> "abcxyz", 5 -> "hello")
}
test("groupBy single field class, count") {
val ds = Seq("abc", "xyz", "hello").toDS()
val count = ds.groupByKey(s => Tuple1(s.length)).count()
checkDataset(
count,
(Tuple1(3), 2L), (Tuple1(5), 1L)
)
}
test("typed aggregation: expr") {
val ds = Seq(("a", 10), ("a", 20), ("b", 1), ("b", 2), ("c", 1)).toDS()
checkDatasetUnorderly(
ds.groupByKey(_._1).agg(sum("_2").as[Long]),
("a", 30L), ("b", 3L), ("c", 1L))
}
test("typed aggregation: expr, expr") {
val ds = Seq(("a", 10), ("a", 20), ("b", 1), ("b", 2), ("c", 1)).toDS()
checkDatasetUnorderly(
ds.groupByKey(_._1).agg(sum("_2").as[Long], sum($"_2" + 1).as[Long]),
("a", 30L, 32L), ("b", 3L, 5L), ("c", 1L, 2L))
}
test("typed aggregation: expr, expr, expr") {
val ds = Seq(("a", 10), ("a", 20), ("b", 1), ("b", 2), ("c", 1)).toDS()
checkDatasetUnorderly(
ds.groupByKey(_._1).agg(sum("_2").as[Long], sum($"_2" + 1).as[Long], count("*")),
("a", 30L, 32L, 2L), ("b", 3L, 5L, 2L), ("c", 1L, 2L, 1L))
}
test("typed aggregation: expr, expr, expr, expr") {
val ds = Seq(("a", 10), ("a", 20), ("b", 1), ("b", 2), ("c", 1)).toDS()
checkDatasetUnorderly(
ds.groupByKey(_._1).agg(
sum("_2").as[Long],
sum($"_2" + 1).as[Long],
count("*").as[Long],
avg("_2").as[Double]),
("a", 30L, 32L, 2L, 15.0), ("b", 3L, 5L, 2L, 1.5), ("c", 1L, 2L, 1L, 1.0))
}
test("cogroup") {
val ds1 = Seq(1 -> "a", 3 -> "abc", 5 -> "hello", 3 -> "foo").toDS()
val ds2 = Seq(2 -> "q", 3 -> "w", 5 -> "e", 5 -> "r").toDS()
val cogrouped = ds1.groupByKey(_._1).cogroup(ds2.groupByKey(_._1)) { case (key, data1, data2) =>
Iterator(key -> (data1.map(_._2).mkString + "#" + data2.map(_._2).mkString))
}
checkDatasetUnorderly(
cogrouped,
1 -> "a#", 2 -> "#q", 3 -> "abcfoo#w", 5 -> "hello#er")
}
test("cogroup with complex data") {
val ds1 = Seq(1 -> ClassData("a", 1), 2 -> ClassData("b", 2)).toDS()
val ds2 = Seq(2 -> ClassData("c", 3), 3 -> ClassData("d", 4)).toDS()
val cogrouped = ds1.groupByKey(_._1).cogroup(ds2.groupByKey(_._1)) { case (key, data1, data2) =>
Iterator(key -> (data1.map(_._2.a).mkString + data2.map(_._2.a).mkString))
}
checkDatasetUnorderly(
cogrouped,
1 -> "a", 2 -> "bc", 3 -> "d")
}
test("sample with replacement") {
val n = 100
val data = sparkContext.parallelize(1 to n, 2).toDS()
checkDataset(
data.sample(withReplacement = true, 0.05, seed = 13),
5, 10, 52, 73)
}
test("sample without replacement") {
val n = 100
val data = sparkContext.parallelize(1 to n, 2).toDS()
checkDataset(
data.sample(withReplacement = false, 0.05, seed = 13),
3, 17, 27, 58, 62)
}
test("sample fraction should not be negative with replacement") {
val data = sparkContext.parallelize(1 to 2, 1).toDS()
val errMsg = intercept[IllegalArgumentException] {
data.sample(withReplacement = true, -0.1, 0)
}.getMessage
assert(errMsg.contains("Sampling fraction (-0.1) must be nonnegative with replacement"))
// Sampling fraction can be greater than 1 with replacement.
checkDataset(
data.sample(withReplacement = true, 1.05, seed = 13),
1, 2)
}
test("sample fraction should be on interval [0, 1] without replacement") {
val data = sparkContext.parallelize(1 to 2, 1).toDS()
val errMsg1 = intercept[IllegalArgumentException] {
data.sample(withReplacement = false, -0.1, 0)
}.getMessage()
assert(errMsg1.contains(
"Sampling fraction (-0.1) must be on interval [0, 1] without replacement"))
val errMsg2 = intercept[IllegalArgumentException] {
data.sample(withReplacement = false, 1.1, 0)
}.getMessage()
assert(errMsg2.contains(
"Sampling fraction (1.1) must be on interval [0, 1] without replacement"))
}
test("SPARK-16686: Dataset.sample with seed results shouldn't depend on downstream usage") {
val simpleUdf = udf((n: Int) => {
require(n != 1, "simpleUdf shouldn't see id=1!")
1
})
val df = Seq(
(0, "string0"),
(1, "string1"),
(2, "string2"),
(3, "string3"),
(4, "string4"),
(5, "string5"),
(6, "string6"),
(7, "string7"),
(8, "string8"),
(9, "string9")
).toDF("id", "stringData")
val sampleDF = df.sample(false, 0.7, 50)
// After sampling, sampleDF doesn't contain id=1.
assert(!sampleDF.select("id").collect.contains(1))
// simpleUdf should not encounter id=1.
checkAnswer(sampleDF.select(simpleUdf($"id")), List.fill(sampleDF.count.toInt)(Row(1)))
}
test("SPARK-11436: we should rebind right encoder when join 2 datasets") {
val ds1 = Seq("1", "2").toDS().as("a")
val ds2 = Seq(2, 3).toDS().as("b")
val joined = ds1.joinWith(ds2, $"a.value" === $"b.value")
checkDataset(joined, ("2", 2))
}
test("self join") {
val ds = Seq("1", "2").toDS().as("a")
val joined = ds.joinWith(ds, lit(true), "cross")
checkDataset(joined, ("1", "1"), ("1", "2"), ("2", "1"), ("2", "2"))
}
test("toString") {
val ds = Seq((1, 2)).toDS()
assert(ds.toString == "[_1: int, _2: int]")
}
test("Kryo encoder") {
implicit val kryoEncoder = Encoders.kryo[KryoData]
val ds = Seq(KryoData(1), KryoData(2)).toDS()
assert(ds.groupByKey(p => p).count().collect().toSet ==
Set((KryoData(1), 1L), (KryoData(2), 1L)))
}
test("Kryo encoder self join") {
implicit val kryoEncoder = Encoders.kryo[KryoData]
val ds = Seq(KryoData(1), KryoData(2)).toDS()
assert(ds.joinWith(ds, lit(true), "cross").collect().toSet ==
Set(
(KryoData(1), KryoData(1)),
(KryoData(1), KryoData(2)),
(KryoData(2), KryoData(1)),
(KryoData(2), KryoData(2))))
}
test("Kryo encoder: check the schema mismatch when converting DataFrame to Dataset") {
implicit val kryoEncoder = Encoders.kryo[KryoData]
val df = Seq((1)).toDF("a")
val e = intercept[AnalysisException] {
df.as[KryoData]
}.message
assert(e.contains("cannot cast IntegerType to BinaryType"))
}
test("Java encoder") {
implicit val kryoEncoder = Encoders.javaSerialization[JavaData]
val ds = Seq(JavaData(1), JavaData(2)).toDS()
assert(ds.groupByKey(p => p).count().collect().toSet ==
Set((JavaData(1), 1L), (JavaData(2), 1L)))
}
test("Java encoder self join") {
implicit val kryoEncoder = Encoders.javaSerialization[JavaData]
val ds = Seq(JavaData(1), JavaData(2)).toDS()
assert(ds.joinWith(ds, lit(true), "cross").collect().toSet ==
Set(
(JavaData(1), JavaData(1)),
(JavaData(1), JavaData(2)),
(JavaData(2), JavaData(1)),
(JavaData(2), JavaData(2))))
}
test("SPARK-14696: implicit encoders for boxed types") {
assert(spark.range(1).map { i => i : java.lang.Long }.head == 0L)
}
test("SPARK-11894: Incorrect results are returned when using null") {
val nullInt = null.asInstanceOf[java.lang.Integer]
val ds1 = Seq((nullInt, "1"), (new java.lang.Integer(22), "2")).toDS()
val ds2 = Seq((nullInt, "1"), (new java.lang.Integer(22), "2")).toDS()
checkDataset(
ds1.joinWith(ds2, lit(true), "cross"),
((nullInt, "1"), (nullInt, "1")),
((nullInt, "1"), (new java.lang.Integer(22), "2")),
((new java.lang.Integer(22), "2"), (nullInt, "1")),
((new java.lang.Integer(22), "2"), (new java.lang.Integer(22), "2")))
}
test("change encoder with compatible schema") {
val ds = Seq(2 -> 2.toByte, 3 -> 3.toByte).toDF("a", "b").as[ClassData]
assert(ds.collect().toSeq == Seq(ClassData("2", 2), ClassData("3", 3)))
}
test("verify mismatching field names fail with a good error") {
val ds = Seq(ClassData("a", 1)).toDS()
val e = intercept[AnalysisException] {
ds.as[ClassData2]
}
assert(e.getMessage.contains("cannot resolve '`c`' given input columns: [a, b]"), e.getMessage)
}
test("runtime nullability check") {
val schema = StructType(Seq(
StructField("f", StructType(Seq(
StructField("a", StringType, nullable = true),
StructField("b", IntegerType, nullable = true)
)), nullable = true)
))
def buildDataset(rows: Row*): Dataset[NestedStruct] = {
val rowRDD = spark.sparkContext.parallelize(rows)
spark.createDataFrame(rowRDD, schema).as[NestedStruct]
}
checkDataset(
buildDataset(Row(Row("hello", 1))),
NestedStruct(ClassData("hello", 1))
)
// Shouldn't throw runtime exception when parent object (`ClassData`) is null
assert(buildDataset(Row(null)).collect() === Array(NestedStruct(null)))
val message = intercept[RuntimeException] {
buildDataset(Row(Row("hello", null))).collect()
}.getMessage
assert(message.contains("Null value appeared in non-nullable field"))
}
test("SPARK-12478: top level null field") {
val ds0 = Seq(NestedStruct(null)).toDS()
checkDataset(ds0, NestedStruct(null))
checkAnswer(ds0.toDF(), Row(null))
val ds1 = Seq(DeepNestedStruct(NestedStruct(null))).toDS()
checkDataset(ds1, DeepNestedStruct(NestedStruct(null)))
checkAnswer(ds1.toDF(), Row(Row(null)))
}
test("support inner class in Dataset") {
val outer = new OuterClass
OuterScopes.addOuterScope(outer)
val ds = Seq(outer.InnerClass("1"), outer.InnerClass("2")).toDS()
checkDataset(ds.map(_.a), "1", "2")
}
test("grouping key and grouped value has field with same name") {
val ds = Seq(ClassData("a", 1), ClassData("a", 2)).toDS()
val agged = ds.groupByKey(d => ClassNullableData(d.a, null)).mapGroups {
case (key, values) => key.a + values.map(_.b).sum
}
checkDataset(agged, "a3")
}
test("cogroup's left and right side has field with same name") {
val left = Seq(ClassData("a", 1), ClassData("b", 2)).toDS()
val right = Seq(ClassNullableData("a", 3), ClassNullableData("b", 4)).toDS()
val cogrouped = left.groupByKey(_.a).cogroup(right.groupByKey(_.a)) {
case (key, lData, rData) => Iterator(key + lData.map(_.b).sum + rData.map(_.b.toInt).sum)
}
checkDataset(cogrouped, "a13", "b24")
}
test("give nice error message when the real number of fields doesn't match encoder schema") {
val ds = Seq(ClassData("a", 1), ClassData("b", 2)).toDS()
val message = intercept[AnalysisException] {
ds.as[(String, Int, Long)]
}.message
assert(message ==
"Try to map struct<a:string,b:int> to Tuple3, " +
"but failed as the number of fields does not line up.")
val message2 = intercept[AnalysisException] {
ds.as[Tuple1[String]]
}.message
assert(message2 ==
"Try to map struct<a:string,b:int> to Tuple1, " +
"but failed as the number of fields does not line up.")
}
test("SPARK-13440: Resolving option fields") {
val df = Seq(1, 2, 3).toDS()
val ds = df.as[Option[Int]]
checkDataset(
ds.filter(_ => true),
Some(1), Some(2), Some(3))
}
test("SPARK-13540 Dataset of nested class defined in Scala object") {
checkDataset(
Seq(OuterObject.InnerClass("foo")).toDS(),
OuterObject.InnerClass("foo"))
}
test("SPARK-14000: case class with tuple type field") {
checkDataset(
Seq(TupleClass((1, "a"))).toDS(),
TupleClass((1, "a"))
)
}
test("isStreaming returns false for static Dataset") {
val data = Seq(("a", 1), ("b", 2), ("c", 3)).toDS()
assert(!data.isStreaming, "static Dataset returned true for 'isStreaming'.")
}
test("isStreaming returns true for streaming Dataset") {
val data = MemoryStream[Int].toDS()
assert(data.isStreaming, "streaming Dataset returned false for 'isStreaming'.")
}
test("isStreaming returns true after static and streaming Dataset join") {
val static = Seq(("a", 1), ("b", 2), ("c", 3)).toDF("a", "b")
val streaming = MemoryStream[Int].toDS().toDF("b")
val df = streaming.join(static, Seq("b"))
assert(df.isStreaming, "streaming Dataset returned false for 'isStreaming'.")
}
test("SPARK-14554: Dataset.map may generate wrong java code for wide table") {
val wideDF = spark.range(10).select(Seq.tabulate(1000) {i => ('id + i).as(s"c$i")} : _*)
// Make sure the generated code for this plan can compile and execute.
checkDataset(wideDF.map(_.getLong(0)), 0L until 10 : _*)
}
test("SPARK-14838: estimating sizeInBytes in operators with ObjectProducer shouldn't fail") {
val dataset = Seq(
(0, 3, 54f),
(0, 4, 44f),
(0, 5, 42f),
(1, 3, 39f),
(1, 5, 33f),
(1, 4, 26f),
(2, 3, 51f),
(2, 5, 45f),
(2, 4, 30f)
).toDF("user", "item", "rating")
val actual = dataset
.select("user", "item")
.as[(Int, Int)]
.groupByKey(_._1)
.mapGroups { case (src, ids) => (src, ids.map(_._2).toArray) }
.toDF("id", "actual")
dataset.join(actual, dataset("user") === actual("id")).collect()
}
test("SPARK-15097: implicits on dataset's spark can be imported") {
val dataset = Seq(1, 2, 3).toDS()
checkDataset(DatasetTransform.addOne(dataset), 2, 3, 4)
}
test("dataset.rdd with generic case class") {
val ds = Seq(Generic(1, 1.0), Generic(2, 2.0)).toDS()
val ds2 = ds.map(g => Generic(g.id, g.value))
assert(ds.rdd.map(r => r.id).count === 2)
assert(ds2.rdd.map(r => r.id).count === 2)
val ds3 = ds.map(g => new java.lang.Long(g.id))
assert(ds3.rdd.map(r => r).count === 2)
}
test("runtime null check for RowEncoder") {
val schema = new StructType().add("i", IntegerType, nullable = false)
val df = spark.range(10).map(l => {
if (l % 5 == 0) {
Row(null)
} else {
Row(l)
}
})(RowEncoder(schema))
val message = intercept[Exception] {
df.collect()
}.getMessage
assert(message.contains("The 0th field 'i' of input row cannot be null"))
}
test("row nullability mismatch") {
val schema = new StructType().add("a", StringType, true).add("b", StringType, false)
val rdd = spark.sparkContext.parallelize(Row(null, "123") :: Row("234", null) :: Nil)
val message = intercept[Exception] {
spark.createDataFrame(rdd, schema).collect()
}.getMessage
assert(message.contains("The 1th field 'b' of input row cannot be null"))
}
test("createTempView") {
val dataset = Seq(1, 2, 3).toDS()
dataset.createOrReplaceTempView("tempView")
// Overrides the existing temporary view with same name
// No exception should be thrown here.
dataset.createOrReplaceTempView("tempView")
// Throws AnalysisException if temp view with same name already exists
val e = intercept[AnalysisException](
dataset.createTempView("tempView"))
intercept[AnalysisException](dataset.createTempView("tempView"))
assert(e.message.contains("already exists"))
dataset.sparkSession.catalog.dropTempView("tempView")
}
test("SPARK-15381: physical object operator should define `reference` correctly") {
val df = Seq(1 -> 2).toDF("a", "b")
checkAnswer(df.map(row => row)(RowEncoder(df.schema)).select("b", "a"), Row(2, 1))
}
private def checkShowString[T](ds: Dataset[T], expected: String): Unit = {
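    // The expected string contains 4 non-data lines produced by show(): the top border,
    // the header row, the separator beneath it, and the bottom border.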
val numRows = expected.split("\\n").length - 4
val actual = ds.showString(numRows, truncate = 20)
if (expected != actual) {
fail(
"Dataset.showString() gives wrong result:\\n\\n" + sideBySide(
"== Expected ==\\n" + expected,
"== Actual ==\\n" + actual
).mkString("\\n")
)
}
}
test("SPARK-15550 Dataset.show() should show contents of the underlying logical plan") {
val df = Seq((1, "foo", "extra"), (2, "bar", "extra")).toDF("b", "a", "c")
val ds = df.as[ClassData]
val expected =
"""+---+---+-----+
|| b| a| c|
|+---+---+-----+
|| 1|foo|extra|
|| 2|bar|extra|
|+---+---+-----+
|""".stripMargin
checkShowString(ds, expected)
}
test("SPARK-15550 Dataset.show() should show inner nested products as rows") {
val ds = Seq(
NestedStruct(ClassData("foo", 1)),
NestedStruct(ClassData("bar", 2))
).toDS()
val expected =
"""+-------+
|| f|
|+-------+
||[foo,1]|
||[bar,2]|
|+-------+
|""".stripMargin
checkShowString(ds, expected)
}
test(
"SPARK-15112: EmbedDeserializerInFilter should not optimize plan fragment that changes schema"
) {
val ds = Seq(1 -> "foo", 2 -> "bar").toDF("b", "a").as[ClassData]
assertResult(Seq(ClassData("foo", 1), ClassData("bar", 2))) {
ds.collect().toSeq
}
assertResult(Seq(ClassData("bar", 2))) {
ds.filter(_.b > 1).collect().toSeq
}
}
test("mapped dataset should resolve duplicated attributes for self join") {
val ds = Seq(1, 2, 3).toDS().map(_ + 1)
val ds1 = ds.as("d1")
val ds2 = ds.as("d2")
checkDatasetUnorderly(ds1.joinWith(ds2, $"d1.value" === $"d2.value"), (2, 2), (3, 3), (4, 4))
checkDatasetUnorderly(ds1.intersect(ds2), 2, 3, 4)
checkDatasetUnorderly(ds1.except(ds1))
}
test("SPARK-15441: Dataset outer join") {
val left = Seq(ClassData("a", 1), ClassData("b", 2)).toDS().as("left")
val right = Seq(ClassData("x", 2), ClassData("y", 3)).toDS().as("right")
val joined = left.joinWith(right, $"left.b" === $"right.b", "left")
val result = joined.collect().toSet
assert(result == Set(ClassData("a", 1) -> null, ClassData("b", 2) -> ClassData("x", 2)))
}
test("better error message when use java reserved keyword as field name") {
val e = intercept[UnsupportedOperationException] {
Seq(InvalidInJava(1)).toDS()
}
assert(e.getMessage.contains(
"`abstract` is a reserved keyword and cannot be used as field name"))
}
test("Dataset should support flat input object to be null") {
checkDataset(Seq("a", null).toDS(), "a", null)
}
test("Dataset should throw RuntimeException if top-level product input object is null") {
val e = intercept[RuntimeException](Seq(ClassData("a", 1), null).toDS())
assert(e.getMessage.contains("Null value appeared in non-nullable field"))
assert(e.getMessage.contains("top level Product input object"))
}
test("dropDuplicates") {
val ds = Seq(("a", 1), ("a", 2), ("b", 1), ("a", 1)).toDS()
checkDataset(
ds.dropDuplicates("_1"),
("a", 1), ("b", 1))
checkDataset(
ds.dropDuplicates("_2"),
("a", 1), ("a", 2))
checkDataset(
ds.dropDuplicates("_1", "_2"),
("a", 1), ("a", 2), ("b", 1))
}
test("dropDuplicates: columns with same column name") {
val ds1 = Seq(("a", 1), ("a", 2), ("b", 1), ("a", 1)).toDS()
val ds2 = Seq(("a", 1), ("a", 2), ("b", 1), ("a", 1)).toDS()
// The dataset joined has two columns of the same name "_2".
val joined = ds1.join(ds2, "_1").select(ds1("_2").as[Int], ds2("_2").as[Int])
checkDataset(
joined.dropDuplicates(),
(1, 2), (1, 1), (2, 1), (2, 2))
}
test("SPARK-16097: Encoders.tuple should handle null object correctly") {
val enc = Encoders.tuple(Encoders.tuple(Encoders.STRING, Encoders.STRING), Encoders.STRING)
val data = Seq((("a", "b"), "c"), (null, "d"))
val ds = spark.createDataset(data)(enc)
checkDataset(ds, (("a", "b"), "c"), (null, "d"))
}
test("SPARK-16995: flat mapping on Dataset containing a column created with lit/expr") {
val df = Seq("1").toDF("a")
import df.sparkSession.implicits._
checkDataset(
df.withColumn("b", lit(0)).as[ClassData]
.groupByKey(_.a).flatMapGroups { case (x, iter) => List[Int]() })
checkDataset(
df.withColumn("b", expr("0")).as[ClassData]
.groupByKey(_.a).flatMapGroups { case (x, iter) => List[Int]() })
}
test("SPARK-18125: Spark generated code causes CompileException") {
val data = Array(
Route("a", "b", 1),
Route("a", "b", 2),
Route("a", "c", 2),
Route("a", "d", 10),
Route("b", "a", 1),
Route("b", "a", 5),
Route("b", "c", 6))
val ds = sparkContext.parallelize(data).toDF.as[Route]
val grped = ds.map(r => GroupedRoutes(r.src, r.dest, Seq(r)))
.groupByKey(r => (r.src, r.dest))
.reduceGroups { (g1: GroupedRoutes, g2: GroupedRoutes) =>
GroupedRoutes(g1.src, g1.dest, g1.routes ++ g2.routes)
}.map(_._2)
val expected = Seq(
GroupedRoutes("a", "d", Seq(Route("a", "d", 10))),
GroupedRoutes("b", "c", Seq(Route("b", "c", 6))),
GroupedRoutes("a", "b", Seq(Route("a", "b", 1), Route("a", "b", 2))),
GroupedRoutes("b", "a", Seq(Route("b", "a", 1), Route("b", "a", 5))),
GroupedRoutes("a", "c", Seq(Route("a", "c", 2)))
)
implicit def ordering[GroupedRoutes]: Ordering[GroupedRoutes] = new Ordering[GroupedRoutes] {
override def compare(x: GroupedRoutes, y: GroupedRoutes): Int = {
x.toString.compareTo(y.toString)
}
}
checkDatasetUnorderly(grped, expected: _*)
}
test("SPARK-18189: Fix serialization issue in KeyValueGroupedDataset") {
val resultValue = 12345
val keyValueGrouped = Seq((1, 2), (3, 4)).toDS().groupByKey(_._1)
val mapGroups = keyValueGrouped.mapGroups((k, v) => (k, 1))
val broadcasted = spark.sparkContext.broadcast(resultValue)
// Using broadcast triggers serialization issue in KeyValueGroupedDataset
val dataset = mapGroups.map(_ => broadcasted.value)
assert(dataset.collect() sameElements Array(resultValue, resultValue))
}
test("SPARK-18284: Serializer should have correct nullable value") {
val df1 = Seq(1, 2, 3, 4).toDF
assert(df1.schema(0).nullable == false)
val df2 = Seq(Integer.valueOf(1), Integer.valueOf(2)).toDF
assert(df2.schema(0).nullable == true)
val df3 = Seq(Seq(1, 2), Seq(3, 4)).toDF
assert(df3.schema(0).nullable == true)
assert(df3.schema(0).dataType.asInstanceOf[ArrayType].containsNull == false)
val df4 = Seq(Seq("a", "b"), Seq("c", "d")).toDF
assert(df4.schema(0).nullable == true)
assert(df4.schema(0).dataType.asInstanceOf[ArrayType].containsNull == true)
val df5 = Seq((0, 1.0), (2, 2.0)).toDF("id", "v")
assert(df5.schema(0).nullable == false)
assert(df5.schema(1).nullable == false)
val df6 = Seq((0, 1.0, "a"), (2, 2.0, "b")).toDF("id", "v1", "v2")
assert(df6.schema(0).nullable == false)
assert(df6.schema(1).nullable == false)
assert(df6.schema(2).nullable == true)
val df7 = (Tuple1(Array(1, 2, 3)) :: Nil).toDF("a")
assert(df7.schema(0).nullable == true)
assert(df7.schema(0).dataType.asInstanceOf[ArrayType].containsNull == false)
val df8 = (Tuple1(Array((null: Integer), (null: Integer))) :: Nil).toDF("a")
assert(df8.schema(0).nullable == true)
assert(df8.schema(0).dataType.asInstanceOf[ArrayType].containsNull == true)
val df9 = (Tuple1(Map(2 -> 3)) :: Nil).toDF("m")
assert(df9.schema(0).nullable == true)
assert(df9.schema(0).dataType.asInstanceOf[MapType].valueContainsNull == false)
val df10 = (Tuple1(Map(1 -> (null: Integer))) :: Nil).toDF("m")
assert(df10.schema(0).nullable == true)
assert(df10.schema(0).dataType.asInstanceOf[MapType].valueContainsNull == true)
val df11 = Seq(TestDataPoint(1, 2.2, "a", null),
TestDataPoint(3, 4.4, "null", (TestDataPoint2(33, "b")))).toDF
assert(df11.schema(0).nullable == false)
assert(df11.schema(1).nullable == false)
assert(df11.schema(2).nullable == true)
assert(df11.schema(3).nullable == true)
assert(df11.schema(3).dataType.asInstanceOf[StructType].fields(0).nullable == false)
assert(df11.schema(3).dataType.asInstanceOf[StructType].fields(1).nullable == true)
}
Seq(true, false).foreach { eager =>
def testCheckpointing(testName: String)(f: => Unit): Unit = {
test(s"Dataset.checkpoint() - $testName (eager = $eager)") {
withTempDir { dir =>
val originalCheckpointDir = spark.sparkContext.checkpointDir
try {
spark.sparkContext.setCheckpointDir(dir.getCanonicalPath)
f
} finally {
// Since the original checkpointDir can be None, we need
// to set the variable directly.
spark.sparkContext.checkpointDir = originalCheckpointDir
}
}
}
}
testCheckpointing("basic") {
val ds = spark.range(10).repartition('id % 2).filter('id > 5).orderBy('id.desc)
val cp = ds.checkpoint(eager)
val logicalRDD = cp.logicalPlan match {
case plan: LogicalRDD => plan
case _ =>
val treeString = cp.logicalPlan.treeString(verbose = true)
fail(s"Expecting a LogicalRDD, but got\\n$treeString")
}
val dsPhysicalPlan = ds.queryExecution.executedPlan
val cpPhysicalPlan = cp.queryExecution.executedPlan
assertResult(dsPhysicalPlan.outputPartitioning) { logicalRDD.outputPartitioning }
assertResult(dsPhysicalPlan.outputOrdering) { logicalRDD.outputOrdering }
assertResult(dsPhysicalPlan.outputPartitioning) { cpPhysicalPlan.outputPartitioning }
assertResult(dsPhysicalPlan.outputOrdering) { cpPhysicalPlan.outputOrdering }
// For a lazy checkpoint() call, the first check also materializes the checkpoint.
checkDataset(cp, (9L to 6L by -1L).map(java.lang.Long.valueOf): _*)
      // Read back the checkpointed data and check it again.
checkDataset(cp, (9L to 6L by -1L).map(java.lang.Long.valueOf): _*)
}
testCheckpointing("should preserve partitioning information") {
val ds = spark.range(10).repartition('id % 2)
val cp = ds.checkpoint(eager)
val agg = cp.groupBy('id % 2).agg(count('id))
agg.queryExecution.executedPlan.collectFirst {
case ShuffleExchange(_, _: RDDScanExec, _) =>
case BroadcastExchangeExec(_, _: RDDScanExec) =>
}.foreach { _ =>
fail(
"No Exchange should be inserted above RDDScanExec since the checkpointed Dataset " +
"preserves partitioning information:\\n\\n" + agg.queryExecution
)
}
checkAnswer(agg, ds.groupBy('id % 2).agg(count('id)))
}
}
test("identity map for primitive arrays") {
val arrayByte = Array(1.toByte, 2.toByte, 3.toByte)
val arrayInt = Array(1, 2, 3)
val arrayLong = Array(1.toLong, 2.toLong, 3.toLong)
val arrayDouble = Array(1.1, 2.2, 3.3)
val arrayString = Array("a", "b", "c")
val dsByte = sparkContext.parallelize(Seq(arrayByte), 1).toDS.map(e => e)
val dsInt = sparkContext.parallelize(Seq(arrayInt), 1).toDS.map(e => e)
val dsLong = sparkContext.parallelize(Seq(arrayLong), 1).toDS.map(e => e)
val dsDouble = sparkContext.parallelize(Seq(arrayDouble), 1).toDS.map(e => e)
val dsString = sparkContext.parallelize(Seq(arrayString), 1).toDS.map(e => e)
checkDataset(dsByte, arrayByte)
checkDataset(dsInt, arrayInt)
checkDataset(dsLong, arrayLong)
checkDataset(dsDouble, arrayDouble)
checkDataset(dsString, arrayString)
}
test("SPARK-18251: the type of Dataset can't be Option of Product type") {
checkDataset(Seq(Some(1), None).toDS(), Some(1), None)
val e = intercept[UnsupportedOperationException] {
Seq(Some(1 -> "a"), None).toDS()
}
assert(e.getMessage.contains("Cannot create encoder for Option of Product type"))
}
test ("SPARK-17460: the sizeInBytes in Statistics shouldn't overflow to a negative number") {
// Since the sizeInBytes in Statistics could exceed the limit of an Int, we should use BigInt
// instead of Int for avoiding possible overflow.
val ds = (0 to 10000).map( i =>
(i, Seq((i, Seq((i, "This is really not that long of a string")))))).toDS()
val sizeInBytes = ds.logicalPlan.stats.sizeInBytes
// sizeInBytes is 2404280404, before the fix, it overflows to a negative number
assert(sizeInBytes > 0)
}
  test("SPARK-18717: code generation works for both scala.collection.Map" +
    " and scala.collection.immutable.Map") {
val ds = Seq(WithImmutableMap("hi", Map(42L -> "foo"))).toDS
checkDataset(ds.map(t => t), WithImmutableMap("hi", Map(42L -> "foo")))
val ds2 = Seq(WithMap("hi", Map(42L -> "foo"))).toDS
checkDataset(ds2.map(t => t), WithMap("hi", Map(42L -> "foo")))
}
test("SPARK-18746: add implicit encoder for BigDecimal, date, timestamp") {
// For this implicit encoder, 18 is the default scale
assert(spark.range(1).map { x => new java.math.BigDecimal(1) }.head ==
new java.math.BigDecimal(1).setScale(18))
assert(spark.range(1).map { x => scala.math.BigDecimal(1, 18) }.head ==
scala.math.BigDecimal(1, 18))
assert(spark.range(1).map { x => new java.sql.Date(2016, 12, 12) }.head ==
new java.sql.Date(2016, 12, 12))
assert(spark.range(1).map { x => new java.sql.Timestamp(100000) }.head ==
new java.sql.Timestamp(100000))
}
  test("SPARK-19896: cannot have circular references in case class") {
val errMsg1 = intercept[UnsupportedOperationException] {
Seq(CircularReferenceClassA(null)).toDS
}
assert(errMsg1.getMessage.startsWith("cannot have circular references in class, but got the " +
"circular reference of class"))
val errMsg2 = intercept[UnsupportedOperationException] {
Seq(CircularReferenceClassC(null)).toDS
}
assert(errMsg2.getMessage.startsWith("cannot have circular references in class, but got the " +
"circular reference of class"))
val errMsg3 = intercept[UnsupportedOperationException] {
Seq(CircularReferenceClassD(null)).toDS
}
assert(errMsg3.getMessage.startsWith("cannot have circular references in class, but got the " +
"circular reference of class"))
}
test("SPARK-20125: option of map") {
val ds = Seq(WithMapInOption(Some(Map(1 -> 1)))).toDS()
checkDataset(ds, WithMapInOption(Some(Map(1 -> 1))))
}
test("SPARK-20399: do not unescaped regex pattern when ESCAPED_STRING_LITERALS is enabled") {
withSQLConf(SQLConf.ESCAPED_STRING_LITERALS.key -> "true") {
val data = Seq("\\u0020\\u0021\\u0023", "abc")
val df = data.toDF()
val rlike1 = df.filter("value rlike '^\\\\x20[\\\\x20-\\\\x23]+$'")
val rlike2 = df.filter($"value".rlike("^\\\\x20[\\\\x20-\\\\x23]+$"))
val rlike3 = df.filter("value rlike '^\\\\\\\\x20[\\\\\\\\x20-\\\\\\\\x23]+$'")
checkAnswer(rlike1, rlike2)
assert(rlike3.count() == 0)
}
}
test("SPARK-21538: Attribute resolution inconsistency in Dataset API") {
val df = spark.range(3).withColumnRenamed("id", "x")
    val expected = Row(0) :: Row(1) :: Row(2) :: Nil
checkAnswer(df.sort("id"), expected)
checkAnswer(df.sort(col("id")), expected)
checkAnswer(df.sort($"id"), expected)
checkAnswer(df.sort('id), expected)
checkAnswer(df.orderBy("id"), expected)
checkAnswer(df.orderBy(col("id")), expected)
checkAnswer(df.orderBy($"id"), expected)
checkAnswer(df.orderBy('id), expected)
}
test("SPARK-21567: Dataset should work with type alias") {
checkDataset(
Seq(1).toDS().map(_ => ("", TestForTypeAlias.tupleTypeAlias)),
("", (1, 1)))
checkDataset(
Seq(1).toDS().map(_ => ("", TestForTypeAlias.nestedTupleTypeAlias)),
("", ((1, 1), 2)))
checkDataset(
Seq(1).toDS().map(_ => ("", TestForTypeAlias.seqOfTupleTypeAlias)),
("", Seq((1, 1), (2, 2))))
}
}
case class WithImmutableMap(id: String, map_test: scala.collection.immutable.Map[Long, String])
case class WithMap(id: String, map_test: scala.collection.Map[Long, String])
case class WithMapInOption(m: Option[scala.collection.Map[Int, Int]])
case class Generic[T](id: T, value: Double)
case class OtherTuple(_1: String, _2: Int)
case class TupleClass(data: (Int, String))
class OuterClass extends Serializable {
case class InnerClass(a: String)
}
object OuterObject {
case class InnerClass(a: String)
}
case class ClassData(a: String, b: Int)
case class ClassData2(c: String, d: Int)
case class ClassNullableData(a: String, b: Integer)
case class NestedStruct(f: ClassData)
case class DeepNestedStruct(f: NestedStruct)
case class InvalidInJava(`abstract`: Int)
/**
* A class used to test serialization using encoders. This class throws exceptions when using
* Java serialization -- so the only way it can be "serialized" is through our encoders.
*/
case class NonSerializableCaseClass(value: String) extends Externalizable {
override def readExternal(in: ObjectInput): Unit = {
throw new UnsupportedOperationException
}
override def writeExternal(out: ObjectOutput): Unit = {
throw new UnsupportedOperationException
}
}
/** Used to test Kryo encoder. */
class KryoData(val a: Int) {
override def equals(other: Any): Boolean = {
a == other.asInstanceOf[KryoData].a
}
override def hashCode: Int = a
override def toString: String = s"KryoData($a)"
}
object KryoData {
def apply(a: Int): KryoData = new KryoData(a)
}
/** Used to test Java encoder. */
class JavaData(val a: Int) extends Serializable {
override def equals(other: Any): Boolean = {
a == other.asInstanceOf[JavaData].a
}
override def hashCode: Int = a
override def toString: String = s"JavaData($a)"
}
object JavaData {
def apply(a: Int): JavaData = new JavaData(a)
}
/** Used to test importing dataset.spark.implicits._ */
object DatasetTransform {
def addOne(ds: Dataset[Int]): Dataset[Int] = {
import ds.sparkSession.implicits._
ds.map(_ + 1)
}
}
case class Route(src: String, dest: String, cost: Int)
case class GroupedRoutes(src: String, dest: String, routes: Seq[Route])
case class CircularReferenceClassA(cls: CircularReferenceClassB)
case class CircularReferenceClassB(cls: CircularReferenceClassA)
case class CircularReferenceClassC(ar: Array[CircularReferenceClassC])
case class CircularReferenceClassD(map: Map[String, CircularReferenceClassE])
case class CircularReferenceClassE(id: String, list: List[CircularReferenceClassD])
| narahari92/spark | sql/core/src/test/scala/org/apache/spark/sql/DatasetSuite.scala | Scala | apache-2.0 | 49,374 |
package com.jeff.megaupload.client
import java.io.FileInputStream
import java.net.{DatagramPacket, DatagramSocket, InetAddress, SocketTimeoutException}
import com.jeff.megaupload.constant.Constants.{PACKET_SIZE, PAYLOAD_SIZE}
import com.jeff.megaupload.constant.{Constants, Flags, PacketMan}
import scala.collection.mutable.{Map => MutMap}
class Client {
private val socket = new DatagramSocket()
private val readPacket = new DatagramPacket(new Array[Byte](PACKET_SIZE), 0, PACKET_SIZE)
private val PACKETS_IN_CHUNKS = 100000
private val buffer = new Array[Byte](PAYLOAD_SIZE)
def uploadFile(fileName: String, path: String, windowSize: Int, destAdd: InetAddress, destPort: Int): Unit = {
initTransfer(fileName, windowSize, destAdd, destPort)
processTransfer(new FileInputStream(path), windowSize, destAdd, destPort)
end(destAdd, destPort)
socket.close()
}
private def initTransfer(fileName: String, windowSize: Int, destAddress: InetAddress, destPort: Int): Unit = {
val prev = socket.getSoTimeout
socket.setSoTimeout(Constants.TIME_OUT)
val initPacket = PacketMan.makePacket(PacketMan.makeInitPayload(fileName, windowSize), destAddress, destPort)
var acknowledged = false
while (!acknowledged) {
socket.send(initPacket)
try {
socket.receive(readPacket)
acknowledged = true
} catch {
case s: SocketTimeoutException =>
case t: Throwable => throw t
}
}
socket.setSoTimeout(prev)
}
private def processTransfer(stream: FileInputStream, windowSize: Int, destAddress: InetAddress, destPort: Int): Unit = {
var seqNumber = 1
var done = false
while (!done) {
val min = seqNumber
val builder = MutMap[Int, Array[Byte]]()
for (i <- 0 until PACKETS_IN_CHUNKS if !done) {
val readCount = stream.read(buffer)
readCount match {
case -1 =>
done = true
case _ =>
val out = buffer.slice(0, readCount)
builder += seqNumber -> out
seqNumber += 1
}
}
val max = seqNumber - 1
sendData(builder, windowSize, destAddress, destPort, min, max)
}
}
private def sendData(packets: MutMap[Int, Array[Byte]], windowSize: Int,
destAddress: InetAddress, destPort: Int, min: Int, max: Int): Unit = {
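    // Sends packets [min, max] in windows of `windowSize`; after each window it blocks
    // until a non-negative acknowledgement sequence number arrives and restarts the next
    // window from that number, so anything past the acknowledged point is resent.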
var counter = min
while (counter <= max) {
val localMax = Math.min(counter + windowSize, max)
while (counter <= localMax) {
socket.send(PacketMan.makePacket(PacketMan.makePayload(counter,
packets.get(counter).get), destAddress, destPort))
counter += 1
}
socket.receive(readPacket)
var positive = false
var highest = 1
while (!positive) {
socket.receive(readPacket)
val seq = PacketMan.seqAndRawData(readPacket)._1
if (seq >= 0) {
positive = true
highest = seq
}
}
counter = highest
}
}
private def end(destAddress: InetAddress, destPort: Int): Unit = {
val prev = socket.getSoTimeout
socket.setSoTimeout(Constants.TIME_OUT)
val endPacket = PacketMan.makePacket(PacketMan.makeSeqPayload(Flags.END.id), destAddress, destPort)
var acknowledged = false
while (!acknowledged) {
socket.send(endPacket)
try {
socket.receive(readPacket)
acknowledged = true
} catch {
case s: SocketTimeoutException =>
case t: Throwable => throw t
}
}
socket.setSoTimeout(prev)
}
}
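// Hypothetical usage sketch (not part of the original file); the file name, path, window
// size, host and port below are illustrative assumptions rather than project defaults.
//
//   val client = new Client
//   client.uploadFile(
//     fileName   = "video.mp4",
//     path       = "/tmp/video.mp4",
//     windowSize = 10,
//     destAdd    = InetAddress.getByName("127.0.0.1"),
//     destPort   = 4444)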
| jregistr/Academia | CSC445-Computer-Networks/MegaUpload/client/src/main/com/jeff/megaupload/client/Client.scala | Scala | mit | 3,568 |
/*
 * Copyright 2014 – 2018 Paul Horn
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package scalax.transducers.contrib
import scala.util.Try
import scalax.transducers.Reducer
import scalax.transducers.internal.Reduced
import org.reactivestreams.{Subscriber, Subscription}
import scala.annotation.tailrec
import java.util.concurrent.atomic.AtomicLong
import java.util.concurrent.{ArrayBlockingQueue, BlockingQueue}
final class PublisherState[A, B](downstream: Subscriber[_ >: B], bufferSize: Int = 1024) {
private[contrib] val reducer: Reducer[B, Unit] = new Reducer[B, Unit] {
def prepare(r: Unit, s: Reduced): Unit = ()
def apply(r: Unit, a: B, s: Reduced): Unit = sendRightValue(a)
def apply(r: Unit): Unit = ()
}
private[this] val upstreamSub = new AtomicSubscription
private[this] val reduced = new Reduced
private[this] val demand = new AtomicLong
private[this] val inputBuffer = new ArrayBlockingQueue[A](bufferSize)
private[this] val outputBuffer = new ArrayBlockingQueue[B](bufferSize)
def subscriber(reducer: Reducer[A, Unit]): Subscriber[A] = new Subscriber[A] {
def onSubscribe(s: Subscription): Unit =
upstreamSub.set(s)
def onError(t: Throwable): Unit =
downstream.onError(t)
def onComplete(): Unit =
downstream.onComplete()
def onNext(t: A): Unit =
safeSendLeftValue(t, reducer)
}
private[this] def safeSendLeftValue(a: A, reducer: Reducer[A, Unit]): Unit = {
if (demand.get() > 0) {
sendLeftValue(a, reducer)
}
else {
inputBuffer.offer(a)
()
}
}
def subscription(reducer: Reducer[A, Unit]): Subscription = new Subscription {
def request(n: Long): Unit = {
val outstanding =
drainBuffers(demand.addAndGet(n), reducer)
if (reduced.? && outputBuffer.isEmpty) {
downstream.onComplete()
upstreamSub.cancel()
}
else if (outstanding > 0) {
upstreamSub.request(n)
}
}
def cancel(): Unit = {
reduced(())
upstreamSub.cancel()
}
}
private[this] def drainBuffers(requested: Long, reducer: Reducer[A, Unit]): Long = {
val outstanding =
drainBuffer(requested, outputBuffer, sendRightValue)
drainBuffer(outstanding, inputBuffer, sendLeftValue(_: A, reducer))
}
private[this] def sendRightValue(b: B): Unit = {
if (demand.getAndDecrement > 0) {
downstream.onNext(b)
}
else {
demand.incrementAndGet()
outputBuffer.offer(b)
()
}
}
private[this] def sendLeftValue(a: A, reducer: Reducer[A, Unit]): Unit = {
Try {
if (!reduced.?) {
reducer((), a, reduced)
if (reduced.? && outputBuffer.isEmpty) {
downstream.onComplete()
upstreamSub.cancel()
}
}
} recover {
      case t ⇒ downstream.onError(t)
}
()
}
  private[this] def drainBuffer[X](requested: Long, queue: BlockingQueue[X], sending: X ⇒ Unit): Long = {
@tailrec
def go(requested: Long, buffered: Int): Long =
if (requested > 0 && buffered > 0) {
sending(queue.take())
go(demand.get(), queue.size())
}
else {
requested
}
go(requested, queue.size())
}
}
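// Hypothetical wiring sketch (not part of the original file): one way the pieces above
// could be connected. `adapt` stands in for whatever turns the output-side
// Reducer[B, Unit] into an input-side Reducer[A, Unit] (e.g. applying a transducer);
// its exact shape here is an assumption.
//
//   def connect[A, B](adapt: Reducer[B, Unit] ⇒ Reducer[A, Unit],
//                     downstream: Subscriber[_ >: B]): Subscriber[A] = {
//     val state   = new PublisherState[A, B](downstream)
//     val reducer = adapt(state.reducer)
//     downstream.onSubscribe(state.subscription(reducer))
//     state.subscriber(reducer) // subscribe this to the upstream Publisher
//   }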
| knutwalker/transducers-scala | contrib/reactive-streams/src/main/scala/scalax/transducers/contrib/PublisherState.scala | Scala | apache-2.0 | 3,743 |
package se.gigurra.renderer.glimpl.drawstrategy.multi
import com.jogamp.common.nio.Buffers
import javax.media.opengl.GL3
import se.gigurra.renderer.Color
import se.gigurra.renderer.Transform
import se.gigurra.renderer.glimpl.megavbo.MegaVboModelDescriptor
import se.gigurra.renderer.glimpl.uniforms.SimpleUniform
class SimpleDrawBatch(val maxItems: Int) {
private val items = new Array[MegaVboModelDescriptor](maxItems)
private val vertexOffsets = Buffers.newDirectIntBuffer(maxItems)
private val vertexCounts = Buffers.newDirectIntBuffer(maxItems)
private val colorScales = Buffers.newDirectFloatBuffer(maxItems * 4)
private val transforms = Buffers.newDirectFloatBuffer(maxItems * 16)
private var nItems = 0
private var primType = 0
final def add(
item: MegaVboModelDescriptor,
transform: Transform,
colorScale: Color) {
items(nItems) = item
vertexOffsets.put(item.vertexOffset)
vertexCounts.put(item.nVertices)
colorScales.put(colorScale.array)
transforms.put(transform.array)
nItems += 1
primType = item.primType
}
final def draw(
gl3: GL3,
colorScaleUniform: SimpleUniform,
transformUniform: SimpleUniform) {
vertexOffsets.rewind()
vertexCounts.rewind()
colorScales.rewind()
transforms.rewind()
colorScaleUniform.setVectors(gl3, colorScales, nItems)
transformUniform.setMatrices(gl3, transforms, nItems)
gl3.glMultiDrawArrays(primType, vertexOffsets, vertexCounts, nItems)
}
final def reset() {
nItems = 0
}
final def canAdd(item: MegaVboModelDescriptor): Boolean = {
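    // Same primitive type as the items already batched (the multiplication makes the
    // comparison trivially true while the batch is empty) and room for one more item.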
nItems * primType == nItems * item.primType && nItems < maxItems
}
final def nonEmpty(): Boolean = {
nItems != 0
}
} | GiGurra/gigurra-scala-2drenderer | src/main/scala/se/gigurra/renderer/glimpl/drawstrategy/multi/SimpleDrawBatch.scala | Scala | mit | 1,748 |
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package controllers.responsiblepeople
import config.ApplicationConfig
import connectors.DataCacheConnector
import controllers.actions.SuccessfulAuthAction
import models.responsiblepeople.ResponsiblePerson._
import models.responsiblepeople._
import org.joda.time.LocalDate
import org.jsoup.Jsoup
import org.mockito.Matchers.{eq => meq, _}
import org.mockito.Mockito.{verify, when}
import org.scalatestplus.mockito.MockitoSugar
import play.api.inject.bind
import play.api.inject.guice.GuiceApplicationBuilder
import play.api.test.Helpers._
import uk.gov.hmrc.http.cache.client.CacheMap
import utils.{AmlsSpec, AuthAction}
import scala.concurrent.Future
class PersonUKPassportControllerSpec extends AmlsSpec with MockitoSugar {
trait Fixture {
self =>
val request = addToken(authRequest)
val dataCacheConnector = mock[DataCacheConnector]
val mockApplicationConfig = mock[ApplicationConfig]
lazy val app = new GuiceApplicationBuilder()
.disable[com.kenshoo.play.metrics.PlayModule]
.overrides(bind[DataCacheConnector].to(dataCacheConnector))
.overrides(bind[AuthAction].to(SuccessfulAuthAction))
.overrides(bind[ApplicationConfig].to(mockApplicationConfig))
.build()
val controller = app.injector.instanceOf[PersonUKPassportController]
val emptyCache = CacheMap("", Map.empty)
val mockCacheMap = mock[CacheMap]
val personName = PersonName("firstname", None, "lastname")
val ukPassportNumber = "000000000"
}
"PersonUKPassportController" when {
"get is called" must {
"return OK" when {
"data is not present" in new Fixture {
val responsiblePeople = ResponsiblePerson(Some(personName))
when(controller.dataCacheConnector.fetch[Seq[ResponsiblePerson]](any(), any())(any(), any()))
.thenReturn(Future.successful(Some(Seq(responsiblePeople))))
val result = controller.get(1)(request)
status(result) must be(OK)
val document = Jsoup.parse(contentAsString(result))
document.select("input[name=ukPassportNumber]").`val` must be("")
document.getElementById("ukPassport-true").hasAttr("checked") must be(false)
document.getElementById("ukPassport-false").hasAttr("checked") must be(false)
}
"data is present" in new Fixture {
val responsiblePeople = ResponsiblePerson(
personName = Some(personName),
ukPassport = Some(
UKPassportYes("000000000")
)
)
when(controller.dataCacheConnector.fetch[Seq[ResponsiblePerson]](any(), any())(any(), any()))
.thenReturn(Future.successful(Some(Seq(responsiblePeople))))
val result = controller.get(1)(request)
status(result) must be(OK)
val document = Jsoup.parse(contentAsString(result))
document.select("input[name=ukPassportNumber]").`val` must be("000000000")
document.getElementById("ukPassport-true").hasAttr("checked") must be(true)
}
}
"display Not Found" when {
"a populated ResponsiblePeople model cannot be found" in new Fixture {
when(controller.dataCacheConnector.fetch[Seq[ResponsiblePerson]](any(), any())(any(), any()))
.thenReturn(Future.successful(Some(Seq(ResponsiblePerson()))))
val result = controller.get(1)(request)
status(result) must be(NOT_FOUND)
}
}
}
"post is called" when {
"edit is false" must {
"go to CountryOfBirthController" when {
"uk passport number is provided" in new Fixture {
val newRequest = requestWithUrlEncodedBody(
"ukPassport" -> "true",
"ukPassportNumber" -> ukPassportNumber
)
val responsiblePeople = ResponsiblePerson()
when(controller.dataCacheConnector.fetch[Seq[ResponsiblePerson]](any(), any())(any(), any()))
.thenReturn(Future.successful(Some(Seq(ResponsiblePerson(personName = Some(personName))))))
when(mockCacheMap.getEntry[Seq[ResponsiblePerson]](any())(any()))
.thenReturn(Some(Seq(responsiblePeople)))
when(controller.dataCacheConnector.fetchAll(any())(any()))
.thenReturn(Future.successful(Some(mockCacheMap)))
when(controller.dataCacheConnector.save(any(), any(), any())(any(), any()))
.thenReturn(Future.successful(mockCacheMap))
val result = controller.post(1)(newRequest)
status(result) must be(SEE_OTHER)
redirectLocation(result) must be(Some(controllers.responsiblepeople.routes.CountryOfBirthController.get(1).url))
}
}
"go to PersonNonUKPassportController" when {
"no uk passport" in new Fixture {
val newRequest = requestWithUrlEncodedBody(
"ukPassport" -> "false"
)
val responsiblePeople = ResponsiblePerson()
when(controller.dataCacheConnector.fetch[Seq[ResponsiblePerson]](any(), any())(any(), any()))
.thenReturn(Future.successful(Some(Seq(ResponsiblePerson(personName = Some(personName))))))
when(mockCacheMap.getEntry[Seq[ResponsiblePerson]](any())(any()))
.thenReturn(Some(Seq(responsiblePeople)))
when(controller.dataCacheConnector.fetchAll(any())(any()))
.thenReturn(Future.successful(Some(mockCacheMap)))
when(controller.dataCacheConnector.save(any(), any(), any())(any(), any()))
.thenReturn(Future.successful(mockCacheMap))
val result = controller.post(1)(newRequest)
status(result) must be(SEE_OTHER)
redirectLocation(result) must be(Some(controllers.responsiblepeople.routes.PersonNonUKPassportController.get(1).url))
}
"existing data is present" in new Fixture {
val newRequest = requestWithUrlEncodedBody(
"ukPassport" -> "false"
)
val responsiblePeople = ResponsiblePerson(
ukPassport = Some(UKPassportNo)
)
when(controller.dataCacheConnector.fetch[Seq[ResponsiblePerson]](any(), any())(any(), any()))
.thenReturn(Future.successful(Some(Seq(ResponsiblePerson(personName = Some(personName))))))
when(mockCacheMap.getEntry[Seq[ResponsiblePerson]](any())(any()))
.thenReturn(Some(Seq(responsiblePeople)))
when(controller.dataCacheConnector.fetchAll(any())(any()))
.thenReturn(Future.successful(Some(mockCacheMap)))
when(controller.dataCacheConnector.save(any(), any(), any())(any(), any()))
.thenReturn(Future.successful(mockCacheMap))
val result = controller.post(1, false)(newRequest)
status(result) must be(SEE_OTHER)
redirectLocation(result) must be(Some(controllers.responsiblepeople.routes.PersonNonUKPassportController.get(1, false).url))
}
}
}
"edit is true" must {
"go to PersonNonUKPassportController" when {
"data is changed from uk passport to non uk passport" in new Fixture {
val newRequest = requestWithUrlEncodedBody(
"ukPassport" -> "false"
)
val responsiblePeople = ResponsiblePerson(
ukPassport = Some(UKPassportYes(ukPassportNumber))
)
when(controller.dataCacheConnector.fetch[Seq[ResponsiblePerson]](any(), any())(any(), any()))
.thenReturn(Future.successful(Some(Seq(ResponsiblePerson(personName = Some(personName))))))
when(mockCacheMap.getEntry[Seq[ResponsiblePerson]](any())(any()))
.thenReturn(Some(Seq(responsiblePeople)))
when(controller.dataCacheConnector.fetchAll(any())(any()))
.thenReturn(Future.successful(Some(mockCacheMap)))
when(controller.dataCacheConnector.save(any(), any(), any())(any(), any()))
.thenReturn(Future.successful(mockCacheMap))
val result = controller.post(1, true)(newRequest)
status(result) must be(SEE_OTHER)
redirectLocation(result) must be(Some(controllers.responsiblepeople.routes.PersonNonUKPassportController.get(1, true).url))
}
}
"go to DetailedAnswersController" when {
"uk passport" in new Fixture {
val newRequest = requestWithUrlEncodedBody(
"ukPassport" -> "true",
"ukPassportNumber" -> ukPassportNumber
)
val responsiblePeople = ResponsiblePerson(
ukPassport = Some(UKPassportNo),
dateOfBirth = Some(DateOfBirth(new LocalDate(2001,12,1)))
)
when(controller.dataCacheConnector.fetch[Seq[ResponsiblePerson]](any(), any())(any(), any()))
.thenReturn(Future.successful(Some(Seq(ResponsiblePerson(personName = Some(personName))))))
when(mockCacheMap.getEntry[Seq[ResponsiblePerson]](any())(any()))
.thenReturn(Some(Seq(responsiblePeople)))
when(controller.dataCacheConnector.fetchAll(any())(any()))
.thenReturn(Future.successful(Some(mockCacheMap)))
when(controller.dataCacheConnector.save(any(), any(), any())(any(), any()))
.thenReturn(Future.successful(mockCacheMap))
val result = controller.post(1, true, Some(flowFromDeclaration))(newRequest)
status(result) must be(SEE_OTHER)
redirectLocation(result) must be(Some(controllers.responsiblepeople.routes.DetailedAnswersController.get(1, Some(flowFromDeclaration)).url))
}
"non uk passport has not been changed" in new Fixture {
val newRequest = requestWithUrlEncodedBody(
"ukPassport" -> "false"
)
val responsiblePeople = ResponsiblePerson(
ukPassport = Some(UKPassportNo)
)
when(controller.dataCacheConnector.fetch[Seq[ResponsiblePerson]](any(), any())(any(), any()))
.thenReturn(Future.successful(Some(Seq(ResponsiblePerson(personName = Some(personName))))))
when(mockCacheMap.getEntry[Seq[ResponsiblePerson]](any())(any()))
.thenReturn(Some(Seq(responsiblePeople)))
when(controller.dataCacheConnector.fetchAll(any())(any()))
.thenReturn(Future.successful(Some(mockCacheMap)))
when(controller.dataCacheConnector.save(any(), any(), any())(any(), any()))
.thenReturn(Future.successful(mockCacheMap))
val result = controller.post(1, true, Some(flowFromDeclaration))(newRequest)
status(result) must be(SEE_OTHER)
redirectLocation(result) must be(Some(controllers.responsiblepeople.routes.DetailedAnswersController.get(1, Some(flowFromDeclaration)).url))
}
}
}
"given invalid data" must {
"respond with BAD_REQUEST" in new Fixture {
val newRequest = requestWithUrlEncodedBody(
"ukPassport" -> "true",
"ukPassportNumber" -> "abc"
)
val responsiblePeople = ResponsiblePerson()
when(controller.dataCacheConnector.fetch[Seq[ResponsiblePerson]](any(), any())(any(), any()))
.thenReturn(Future.successful(Some(Seq(ResponsiblePerson(personName = Some(personName))))))
when(mockCacheMap.getEntry[Seq[ResponsiblePerson]](any())(any()))
.thenReturn(Some(Seq(responsiblePeople)))
when(controller.dataCacheConnector.fetchAll(any())(any()))
.thenReturn(Future.successful(Some(mockCacheMap)))
when(controller.dataCacheConnector.save(any(), any(), any())(any(), any()))
.thenReturn(Future.successful(mockCacheMap))
val result = controller.post(1)(newRequest)
status(result) must be(BAD_REQUEST)
}
}
"Responsible Person cannot be found with given index" must {
"respond with NOT_FOUND" in new Fixture {
val newRequest = requestWithUrlEncodedBody(
"ukPassport" -> "false"
)
val responsiblePeople = ResponsiblePerson()
when(controller.dataCacheConnector.fetch[Seq[ResponsiblePerson]](any(), any())(any(), any()))
.thenReturn(Future.successful(Some(Seq(ResponsiblePerson(personName = Some(personName))))))
when(mockCacheMap.getEntry[Seq[ResponsiblePerson]](any())(any()))
.thenReturn(Some(Seq(responsiblePeople)))
when(controller.dataCacheConnector.fetchAll(any())(any()))
.thenReturn(Future.successful(Some(mockCacheMap)))
when(controller.dataCacheConnector.save(any(), any(), any())(any(), any()))
.thenReturn(Future.successful(mockCacheMap))
val result = controller.post(10)(newRequest)
status(result) must be(NOT_FOUND)
}
}
}
}
it must {
"remove non uk passport data excluding date of birth" when {
"data is changed to uk passport" in new Fixture {
val dateOfBirth = DateOfBirth(LocalDate.parse("2000-01-01"))
val newRequest = requestWithUrlEncodedBody(
"ukPassport" -> "true",
"ukPassportNumber" -> ukPassportNumber
)
val responsiblePeople = ResponsiblePerson(
ukPassport = Some(UKPassportNo),
nonUKPassport = Some(NoPassport),
dateOfBirth = Some(dateOfBirth)
)
when(controller.dataCacheConnector.fetch[Seq[ResponsiblePerson]](any(), any())(any(), any()))
.thenReturn(Future.successful(Some(Seq(ResponsiblePerson(personName = Some(personName), dateOfBirth = Some(dateOfBirth))))))
when(mockCacheMap.getEntry[Seq[ResponsiblePerson]](any())(any()))
.thenReturn(Some(Seq(responsiblePeople)))
when(controller.dataCacheConnector.fetchAll(any())(any()))
.thenReturn(Future.successful(Some(mockCacheMap)))
when(controller.dataCacheConnector.save(any(), any(), any())(any(), any()))
.thenReturn(Future.successful(mockCacheMap))
val result = controller.post(1, true)(newRequest)
status(result) must be(SEE_OTHER)
verify(controller.dataCacheConnector)
.save[Seq[ResponsiblePerson]](any(), any(), meq(Seq(responsiblePeople.copy(
ukPassport = Some(UKPassportYes(ukPassportNumber)),
nonUKPassport = None,
dateOfBirth = Some(dateOfBirth),
hasChanged = true
))))(any(), any())
}
}
}
}
| hmrc/amls-frontend | test/controllers/responsiblepeople/PersonUKPassportControllerSpec.scala | Scala | apache-2.0 | 15,264 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql
import org.apache.spark.{SparkConf, SparkFunSuite}
import org.apache.spark.serializer.JavaSerializer
import org.apache.spark.sql.test.SharedSQLContext
class SerializationSuite extends SparkFunSuite with SharedSQLContext {
test("[SPARK-5235] SQLContext should be serializable") {
val spark = SparkSession.builder.getOrCreate()
new JavaSerializer(new SparkConf()).newInstance().serialize(spark.sqlContext)
}
test("[SPARK-26409] SQLConf should be serializable") {
val spark = SparkSession.builder.getOrCreate()
new JavaSerializer(new SparkConf()).newInstance().serialize(spark.sessionState.conf)
}
}
| WindCanDie/spark | sql/core/src/test/scala/org/apache/spark/sql/SerializationSuite.scala | Scala | apache-2.0 | 1,456 |
/**
* Copyright (C) 2009-2014 Typesafe Inc. <http://www.typesafe.com>
*/
package akka.actor
import akka.testkit.{AkkaSpec => MyFavoriteTestFrameWorkPlusAkkaTestKit}
import language.postfixOps
//#test-code
import scala.collection.immutable
class FSMDocSpec extends MyFavoriteTestFrameWorkPlusAkkaTestKit {
//#fsm-code-elided
//#simple-imports
import scala.concurrent.duration._
//#simple-imports
//#simple-events
// received events
case class SetTarget(ref: ActorRef)
case class Queue(obj: Any)
case object Flush
// sent events
case class Batch(obj: immutable.Seq[Any])
//#simple-events
//#simple-state
// states
sealed trait State
case object Idle extends State
case object Active extends State
sealed trait Data
case object Uninitialized extends Data
case class Todo(target: ActorRef, queue: immutable.Seq[Any]) extends Data
//#simple-state
//#simple-fsm
class Buncher extends Actor with FSM[State, Data] {
//#fsm-body
startWith(Idle, Uninitialized)
//#when-syntax
when(Idle) {
case Event(SetTarget(ref), Uninitialized) =>
stay using Todo(ref, Vector.empty)
}
//#when-syntax
//#transition-elided
onTransition {
case Active -> Idle =>
stateData match {
case Todo(ref, queue) => ref ! Batch(queue)
}
}
//#transition-elided
//#when-syntax
when(Active, stateTimeout = 1 second) {
case Event(Flush | StateTimeout, t: Todo) =>
goto(Idle) using t.copy(queue = Vector.empty)
}
//#when-syntax
//#unhandled-elided
whenUnhandled {
// common code for both states
case Event(Queue(obj), t @ Todo(_, v)) =>
goto(Active) using t.copy(queue = v :+ obj)
case Event(e, s) =>
log.warning("received unhandled request {} in state {}/{}", e, stateName, s)
stay
}
//#unhandled-elided
//#fsm-body
initialize()
}
//#simple-fsm
object DemoCode {
trait StateType
case object SomeState extends StateType
case object Processing extends StateType
case object Error extends StateType
case object Idle extends StateType
case object Active extends StateType
class Dummy extends Actor with FSM[StateType, Int] {
class X
val newData = 42
object WillDo
object Tick
//#modifier-syntax
when(SomeState) {
case Event(msg, _) =>
goto(Processing) using (newData) forMax (5 seconds) replying (WillDo)
}
//#modifier-syntax
//#transition-syntax
onTransition {
case Idle -> Active => setTimer("timeout", Tick, 1 second, true)
case Active -> _ => cancelTimer("timeout")
case x -> Idle => log.info("entering Idle from " + x)
}
//#transition-syntax
//#alt-transition-syntax
onTransition(handler _)
def handler(from: StateType, to: StateType) {
// handle it here ...
}
//#alt-transition-syntax
//#stop-syntax
when(Error) {
case Event("stop", _) =>
// do cleanup ...
stop()
}
//#stop-syntax
//#transform-syntax
when(SomeState)(transform {
case Event(bytes: ByteString, read) => stay using (read + bytes.length)
} using {
case s @ FSM.State(state, read, timeout, stopReason, replies) if read > 1000 =>
goto(Processing)
})
//#transform-syntax
//#alt-transform-syntax
val processingTrigger: PartialFunction[State, State] = {
case s @ FSM.State(state, read, timeout, stopReason, replies) if read > 1000 =>
goto(Processing)
}
when(SomeState)(transform {
case Event(bytes: ByteString, read) => stay using (read + bytes.length)
} using processingTrigger)
//#alt-transform-syntax
//#termination-syntax
onTermination {
case StopEvent(FSM.Normal, state, data) => // ...
case StopEvent(FSM.Shutdown, state, data) => // ...
case StopEvent(FSM.Failure(cause), state, data) => // ...
}
//#termination-syntax
//#unhandled-syntax
whenUnhandled {
case Event(x: X, data) =>
log.info("Received unhandled event: " + x)
stay
case Event(msg, _) =>
log.warning("Received unknown event: " + msg)
goto(Error)
}
//#unhandled-syntax
}
//#logging-fsm
class MyFSM extends Actor with LoggingFSM[StateType, Data] {
//#body-elided
override def logDepth = 12
onTermination {
case StopEvent(FSM.Failure(_), state, data) =>
val lastEvents = getLog.mkString("\\n\\t")
log.warning("Failure in state " + state + " with data " + data + "\\n" +
"Events leading up to this point:\\n\\t" + lastEvents)
}
// ...
//#body-elided
}
//#logging-fsm
}
//#fsm-code-elided
"simple finite state machine" must {
"demonstrate NullFunction" in {
class A extends Actor with FSM[Int, Null] {
val SomeState = 0
//#NullFunction
when(SomeState)(FSM.NullFunction)
//#NullFunction
}
}
"batch correctly" in {
val buncher = system.actorOf(Props(classOf[Buncher], this))
buncher ! SetTarget(testActor)
buncher ! Queue(42)
buncher ! Queue(43)
expectMsg(Batch(immutable.Seq(42, 43)))
buncher ! Queue(44)
buncher ! Flush
buncher ! Queue(45)
expectMsg(Batch(immutable.Seq(44)))
expectMsg(Batch(immutable.Seq(45)))
}
"not batch if uninitialized" in {
val buncher = system.actorOf(Props(classOf[Buncher], this))
buncher ! Queue(42)
expectNoMsg
}
}
}
//#test-code
| ktoso/asciidoctor-sbt-plugin | src/sbt-test/sbt-asciidoctor/simple-doc/src/test/scala/akka/actor/FSMDocSpec.scala | Scala | apache-2.0 | 5,755 |
package models.gitolite
import org.joda.time.DateTime
import org.specs2.mutable._
import scalikejdbc._
import scalikejdbc.specs2.mutable.AutoRollback
class WikisSpec extends Specification {
"Wikis" should {
val w = Wikis.syntax("w")
"find by primary keys" in new AutoRollback {
val maybeFound = Wikis.find(123)
maybeFound.isDefined should beTrue
}
"find by where clauses" in new AutoRollback {
val maybeFound = Wikis.findBy(sqls.eq(w.id, 123))
maybeFound.isDefined should beTrue
}
"find all records" in new AutoRollback {
val allResults = Wikis.findAll()
allResults.size should be_>(0)
}
"count all records" in new AutoRollback {
val count = Wikis.countAll()
count should be_>(0L)
}
"find all by where clauses" in new AutoRollback {
val results = Wikis.findAllBy(sqls.eq(w.id, 123))
results.size should be_>(0)
}
"count by where clauses" in new AutoRollback {
val count = Wikis.countBy(sqls.eq(w.id, 123))
count should be_>(0L)
}
"create new record" in new AutoRollback {
val created = Wikis.create(createdAt = DateTime.now, updatedAt = DateTime.now)
created should not beNull
}
"save a record" in new AutoRollback {
val entity = Wikis.findAll().head
// TODO modify something
val modified = entity
val updated = Wikis.save(modified)
updated should not equalTo(entity)
}
"destroy a record" in new AutoRollback {
val entity = Wikis.findAll().head
Wikis.destroy(entity)
val shouldBeNone = Wikis.find(123)
shouldBeNone.isDefined should beFalse
}
}
}
| thomaschoo/gitolite-to-gitbucket | src/test/scala/models/gitolite/WikisSpec.scala | Scala | mit | 1,671 |
package ru.tmtool.math
import ru.tmtool.math.arithmetic.ClosedInterval
import ru.tmtool.math.arithmetic.ClosedInterval.SpecialDecimal
import ru.tmtool.math.probability.distributions.{Distribution, UniformDistribution}
import ru.tmtool.math.probability.generators.Generator
import ru.tmtool.math.probability.generators.logging.BasicLoggedGenerator
/**
* User: Sergey Kozlov [email protected]
* Date: 15.08.2014
* Time: 10:58
*/
package object probability {
def random[T](e1: T, other: (T, BigDecimal)*)(implicit generator: Generator = BasicLoggedGenerator): T = Distribution(e1, other:_*).random()
def randomBetween(min: BigDecimal, max: BigDecimal)(implicit generator: Generator = BasicLoggedGenerator): BigDecimal = UniformDistribution(min toInclusive max).random()
def randomFrom(interval: ClosedInterval)(implicit generator: Generator = BasicLoggedGenerator): BigDecimal = UniformDistribution(interval).random()
}
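// Hypothetical usage sketch (not part of the original file); the weights and bounds are
// illustrative assumptions, and the default BasicLoggedGenerator is used implicitly.
//
//   import ru.tmtool.math.probability._
//   import ru.tmtool.math.arithmetic.ClosedInterval.SpecialDecimal
//
//   val side  = random("heads", ("tails", BigDecimal(0.5)))         // weighted pick
//   val value = randomBetween(BigDecimal(0), BigDecimal(10))        // uniform draw
//   val other = randomFrom(BigDecimal(1) toInclusive BigDecimal(5)) // draw from interval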
| tmtool/math | src/main/scala/ru/tmtool/math/probability/package.scala | Scala | mit | 929 |
package merkelonrails.ast
import org.objectweb.asm.MethodVisitor
import merkelonrails.SymbolTable
import org.objectweb.asm.Opcodes._
import org.objectweb.asm.Label
case class ConditionNode(condition: OperandNode, ifBranch: List[AstNode], elseBranch: List[AstNode]) extends StatementNode {
def generate(mv: MethodVisitor, symbolTable: SymbolTable) {
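    // Bytecode shape: <condition>; IFEQ falseLabel; <if-branch>; GOTO conclude;
    // falseLabel: <else-branch>; GOTO conclude; conclude: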
val conclude = new Label()
val falseLabel = new Label()
condition.generate(mv, symbolTable)
mv.visitJumpInsn(IFEQ, falseLabel)
ifBranch.foreach(_.generate(mv, symbolTable))
mv.visitJumpInsn(GOTO, conclude)
mv.visitLabel(falseLabel)
mv.visitFrame(F_FULL, symbolTable.size(), symbolTable.getStackFrame, 0, null)
elseBranch.foreach(_.generate(mv, symbolTable))
mv.visitJumpInsn(GOTO, conclude)
mv.visitLabel(conclude)
mv.visitFrame(F_SAME, 0, null, 0, null)
}
}
| ubertroll/MerkelOnRails | src/main/scala/merkelonrails/ast/ConditionNode.scala | Scala | apache-2.0 | 863 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.hive
/** Support for interacting with different versions of the HiveMetastoreClient */
package object client {
private[hive] sealed abstract class HiveVersion(
val fullVersion: String,
val extraDeps: Seq[String] = Nil,
val exclusions: Seq[String] = Nil)
// scalastyle:off
private[hive] object hive {
case object v12 extends HiveVersion("0.12.0")
case object v13 extends HiveVersion("0.13.1")
// Hive 0.14 depends on calcite 0.9.2-incubating-SNAPSHOT which does not exist in
// maven central anymore, so override those with a version that exists.
//
// The other excluded dependencies are also nowhere to be found, so exclude them explicitly. If
// they're needed by the metastore client, users will have to dig them out of somewhere and use
// configuration to point Spark at the correct jars.
case object v14 extends HiveVersion("0.14.0",
extraDeps = Seq("org.apache.calcite:calcite-core:1.3.0-incubating",
"org.apache.calcite:calcite-avatica:1.3.0-incubating"),
exclusions = Seq("org.pentaho:pentaho-aggdesigner-algorithm"))
case object v1_0 extends HiveVersion("1.0.0",
exclusions = Seq("eigenbase:eigenbase-properties",
"org.pentaho:pentaho-aggdesigner-algorithm",
"net.hydromatic:linq4j",
"net.hydromatic:quidem"))
// The curator dependency was added to the exclusions here because it seems to confuse the ivy
// library. org.apache.curator:curator is a pom dependency but ivy tries to find the jar for it,
// and fails.
case object v1_1 extends HiveVersion("1.1.0",
exclusions = Seq("eigenbase:eigenbase-properties",
"org.apache.curator:*",
"org.pentaho:pentaho-aggdesigner-algorithm",
"net.hydromatic:linq4j",
"net.hydromatic:quidem"))
case object v1_2 extends HiveVersion("1.2.2",
exclusions = Seq("eigenbase:eigenbase-properties",
"org.apache.curator:*",
"org.pentaho:pentaho-aggdesigner-algorithm",
"net.hydromatic:linq4j",
"net.hydromatic:quidem"))
case object v2_0 extends HiveVersion("2.0.1",
exclusions = Seq("org.apache.curator:*",
"org.pentaho:pentaho-aggdesigner-algorithm"))
case object v2_1 extends HiveVersion("2.1.1",
exclusions = Seq("org.apache.curator:*",
"org.pentaho:pentaho-aggdesigner-algorithm"))
val allSupportedHiveVersions = Set(v12, v13, v14, v1_0, v1_1, v1_2, v2_0, v2_1)
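    // Usage note (assumption, not part of the original file): which of these versions is
    // used at runtime is normally driven by configuration, e.g.
    //   spark.sql.hive.metastore.version=1.2.2
    //   spark.sql.hive.metastore.jars=maven   // or a classpath containing matching jars
    // The keys exist in Spark; the concrete values above are only illustrative.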
}
// scalastyle:on
}
| 1haodian/spark | sql/hive/src/main/scala/org/apache/spark/sql/hive/client/package.scala | Scala | apache-2.0 | 3,322 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark
import java.io.{FileDescriptor, InputStream}
import java.lang
import java.nio.ByteBuffer
import scala.collection.JavaConverters._
import scala.collection.mutable
import org.apache.hadoop.fs._
import org.apache.spark.internal.Logging
object DebugFilesystem extends Logging {
// Stores the set of active streams and their creation sites.
private val openStreams = mutable.Map.empty[FSDataInputStream, Throwable]
def addOpenStream(stream: FSDataInputStream): Unit = openStreams.synchronized {
openStreams.put(stream, new Throwable())
}
def clearOpenStreams(): Unit = openStreams.synchronized {
openStreams.clear()
}
def removeOpenStream(stream: FSDataInputStream): Unit = openStreams.synchronized {
openStreams.remove(stream)
}
def assertNoOpenStreams(): Unit = openStreams.synchronized {
val numOpen = openStreams.values.size
if (numOpen > 0) {
for (exc <- openStreams.values) {
logWarning("Leaked filesystem connection created at:")
exc.printStackTrace()
}
throw new IllegalStateException(s"There are $numOpen possibly leaked file streams.",
openStreams.values.head)
}
}
}
/**
* DebugFilesystem wraps file open calls to track all open connections. This can be used in tests
* to check that connections are not leaked.
*/
// TODO(ekl) we should consider always interposing this to expose num open conns as a metric
class DebugFilesystem extends LocalFileSystem {
import DebugFilesystem._
override def open(f: Path, bufferSize: Int): FSDataInputStream = {
val wrapped: FSDataInputStream = super.open(f, bufferSize)
addOpenStream(wrapped)
new FSDataInputStream(wrapped.getWrappedStream) {
override def setDropBehind(dropBehind: lang.Boolean): Unit = wrapped.setDropBehind(dropBehind)
override def getWrappedStream: InputStream = wrapped.getWrappedStream
override def getFileDescriptor: FileDescriptor = wrapped.getFileDescriptor
override def getPos: Long = wrapped.getPos
override def seekToNewSource(targetPos: Long): Boolean = wrapped.seekToNewSource(targetPos)
override def seek(desired: Long): Unit = wrapped.seek(desired)
override def setReadahead(readahead: lang.Long): Unit = wrapped.setReadahead(readahead)
override def read(position: Long, buffer: Array[Byte], offset: Int, length: Int): Int =
wrapped.read(position, buffer, offset, length)
override def read(buf: ByteBuffer): Int = wrapped.read(buf)
override def readFully(position: Long, buffer: Array[Byte], offset: Int, length: Int): Unit =
wrapped.readFully(position, buffer, offset, length)
override def readFully(position: Long, buffer: Array[Byte]): Unit =
wrapped.readFully(position, buffer)
override def available(): Int = wrapped.available()
override def mark(readlimit: Int): Unit = wrapped.mark(readlimit)
override def skip(n: Long): Long = wrapped.skip(n)
override def markSupported(): Boolean = wrapped.markSupported()
override def close(): Unit = {
try {
wrapped.close()
} finally {
removeOpenStream(wrapped)
}
}
override def read(): Int = wrapped.read()
override def reset(): Unit = wrapped.reset()
override def toString: String = wrapped.toString
override def equals(obj: scala.Any): Boolean = wrapped.equals(obj)
override def hashCode(): Int = wrapped.hashCode()
}
}
}
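  // Hypothetical usage sketch (not part of the original file): a test can route the local
  // "file" scheme through this class and assert that nothing leaked. The Hadoop
  // configuration keys below are assumptions about the surrounding test setup.
  //
  //   val conf = new SparkConf()
  //     .set("spark.hadoop.fs.file.impl", classOf[DebugFilesystem].getName)
  //     .set("spark.hadoop.fs.file.impl.disable.cache", "true")
  //
  //   DebugFilesystem.clearOpenStreams()    // before the test body
  //   // ... exercise code that reads local files ...
  //   DebugFilesystem.assertNoOpenStreams() // throws if any stream was left open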
| bravo-zhang/spark | core/src/test/scala/org/apache/spark/DebugFilesystem.scala | Scala | apache-2.0 | 4,312 |
package org.scalameta.annotations
import scala.language.experimental.macros
import scala.annotation.StaticAnnotation
import scala.reflect.macros.whitebox.Context
class contextful[T] extends StaticAnnotation {
def macroTransform(annottees: Any*): Any = macro ContextfulMacros.impl
}
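// Hypothetical usage sketch (not part of the original file): the annotation appends a
// fresh implicit parameter of the given type during macro expansion. `SourceContext` and
// `materialize` are illustrative assumptions.
//
//   @contextful[SourceContext]
//   def materialize[T](x: T): T = x
//   // expands, roughly, to:
//   // def materialize[T](x: T)(implicit src$1: SourceContext): T = x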
class ContextfulMacros(val c: Context) {
import c.universe._
import Flag._
def impl(annottees: Tree*): Tree = {
val q"new $_[$t]().macroTransform(..$_)" = c.macroApplication
def mkContextParameter(): ValDef = {
val prefix = if (t.toString.contains("SourceContext")) "src" else "c"
val name = c.freshName(TermName(prefix))
q"$SYNTHETIC implicit val $name: $t"
}
def transformMods(mods: Modifiers): Modifiers = {
val footprint = q"new _root_.org.scalameta.annotations.internal.contextful[$t]"
Modifiers(mods.flags, mods.privateWithin, mods.annotations ++ List(footprint))
}
def transformCdef(cdef: ClassDef): ClassDef = {
val q"$mods class $tpname[..$tparams] $ctorMods(...$paramss)(implicit ..$iparams) extends { ..$earlydefns } with ..$parents { $self => ..$stats }" = cdef
val iparams1 = iparams :+ mkContextParameter()
val mods1 = transformMods(mods)
q"$mods1 class $tpname[..$tparams] $ctorMods(...$paramss)(implicit ..$iparams1) extends { ..$earlydefns } with ..$parents { $self => ..$stats }"
}
def transformDdef(ddef: DefDef): DefDef = {
val q"$mods def $name[..$tparams](...$paramss)(implicit ..$iparams): $tpt = $body" = ddef
val iparams1 = iparams :+ mkContextParameter()
val mods1 = transformMods(mods)
q"$mods1 def $name[..$tparams](...$paramss)(implicit ..$iparams1): $tpt = $body"
}
val expanded = annottees match {
case (cdef: ClassDef) :: rest if !cdef.mods.hasFlag(TRAIT) => transformCdef(cdef) :: rest
case (ddef: DefDef) :: rest => transformDdef(ddef) :: rest
case annottee :: rest => c.abort(annottee.pos, "only classes and methods can be @contextful")
}
q"{ ..$expanded; () }"
}
} | beni55/scalameta | foundation/src/main/scala/org/scalameta/annotations/contextful.scala | Scala | bsd-3-clause | 2,039 |
package org.deepdive.test.unit
import akka.actor._
import akka.testkit._
import org.deepdive.calibration._
import org.deepdive.inference._
import org.scalatest._
import org.deepdive.settings.{BooleanType, VariableDataType}
import scala.util.Success
class Forwarder(target: ActorRef) extends Actor {
def receive = { case x => target.forward(x) }
}
class TestInferenceManager(
val taskManager: ActorRef,
val samplerProbe: ActorRef,
val factorGraphBuilderProbe: ActorRef,
val cdwProbe: ActorRef,
val variableSchema: Map[String, _ <: VariableDataType])
extends InferenceManager with MemoryInferenceDataStoreComponent {
def factorGraphBuilderProps = Props(classOf[Forwarder], factorGraphBuilderProbe)
override def samplerProps = Props(classOf[Forwarder], samplerProbe)
override def calibrationDataWriterProps = Props(classOf[Forwarder], cdwProbe)
}
class InferenceManagerSpec(_system: ActorSystem) extends TestKit(_system) with FunSpecLike with ImplicitSender {
def this() = this(ActorSystem("InferenceManagerSpec"))
val taskManager = TestProbe()
val sampler = TestProbe()
val factorGraphBuilder = TestProbe()
val cdw = TestProbe()
val schema = Map("r1.c1" -> BooleanType, "r2.c1" -> BooleanType, "r2.c2" -> BooleanType)
def actorProps = Props(classOf[TestInferenceManager], taskManager.ref, sampler.ref,
factorGraphBuilder.ref, cdw.ref, schema)
describe("Grounding the factor graph") {
// TODO
it("should work")(pending)
}
describe("Running inference") {
it("should work") {
val actor = TestActorRef(actorProps)
actor ! InferenceManager.RunInference("javaArgs", "samplerOptions")
sampler.expectMsgClass(classOf[Sampler.Run])
sampler.reply("Done")
expectMsg(())
}
}
describe("Writing calibration data") {
it("should work") {
val actor = TestActorRef(actorProps)
actor ! InferenceManager.WriteCalibrationData
cdw.expectMsgClass(classOf[CalibrationDataWriter.WriteCalibrationData])
cdw.reply("Done")
cdw.expectMsgClass(classOf[CalibrationDataWriter.WriteCalibrationData])
cdw.reply("Done")
cdw.expectMsgClass(classOf[CalibrationDataWriter.WriteCalibrationData])
cdw.reply("Done")
expectMsg(List("Done", "Done", "Done"))
}
}
} | dennybritz/deepdive | src/test/scala/unit/inference/InferenceManagerSpec.scala | Scala | apache-2.0 | 2,298 |
package org.jetbrains.plugins.dotty.lang.parser.parsing.expressions
/**
* @author adkozlov
*/
object Expr extends org.jetbrains.plugins.scala.lang.parser.parsing.expressions.Expr {
override protected def expr1 = Expr1
override protected def bindings = Bindings
}
| ilinum/intellij-scala | src/org/jetbrains/plugins/dotty/lang/parser/parsing/expressions/Expr.scala | Scala | apache-2.0 | 272 |
package com.hooboy.etjo.game
import org.newdawn.slick._
import com.hooboy.etjo.render._
import scala.io.Source
import com.hooboy.etjo.editor.main.Editor
import javax.swing.JFileChooser
import java.io.File
class ETJO extends BasicGame("Electronic Turbo Jerk-Off") {
val map = new GameMap(this, Source.fromFile(chooseFile).mkString.split(";").toSeq.map { row => row.split(",").toSeq.map { str => TileType.tiles(str.toInt) } })
private val renderer = new Renderer(map.width, map.height)
val players = Array(
new Player(this, PlayerConfig(Color.white,
Input.KEY_A,
Input.KEY_D,
Input.KEY_W,
Input.KEY_S,
0,
0.5F,
0.5F), map),
new Player(this, PlayerConfig(Color.black,
Input.KEY_LEFT,
Input.KEY_RIGHT,
Input.KEY_UP,
Input.KEY_DOWN,
2,
7.5F,
0.5F), map))
def chooseFile(): File = {
var chooser = new JFileChooser();
chooser.setCurrentDirectory(new java.io.File("C:\\\\Users\\\\Benjamin\\\\Desktop\\\\etjo workspace\\\\etjo\\\\saves"));
chooser.setDialogTitle("Choose a map");
chooser.setFileSelectionMode(JFileChooser.FILES_ONLY);
chooser.setAcceptAllFileFilterUsed(false);
if (chooser.showOpenDialog(null) == JFileChooser.APPROVE_OPTION) {
return chooser.getSelectedFile()
} else {
throw new IllegalStateException("No Selection")
}
}
override def init(container: GameContainer) {
}
override def update(container: GameContainer, delta: Int) {
WorldObjectRegistry tick (container, delta)
}
override def render(container: GameContainer, g: Graphics) {
renderer flush;
WorldObjectRegistry draw renderer
renderer render (container, g)
}
} | Z6fans/etjo | src/com/hooboy/etjo/game/ETJO.scala | Scala | mit | 2,078 |
package scalariform.gui
object Utils {
  def onSwingThread(proc: ⇒ Unit) = javax.swing.SwingUtilities.invokeLater(new Runnable() { def run() = proc })
import javax.swing.JTree
import javax.swing.tree._
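  /** Expands every path in the tree, visiting children depth-first before their parent. */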
def expandAll(tree: JTree) {
val root = tree.getModel().getRoot()
expandAll(tree, new TreePath(root))
}
private def expandAll(tree: JTree, parent: TreePath) {
val node = parent.getLastPathComponent()
val model = tree.getModel
val children = 0 until model.getChildCount(node) map { model.getChild(node, _) }
    for (child ← children) {
val path = parent.pathByAddingChild(child)
expandAll(tree, path)
}
tree.expandPath(parent)
}
}
| gawkermedia/scalariform | misc/src/main/scala/scalariform/gui/Utils.scala | Scala | mit | 695 |
/*
* Copyright 2013 - 2017 Outworkers Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.outworkers.morpheus.mysql.db.specialized
import com.outworkers.morpheus.DataType
import com.outworkers.morpheus.mysql.dsl._
import com.outworkers.morpheus.mysql.db.BaseSuite
import com.outworkers.morpheus.mysql.tables.{EnumerationRecord, EnumerationTable, TestEnumeration}
import com.outworkers.util.samplers.{Generators, Sample}
import com.outworkers.util.testing._
import org.scalatest.FlatSpec
import scala.concurrent.Await
import scala.concurrent.duration._
class EnumerationColumnTest extends FlatSpec with BaseSuite {
override def beforeAll(): Unit = {
super.beforeAll()
Await.result(EnumerationTable.create.ifNotExists.engine(InnoDB).future(), 5.seconds)
}
implicit object EnumerationRecordSampler extends Sample[EnumerationRecord] {
override def sample: EnumerationRecord = EnumerationRecord(gen[Int], Generators.oneOf(TestEnumeration))
}
implicit val enumPrimitive: DataType[TestEnumeration#Value] = SQLPrimitive(TestEnumeration)
it should "store a record with an enumeration defined inside it" in {
val record = gen[EnumerationRecord]
val chain = for {
store <- EnumerationTable.store(record).future()
get <- EnumerationTable.select.where(_.id eqs record.id).one
} yield get
whenReady(chain) { res =>
res.value shouldEqual record
}
}
}
| websudos/morpheus | morpheus-mysql/src/test/scala/com/outworkers/morpheus/mysql/db/specialized/EnumerationColumnTest.scala | Scala | bsd-2-clause | 1,937 |
package redstone.solver.util
import redstone._
object Utility {
def printSolution(initialBoard: Board, solutionBoard: Option[Board]) = {
    println("\\nThis is what the initial board looks like")
println(initialBoard)
if(!solutionBoard.isDefined) {
println("Failed to find a solution!")
} else {
val moves = solutionBoard.get.movesSoFar.reverse
println("These are the moves for the solution, # of moves: " + moves.size)
var solutionByStepBoard = initialBoard
var currentMove: Int = 1
for(move <- moves) {
println(currentMove + ": Move piece id: " + move._1 + " in direction: " + move._2)
solutionByStepBoard = Board(solutionByStepBoard, move)
println(solutionByStepBoard)
currentMove += 1
}
      println("\\nThis is what the final board looks like")
println(solutionBoard.get)
}
}
}
| skumargithub/redstone-solver | src/test/scala/redstone/solver/util/Utility.scala | Scala | gpl-2.0 | 910 |
package im.actor.server.group
import java.time.ZoneOffset
import im.actor.server.migrations.Migration
import slick.driver.PostgresDriver
import scala.concurrent.duration._
import scala.concurrent.{ Await, ExecutionContext, Future, Promise }
import akka.actor.{ ActorLogging, ActorSystem, Props }
import akka.pattern.pipe
import akka.persistence.{ PersistentActor, RecoveryCompleted }
import org.joda.time.DateTime
import slick.driver.PostgresDriver.api._
import im.actor.server.event.TSEvent
import im.actor.server.file.{ Avatar, AvatarImage, FileLocation }
import im.actor.server.{ persist ⇒ p, models }
private final case class Migrate(group: models.FullGroup, avatarData: Option[models.AvatarData], botUsers: Seq[models.GroupBot], groupUsers: Seq[models.GroupUser])
object GroupMigrator extends Migration {
protected override def migrationName: String = "2015-08-04-GroupsMigration"
protected override def migrationTimeout: Duration = 1.hour
protected override def startMigration()(implicit system: ActorSystem, db: PostgresDriver.api.Database, ec: ExecutionContext): Future[Unit] = {
    db.run(p.Group.allIds) flatMap (ids ⇒ Future.sequence(ids map migrateSingle)) map (_ ⇒ ())
}
private def migrateSingle(groupId: Int)(implicit system: ActorSystem, db: Database): Future[Unit] = {
val promise = Promise[Unit]()
system.actorOf(props(promise, groupId), name = s"migrate_group_${groupId}")
promise.future
}
private def props(promise: Promise[Unit], groupId: Int)(implicit db: Database) = Props(classOf[GroupMigrator], promise, groupId, db)
}
private final class GroupMigrator(promise: Promise[Unit], groupId: Int, db: Database) extends PersistentActor with ActorLogging {
import GroupEvents._
private implicit val ec: ExecutionContext = context.dispatcher
override def persistenceId = GroupOffice.persistenceIdFor(groupId)
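  /** Loads the group's legacy SQL rows (group, avatar, bots, members) and pipes them to self as a Migrate message. */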
private def migrate(): Unit = {
db.run(p.Group.findFull(groupId)) foreach {
      case Some(group) ⇒
        db.run(for {
          avatarOpt ← p.AvatarData.findByGroupId(groupId)
          bots ← p.GroupBot.findByGroup(groupId) map (_.map(Seq(_)).getOrElse(Seq.empty))
          users ← p.GroupUser.find(groupId)
} yield Migrate(
group = group,
avatarData = avatarOpt,
botUsers = bots,
groupUsers = users
)) pipeTo self onFailure {
          case e ⇒
log.error(e, "Failed to migrate group")
promise.failure(e)
context stop self
}
      case None ⇒
log.error("Group not found")
promise.failure(new Exception(s"Cannot find group ${groupId}"))
context stop self
}
}
override def receiveCommand: Receive = {
    case m @ Migrate(group, avatarDataOpt, botUsers, users) ⇒
      log.info("Migrate: {}", m)
      val created: TSEvent = TSEvent(group.createdAt, Created(group.id, group.creatorUserId, group.accessHash, group.title))
      val botAdded: Vector[TSEvent] = botUsers.toVector map { bu ⇒
TSEvent(group.createdAt, BotAdded(bu.userId, bu.token))
}
val becamePublic: Vector[TSEvent] =
if (group.isPublic)
Vector(TSEvent(group.createdAt, BecamePublic()))
else
Vector.empty
      val (userAdded, userJoined): (Vector[TSEvent], Vector[TSEvent]) = (users.toVector map { gu ⇒
        (TSEvent(gu.invitedAt, UserInvited(gu.userId, gu.inviterUserId)),
          gu.joinedAt map (ts ⇒ TSEvent(new DateTime(ts.toInstant(ZoneOffset.UTC).getEpochSecond() * 1000), UserJoined(gu.userId, gu.inviterUserId))))
      }).unzip match {
        case (i, j) ⇒ (i, j.flatten)
}
val avatarUpdated: Vector[TSEvent] = avatarDataOpt match {
case Some(models.AvatarData(_, _,
Some(smallFileId), Some(smallFileHash), Some(smallFileSize),
Some(largeFileId), Some(largeFileHash), Some(largeFileSize),
Some(fullFileId), Some(fullFileHash), Some(fullFileSize),
        Some(fullWidth), Some(fullHeight))) ⇒
Vector(TSEvent(group.avatarChangedAt, AvatarUpdated(Some(Avatar(
Some(AvatarImage(FileLocation(smallFileId, smallFileHash), 100, 100, smallFileSize.toLong)),
Some(AvatarImage(FileLocation(largeFileId, largeFileHash), 200, 200, largeFileSize.toLong)),
Some(AvatarImage(FileLocation(fullFileId, fullFileHash), fullWidth, fullHeight, fullFileSize.toLong))
)))))
        case _ ⇒ Vector.empty
}
val events: Vector[TSEvent] = created +: (botAdded ++ becamePublic ++ userAdded ++ userJoined ++ avatarUpdated).toVector
persistAsync(events)(identity)
      defer(TSEvent(new DateTime, "migrated")) { _ ⇒
log.info("Migrated")
promise.success(())
context stop self
}
}
private[this] var migrationNeeded = true
override def receiveRecover: Receive = {
    case TSEvent(_, _: Created) ⇒
migrationNeeded = false
    case RecoveryCompleted ⇒
if (migrationNeeded) {
migrate()
} else {
promise.success(())
context stop self
}
}
}
| berserkertdl/actor-platform | actor-server/actor-core/src/main/scala/im/actor/server/group/GroupMigrator.scala | Scala | mit | 5,122 |
package lila.user
import io.lemonlabs.uri.Url
import scala.util.Try
object Links {
def make(text: String): List[Link] = text.linesIterator.to(List).map(_.trim).flatMap(toLink)
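  // Matches the leading host (domain) portion of a line, with or without an http:// or https:// prefix.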
private val UrlRegex = """^(?:https?://)?+([^/]+)""".r.unanchored
private def toLink(line: String): Option[Link] =
line match {
case UrlRegex(domain) =>
Link.Site.allKnown find (_ matches domain) orElse
Try(Url.parse(domain).toStringPunycode).toOption.map(Link.Site.Other) map { site =>
Link(
site = site,
url = if (line startsWith "http") line else s"https://$line"
)
}
case _ => none
}
}
case class Link(site: Link.Site, url: String)
object Link {
sealed abstract class Site(val name: String, val domains: List[String]) {
def matches(domain: String) =
domains.exists { d =>
domain == d || domain.endsWith(s".$d")
}
}
object Site {
case object Twitter extends Site("Twitter", List("twitter.com"))
case object Facebook extends Site("Facebook", List("facebook.com"))
case object Instagram extends Site("Instagram", List("instagram.com"))
case object YouTube extends Site("YouTube", List("youtube.com"))
case object Twitch extends Site("Twitch", List("twitch.tv"))
case object GitHub extends Site("GitHub", List("github.com"))
case object VKontakte extends Site("VKontakte", List("vk.com"))
case object ChessCom extends Site("Chess.com", List("chess.com"))
case object Chess24 extends Site("Chess24", List("chess24.com"))
case object ChessTempo extends Site("ChessTempo", List("chesstempo.com"))
case class Other(domain: String) extends Site(domain, List(domain))
val allKnown: List[Site] = List(
Twitter,
Facebook,
Instagram,
YouTube,
Twitch,
GitHub,
VKontakte,
ChessCom,
Chess24,
ChessTempo
)
}
}
| luanlv/lila | modules/user/src/main/Links.scala | Scala | mit | 2,057 |
package strd.util
import java.net.{InetAddress, InetSocketAddress, NetworkInterface}
import com.eaio.util.lang.Hex
import scala.collection.mutable.ArrayBuffer
/**
*
* User: lembrd
* Date: 02/05/15
* Time: 16:11
*/
object InterfaceUtils {
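  /** All network interfaces on this host, enumerated once when this object is initialized. */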
val interfaces : Seq[IfaceInfo] = {
val ab = new ArrayBuffer[IfaceInfo]()
val enums = NetworkInterface.getNetworkInterfaces
while (enums.hasMoreElements) {
val e = enums.nextElement()
ab += IfaceInfo(e)
}
ab.toSeq
}
def hexMacForIface(iface : NetworkInterface) : String = {
Hex.append(new java.lang.StringBuilder(36), iface.getHardwareAddress).toString
}
def findByName(name : String ) : Option[IfaceInfo] = {
interfaces.find(x=>x.getName == name)
}
/*
if (e.isUp && !e.isLoopback && !e.isPointToPoint) {
val aa = e.getInetAddresses
while (aa.hasMoreElements) {
val ae = aa.nextElement()
if (ae.getHostAddress == localOutgoingIp) {
optIface = Some()
}
}
}
}
*/
}
case class IfaceInfo( iface: NetworkInterface) {
val addresses : Seq[InetAddress] = {
val ab = new ArrayBuffer[InetAddress]
val ae = iface.getInetAddresses
while (ae.hasMoreElements) {
ab += ae.nextElement()
}
ab.toSeq
}
def getParent: NetworkInterface = iface.getParent
def isPointToPoint: Boolean = iface.isPointToPoint
def isUp: Boolean = iface.isUp
def getDisplayName: String = iface.getDisplayName
def getHardwareAddress: Array[Byte] = iface.getHardwareAddress
def isLoopback: Boolean = iface.isLoopback
override def toString: String = iface.toString
def getIndex: Int = iface.getIndex
def isVirtual: Boolean = iface.isVirtual
def getName: String = iface.getName
def getHexMac : String = InterfaceUtils.hexMacForIface(iface)
def getMTU: Int = iface.getMTU
}
| onerinvestments/strd | strd-commons/src/main/scala/strd/util/InterfaceUtils.scala | Scala | apache-2.0 | 1,887 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.server.metadata
import java.util.Properties
import kafka.coordinator.group.GroupCoordinator
import kafka.coordinator.transaction.TransactionCoordinator
import kafka.log.LogConfig
import kafka.server.RaftReplicaManager
import kafka.utils.Implicits._
import org.apache.kafka.common.config.ConfigResource
import org.apache.kafka.common.metadata.{ConfigRecord, PartitionRecord, RemoveTopicRecord, TopicRecord}
import org.apache.kafka.common.protocol.ApiMessage
import org.apache.kafka.common.utils.MockTime
import org.apache.kafka.common.{TopicPartition, Uuid}
import org.junit.jupiter.api.Assertions._
import org.junit.jupiter.api.Test
import org.mockito.ArgumentMatchers._
import org.mockito.Mockito._
import org.mockito.{ArgumentCaptor, ArgumentMatchers}
import scala.collection.mutable
import scala.jdk.CollectionConverters._
class BrokerMetadataListenerTest {
private val brokerId = 1
private val time = new MockTime()
private val configRepository = new CachedConfigRepository
private val metadataCache = new RaftMetadataCache(brokerId)
private val groupCoordinator = mock(classOf[GroupCoordinator])
private val replicaManager = mock(classOf[RaftReplicaManager])
private val txnCoordinator = mock(classOf[TransactionCoordinator])
private val clientQuotaManager = mock(classOf[ClientQuotaMetadataManager])
private var lastMetadataOffset = 0L
private val listener = new BrokerMetadataListener(
brokerId,
time,
metadataCache,
configRepository,
groupCoordinator,
replicaManager,
txnCoordinator,
threadNamePrefix = None,
clientQuotaManager
)
@Test
def testTopicCreationAndDeletion(): Unit = {
val topicId = Uuid.randomUuid()
val topic = "foo"
val numPartitions = 10
val config = Map(
LogConfig.CleanupPolicyProp -> LogConfig.Compact,
LogConfig.MaxCompactionLagMsProp -> "5000"
)
val localPartitions = createAndAssert(topicId, topic, config, numPartitions, numBrokers = 4)
deleteTopic(topicId, topic, numPartitions, localPartitions)
}
private def deleteTopic(
topicId: Uuid,
topic: String,
numPartitions: Int,
localPartitions: Set[TopicPartition]
): Unit = {
val deleteRecord = new RemoveTopicRecord()
.setTopicId(topicId)
lastMetadataOffset += 1
listener.execCommits(lastOffset = lastMetadataOffset, List[ApiMessage](
deleteRecord,
).asJava)
assertFalse(metadataCache.contains(topic))
assertEquals(new Properties, configRepository.topicConfig(topic))
verify(groupCoordinator).handleDeletedPartitions(ArgumentMatchers.argThat[Seq[TopicPartition]] { partitions =>
partitions.toSet == partitionSet(topic, numPartitions)
})
val deleteImageCapture: ArgumentCaptor[MetadataImageBuilder] =
ArgumentCaptor.forClass(classOf[MetadataImageBuilder])
verify(replicaManager).handleMetadataRecords(
deleteImageCapture.capture(),
ArgumentMatchers.eq(lastMetadataOffset),
any()
)
val deleteImage = deleteImageCapture.getValue
assertTrue(deleteImage.hasPartitionChanges)
val localRemoved = deleteImage.partitionsBuilder().localRemoved()
assertEquals(localPartitions, localRemoved.map(_.toTopicPartition).toSet)
}
private def createAndAssert(
topicId: Uuid,
topic: String,
topicConfig: Map[String, String],
numPartitions: Int,
numBrokers: Int
): Set[TopicPartition] = {
val records = new java.util.ArrayList[ApiMessage]
records.add(new TopicRecord()
.setName(topic)
.setTopicId(topicId)
)
val localTopicPartitions = mutable.Set.empty[TopicPartition]
(0 until numPartitions).map { partitionId =>
val preferredLeaderId = partitionId % numBrokers
val replicas = asJavaList(Seq(
preferredLeaderId,
preferredLeaderId + 1,
preferredLeaderId + 2
))
if (replicas.contains(brokerId)) {
localTopicPartitions.add(new TopicPartition(topic, partitionId))
}
records.add(new PartitionRecord()
.setTopicId(topicId)
.setPartitionId(partitionId)
.setLeader(preferredLeaderId)
.setLeaderEpoch(0)
.setPartitionEpoch(0)
.setReplicas(replicas)
.setIsr(replicas)
)
}
topicConfig.forKeyValue { (key, value) =>
records.add(new ConfigRecord()
.setResourceName(topic)
.setResourceType(ConfigResource.Type.TOPIC.id())
.setName(key)
.setValue(value)
)
}
lastMetadataOffset += records.size()
listener.execCommits(lastOffset = lastMetadataOffset, records)
assertTrue(metadataCache.contains(topic))
assertEquals(Some(numPartitions), metadataCache.numPartitions(topic))
assertEquals(topicConfig, configRepository.topicConfig(topic).asScala)
val imageCapture: ArgumentCaptor[MetadataImageBuilder] =
ArgumentCaptor.forClass(classOf[MetadataImageBuilder])
verify(replicaManager).handleMetadataRecords(
imageCapture.capture(),
ArgumentMatchers.eq(lastMetadataOffset),
any()
)
val createImage = imageCapture.getValue
assertTrue(createImage.hasPartitionChanges)
val localChanged = createImage.partitionsBuilder().localChanged()
assertEquals(localTopicPartitions, localChanged.map(_.toTopicPartition).toSet)
localTopicPartitions.toSet
}
private def partitionSet(topic: String, numPartitions: Int): Set[TopicPartition] = {
(0 until numPartitions).map(new TopicPartition(topic, _)).toSet
}
private def asJavaList(replicas: Iterable[Int]): java.util.List[Integer] = {
replicas.map(Int.box).toList.asJava
}
}
| Chasego/kafka | core/src/test/scala/unit/kafka/server/metadata/BrokerMetadataListenerTest.scala | Scala | apache-2.0 | 6,443 |
package cs4r.labs.learningscala.ninetynineproblems
/**
* Find the Kth element of a list.
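 * Example: nth(2, List(1, 1, 2, 3, 5, 8)) == 2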
*/
object Problem3 {
def nth[T](n: Int, list: List[T]): T = (n, list) match {
case (0, x :: _) => x
case (n, _ :: tail) => nth(n - 1, tail)
case _ => throw new NoSuchElementException
}
}
| Cs4r/LearningScala | src/main/scala/cs4r/labs/learningscala/ninetynineproblems/Problem3.scala | Scala | gpl-3.0 | 298 |
/*
* Copyright 2011-2018 GatlingCorp (http://gatling.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gatling.http.action.async.sse
import io.gatling.core.action.ActorBasedAction
trait SseAction extends ActorBasedAction {
override val actorFetchErrorMessage = "Couldn't fetch open sse"
}
| wiacekm/gatling | gatling-http/src/main/scala/io/gatling/http/action/async/sse/SseAction.scala | Scala | apache-2.0 | 819 |
package dotty.partest
import dotty.tools.dotc.reporting.ConsoleReporter
import scala.tools.partest.{ TestState, nest }
import java.io.{ File, PrintWriter, FileWriter }
/* NOTE: Adapted from partest.DirectCompiler */
class DPDirectCompiler(runner: DPTestRunner) extends nest.DirectCompiler(runner) {
override def compile(opts0: List[String], sources: List[File]): TestState = {
val clogFWriter = new FileWriter(runner.cLogFile.jfile, true)
val clogWriter = new PrintWriter(clogFWriter, true)
    clogWriter.println("\ncompiling " + sources.mkString(" ") + "\noptions: " + opts0.mkString(" "))
try {
val processor =
if (opts0.exists(_.startsWith("#"))) dotty.tools.dotc.Bench else dotty.tools.dotc.Main
val clogger = new ConsoleReporter(writer = clogWriter)
val reporter = processor.process((sources.map(_.toString) ::: opts0).toArray, clogger)
if (!reporter.hasErrors) runner.genPass()
else {
clogWriter.println(reporter.summary)
runner.genFail(s"compilation failed with ${reporter.errorCount} errors")
}
} catch {
case t: Throwable =>
t.printStackTrace
t.printStackTrace(clogWriter)
runner.genCrash(t)
} finally {
clogFWriter.close
clogWriter.close
}
}
}
| densh/dotty | test/dotty/partest/DPDirectCompiler.scala | Scala | bsd-3-clause | 1,290 |
package vultura.calibration
import org.specs2.mutable.Specification
import org.specs2.specification.core.Fragment
import vultura.factor.inference.calibration.LBP
import vultura.factor.{Factor, FactorMatchers, NormalD, Problem}
import vultura.factor.generation._
import vultura.factor.inference.{ConvergenceStats, RegionBeliefs, VariationalResult}
import vultura.inference.gbp.{RgDiagnosis, TwoLayerOC}
import scala.util.Random
/**
* Created by thomas on 11.12.15.
*/
class TwoLayerOCPropagationTest extends Specification with FactorMatchers {
val p1: Problem =
problemGenerator(Generator.only(graph.lattice(4 -> false, 4 -> false))).generate(new Random(0)).problem.toRing(NormalD)
val t1: Problem =
problemGenerator(graph.randomTree(50)).generate(new Random(0)).problem.toRing(NormalD)
val tree_small: Problem =
problemGenerator(graph.randomTree(4)).generate(new Random(0)).problem.toRing(NormalD)
val magnetized1: Problem = {
val problemStructure = LabeledProblemStructure
.fromGraph(graph.lattice(4 -> false, 4 -> false), (_: IndexedSeq[Int]) => 2)
.addSingletons(_ => true)
IIDValuedParam(Generator.gaussian(0d, 1d)).parameterize(
problemStructure
).generate(new Random(0)).problem
}
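  /** Calibrates the problem on its min-degree junction-tree region graph and returns region beliefs plus convergence stats. */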
def jt(p: Problem): (RegionBeliefs[TwoLayerOC#TLR] with VariationalResult, ConvergenceStats) = {
val rg = TwoLayerOC.junctionTreeMinDegree(p)
val prop = new TwoLayerOCPropagation(rg, p.ring)
Calibrator.calibrateParam(prop, p.factors, 1)
}
"test junction-tree on 4x4" >> {
val result = jt(p1)
(result._2.isConverged must beTrue) and
(result._1 must haveExactZ(p1)) and
(result._1 must haveExactMarginals(p1))
}
"test junction-tree on tree with 50 vars" >> {
val result = jt(t1)
(result._2.isConverged must beTrue) and
(result._1 must haveExactZ(t1)) and
(result._1 must haveExactMarginals(t1))
}
"test junction-tree tree with 4 vars" >> {
val result = jt(tree_small)
(result._2.isConverged must beTrue) and
(result._1 must haveExactZ(tree_small)) and
(result._1 must haveExactMarginals(tree_small))
}
"aggregated vs. asis Bethe region graph construction must yield same results on" >> {
"when there is no redundancy" >> {
aggregatedVsAsis(p1.simplify, "lattice 4x4, simplified")
aggregatedVsAsis(t1.simplify, "tree with 50 nodes, simplified")
}
"when there are redundant singleton factors" >> {
aggregatedVsAsis(magnetized1, "lattice 4x4 with magnetization")
}
}
/** Aggregated and asis Bethe region graphs should yield equal results (as long as there are no parallel cycles
   * induced by redundant factor scopes). In particular, if only singleton factors are redundant (their scope contained
* within the scope of other factors), the results must be equal.
* @param p
* @param name
* @return
*/
def aggregatedVsAsis(p: Problem, name: String, maxIterations: Long = 10000, maxDiff: Double = 1e-12, damping: Double = 0d): Fragment = {
s"$name" ! {
def calibrate(rg: TwoLayerOC): (RegionBeliefs[TwoLayerOC#TLR], ConvergenceStats) = {
val cp = new TwoLayerOCPropagation(rg, p.ring)
Calibrator.calibrateParam(cp, p.factors, maxIterations, tol = maxDiff, damping = damping)
}
val (aggr_res, aggr_stats) = calibrate(TwoLayerOC.betheRegionGraph(p,aggregateFactors = true))
val (asis_res, asis_stats) = calibrate(TwoLayerOC.betheRegionGraph(p,aggregateFactors = false))
(aggr_stats.isConverged.aka("aggregated is converged") must beTrue) and
(asis_stats.isConverged.aka("asis is converged") must beTrue) and
(aggr_res must haveSameMarginals(asis_res, 1e-6))
}
}
}
| ziggystar/vultura-factor | src/test/scala/vultura/calibration/TwoLayerOCPropagationTest.scala | Scala | mit | 3,735 |
package org.bjean.sample.wordcount.aws
import com.amazonaws.services.elasticmapreduce.model.HadoopJarStepConfig
import com.typesafe.config.Config
object SparkCommandStepBuilder {
val AWS_CLUSTER_SPARK_STEP_COMMAND: String = "aws.cluster.spark.stepCommand"
val AWS_CLUSTER_SPARK_DEPLOY_MODE: String = "aws.cluster.spark.deployMode"
val AWS_CLUSTER_SPARK_MASTER: String = "aws.cluster.spark.master"
val AWS_CLUSTER_SPARK_DRIVER_MEMORY: String = "aws.cluster.spark.driverMemory"
val AWS_CLUSTER_SPARK_EXECUTOR_MEMORY: String = "aws.cluster.spark.executorMemory"
val AWS_CLUSTER_SPARK_EXECUTORS: String = "aws.cluster.spark.executors"
}
class SparkCommandStepBuilder(config: Config) extends HadoopJarStepConfigBuilder(config) {
private var mainClass: String = _
private var jarLocation: String = _
private var programArgs: List[String] = List()
def withJarLocation(jarLocation: String): SparkCommandStepBuilder = {
this.jarLocation = jarLocation
this
}
def withMainClass(mainClass: String): SparkCommandStepBuilder = {
this.mainClass = mainClass
this
}
def withProgramArgs(programArgs: List[String]): SparkCommandStepBuilder = {
this.programArgs = programArgs
this
}
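  /** Assembles the spark-submit command line from configuration, main class, jar location and program args, wrapped in a script-runner step. */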
def build: HadoopJarStepConfig = {
val sparkCommandLine = List(
config.getString(SparkCommandStepBuilder.AWS_CLUSTER_SPARK_STEP_COMMAND),
"--deploy-mode",
config.getString(SparkCommandStepBuilder.AWS_CLUSTER_SPARK_DEPLOY_MODE),
"--master",
config.getString(SparkCommandStepBuilder.AWS_CLUSTER_SPARK_MASTER),
"--driver-memory",
config.getString(SparkCommandStepBuilder.AWS_CLUSTER_SPARK_DRIVER_MEMORY),
"--executor-memory",
config.getString(SparkCommandStepBuilder.AWS_CLUSTER_SPARK_EXECUTOR_MEMORY),
"--num-executors",
config.getString(SparkCommandStepBuilder.AWS_CLUSTER_SPARK_EXECUTORS),
"--class",
mainClass,
jarLocation)
val withProgramArg = sparkCommandLine ++ programArgs
import collection.JavaConverters._
new HadoopJarStepConfig(getScriptRunnerLocation).withArgs(withProgramArg.asJava)
}
}
| bjet007/word-count-spark-aws | aws-launcher/src/main/scala/org/bjean/sample/wordcount/aws/SparkCommandStepBuilder.scala | Scala | apache-2.0 | 2,128 |
package tastytest
object BigFunctions {
val bigfun: (x1: Int,x2: Int,x3: Int,x4: Int,x5: Int,x6: Int,x7: Int,x8: Int,x9: Int,x10: Int,x11: Int,x12: Int,x13: Int,x14: Int,x15: Int,x16: Int,x17: Int,x18: Int,x19: Int,x20: Int,x21: Int,x22: Int,x23: Int) => String = bigdef
def bigdef(x1: Int,x2: Int,x3: Int,x4: Int,x5: Int,x6: Int,x7: Int,x8: Int,x9: Int,x10: Int,x11: Int,x12: Int,x13: Int,x14: Int,x15: Int,x16: Int,x17: Int,x18: Int,x19: Int,x20: Int,x21: Int,x22: Int,x23: Int): String = ""
class BigFunBox[F <: (x1: Int,x2: Int,x3: Int,x4: Int,x5: Int,x6: Int,x7: Int,x8: Int,x9: Int,x10: Int,x11: Int,x12: Int,x13: Int,x14: Int,x15: Int,x16: Int,x17: Int,x18: Int,x19: Int,x20: Int,x21: Int,x22: Int,x23: Int) => String]
}
| scala/scala | test/tasty/neg/src-3/BigFunctions.scala | Scala | apache-2.0 | 737 |
/*
* Copyright (C) Lightbend Inc. <https://www.lightbend.com>
*/
package play.api
import java.io._
import java.net.MalformedURLException
import java.net.URI
import java.net.URISyntaxException
import java.net.URL
import java.net.URLConnection
import java.nio.charset.StandardCharsets
import java.time.Period
import java.time.temporal.TemporalAmount
import java.util.Collections
import java.util.Objects
import java.util.Properties
import com.typesafe.config.ConfigException
import com.typesafe.config.ConfigFactory
import org.specs2.execute.FailureException
import org.specs2.mutable.Specification
import scala.concurrent.duration._
import scala.util.control.NonFatal
class ConfigurationSpec extends Specification {
import ConfigurationSpec._
def config(data: (String, Any)*): Configuration = Configuration.from(data.toMap)
def exampleConfig: Configuration = config(
"foo.bar1" -> "value1",
"foo.bar2" -> "value2",
"foo.bar3" -> null,
"blah.0" -> List(true, false, true),
"blah.1" -> List(1, 2, 3),
"blah.2" -> List(1.1, 2.2, 3.3),
"blah.3" -> List(1L, 2L, 3L),
"blah.4" -> List("one", "two", "three"),
"blah2" -> Map(
"blah3" -> Map(
"blah4" -> "value6"
)
),
"longlong" -> 79219707376851105L,
"longlonglist" -> Seq(-279219707376851105L, 8372206243289082062L, 1930906302765526206L),
)
def load(mode: Mode): Configuration = {
// system classloader should not have an application.conf
Configuration.load(Environment(new File("."), ClassLoader.getSystemClassLoader, mode))
}
"Configuration" should {
"support getting durations" in {
"simple duration" in {
val conf = config("my.duration" -> "10s")
val value = conf.get[Duration]("my.duration")
value must beEqualTo(10.seconds)
value.toString must beEqualTo("10 seconds")
}
"use minutes when possible" in {
val conf = config("my.duration" -> "120s")
val value = conf.get[Duration]("my.duration")
value must beEqualTo(2.minutes)
value.toString must beEqualTo("2 minutes")
}
"use seconds when minutes aren't accurate enough" in {
val conf = config("my.duration" -> "121s")
val value = conf.get[Duration]("my.duration")
value must beEqualTo(121.seconds)
value.toString must beEqualTo("121 seconds")
}
"handle 'infinite' as Duration.Inf" in {
val conf = config("my.duration" -> "infinite")
conf.get[Duration]("my.duration") must beEqualTo(Duration.Inf)
}
"handle null as Duration.Inf" in {
val conf = config("my.duration" -> null)
conf.get[Duration]("my.duration") must beEqualTo(Duration.Inf)
}
}
"support getting periods" in {
"month units" in {
val conf = config("my.period" -> "10 m")
val value = conf.get[Period]("my.period")
value must beEqualTo(Period.ofMonths(10))
value.toString must beEqualTo("P10M")
}
"day units" in {
val conf = config("my.period" -> "28 days")
val value = conf.get[Period]("my.period")
value must beEqualTo(Period.ofDays(28))
value.toString must beEqualTo("P28D")
}
"invalid format" in {
val conf = config("my.period" -> "5 donkeys")
conf.get[Period]("my.period") must throwA[ConfigException.BadValue]
}
}
"support getting temporal amounts" in {
"duration units" in {
val conf = config("my.time" -> "120s")
val value = conf.get[TemporalAmount]("my.time")
value must beEqualTo(java.time.Duration.ofMinutes(2))
value.toString must beEqualTo("PT2M")
}
"period units" in {
val conf = config("my.time" -> "3 weeks")
val value = conf.get[TemporalAmount]("my.time")
value must beEqualTo(Period.ofWeeks(3))
value.toString must beEqualTo("P21D")
}
"m means minutes, not months" in {
val conf = config("my.time" -> "12 m")
val value = conf.get[TemporalAmount]("my.time")
value must beEqualTo(java.time.Duration.ofMinutes(12))
value.toString must beEqualTo("PT12M")
}
"reject 'infinite'" in {
val conf = config("my.time" -> "infinite")
conf.get[TemporalAmount]("my.time") must throwA[ConfigException.BadValue]
}
"reject `null`" in {
val conf = config("my.time" -> null)
conf.get[TemporalAmount]("my.time") must throwA[ConfigException.Null]
}
}
"support getting URLs" in {
val validUrl = "https://example.com"
val invalidUrl = "invalid-url"
"valid URL" in {
val conf = config("my.url" -> validUrl)
val value = conf.get[URL]("my.url")
value must beEqualTo(new URL(validUrl))
}
"invalid URL" in {
val conf = config("my.url" -> invalidUrl)
def a: Nothing = {
conf.get[URL]("my.url")
throw FailureException(failure("MalformedURLException should be thrown"))
}
theBlock(a) must throwA[MalformedURLException]
}
}
"support getting URIs" in {
val validUri = "https://example.com"
val invalidUri = "%"
"valid URI" in {
val conf = config("my.uri" -> validUri)
val value = conf.get[URI]("my.uri")
value must beEqualTo(new URI(validUri))
}
"invalid URI" in {
val conf = config("my.uri" -> invalidUri)
def a: Nothing = {
conf.get[URI]("my.uri")
throw FailureException(failure("URISyntaxException should be thrown"))
}
theBlock(a) must throwA[URISyntaxException]
}
}
"support getting optional values via get[Option[...]]" in {
"when null" in {
config("foo.bar" -> null).get[Option[String]]("foo.bar") must beNone
}
"when set" in {
config("foo.bar" -> "bar").get[Option[String]]("foo.bar") must beSome("bar")
}
"when undefined" in {
config().get[Option[String]]("foo.bar") must throwA[ConfigException.Missing]
}
}
"support getting optional values via getOptional" in {
"when null" in {
config("foo.bar" -> null).getOptional[String]("foo.bar") must beNone
}
"when set" in {
config("foo.bar" -> "bar").getOptional[String]("foo.bar") must beSome("bar")
}
"when undefined" in {
config().getOptional[String]("foo.bar") must beNone
}
}
"support getting prototyped seqs" in {
val seq = config(
"bars" -> Seq(Map("a" -> "different a")),
"prototype.bars" -> Map("a" -> "some a", "b" -> "some b")
).getPrototypedSeq("bars")
seq must haveSize(1)
seq.head.get[String]("a") must_== "different a"
seq.head.get[String]("b") must_== "some b"
}
"support getting prototyped maps" in {
val map = config(
"bars" -> Map("foo" -> Map("a" -> "different a")),
"prototype.bars" -> Map("a" -> "some a", "b" -> "some b")
).getPrototypedMap("bars")
map must haveSize(1)
val foo = map("foo")
foo.get[String]("a") must_== "different a"
foo.get[String]("b") must_== "some b"
}
"be accessible as an entry set" in {
val map = Map(exampleConfig.entrySet.toList: _*)
map.keySet must contain(
allOf("foo.bar1", "foo.bar2", "blah.0", "blah.1", "blah.2", "blah.3", "blah.4", "blah2.blah3.blah4")
)
}
"make all paths accessible" in {
exampleConfig.keys must contain(
allOf("foo.bar1", "foo.bar2", "blah.0", "blah.1", "blah.2", "blah.3", "blah.4", "blah2.blah3.blah4")
)
}
"make all sub keys accessible" in {
exampleConfig.subKeys must contain(allOf("foo", "blah", "blah2"))
exampleConfig.subKeys must not(
contain(anyOf("foo.bar1", "foo.bar2", "blah.0", "blah.1", "blah.2", "blah.3", "blah.4", "blah2.blah3.blah4"))
)
}
"make all get accessible using scala" in {
exampleConfig.get[Seq[Boolean]]("blah.0") must ===(Seq(true, false, true))
exampleConfig.get[Seq[Int]]("blah.1") must ===(Seq(1, 2, 3))
exampleConfig.get[Seq[Double]]("blah.2") must ===(Seq(1.1, 2.2, 3.3))
exampleConfig.get[Seq[Long]]("blah.3") must ===(Seq(1L, 2L, 3L))
exampleConfig.get[Seq[String]]("blah.4") must contain(exactly("one", "two", "three"))
}
"handle longs of very large magnitude" in {
exampleConfig.get[Long]("longlong") must ===(79219707376851105L)
exampleConfig.get[Seq[Long]]("longlonglist") must ===(
Seq(-279219707376851105L, 8372206243289082062L, 1930906302765526206L)
)
}
"handle invalid and null configuration values" in {
exampleConfig.get[Seq[Boolean]]("foo.bar1") must throwA[com.typesafe.config.ConfigException]
exampleConfig.get[Boolean]("foo.bar3") must throwA[com.typesafe.config.ConfigException]
}
"query maps" in {
"objects with simple keys" in {
val configuration = Configuration(ConfigFactory.parseString("""
|foo.bar {
| one = 1
| two = 2
|}
""".stripMargin))
configuration.get[Map[String, Int]]("foo.bar") must_== Map("one" -> 1, "two" -> 2)
}
"objects with complex keys" in {
val configuration = Configuration(ConfigFactory.parseString("""
|test.files {
| "/public/index.html" = "html"
            | "/public/stylesheets/\"foo\".css" = "css"
            | "/public/javascripts/\"bar\".js" = "js"
|}
""".stripMargin))
configuration.get[Map[String, String]]("test.files") must_== Map(
"/public/index.html" -> "html",
"""/public/stylesheets/"foo".css""" -> "css",
"""/public/javascripts/"bar".js""" -> "js"
)
}
"nested objects" in {
val configuration = Configuration(ConfigFactory.parseString("""
|objects.a {
| "b.c" = { "D.E" = F }
| "d.e" = { "F.G" = H, "I.J" = K }
|}
""".stripMargin))
configuration.get[Map[String, Map[String, String]]]("objects.a") must_== Map(
"b.c" -> Map("D.E" -> "F"),
"d.e" -> Map("F.G" -> "H", "I.J" -> "K")
)
}
}
"throw serializable exceptions" in {
// from Typesafe Config
def copyViaSerialize(o: java.io.Serializable): AnyRef = {
val byteStream = new ByteArrayOutputStream()
val objectStream = new ObjectOutputStream(byteStream)
objectStream.writeObject(o)
objectStream.close()
val inStream = new ByteArrayInputStream(byteStream.toByteArray)
val inObjectStream = new ObjectInputStream(inStream)
val copy = inObjectStream.readObject()
inObjectStream.close()
copy
}
val conf = Configuration.from(
Map("item" -> "uh-oh, it's gonna blow")
)
locally {
try {
conf.get[Seq[String]]("item")
} catch {
case NonFatal(e) => copyViaSerialize(e)
}
} must not(throwA[Exception])
}
"fail if application.conf is not found" in {
"in dev mode" in (load(Mode.Dev) must throwA[PlayException])
"in prod mode" in (load(Mode.Prod) must throwA[PlayException])
"but not in test mode" in (load(Mode.Test) must not(throwA[PlayException]))
}
"throw a useful exception when invalid collections are passed in the load method" in {
Configuration.load(Environment.simple(), Map("foo" -> Seq("one", "two"))) must throwA[PlayException]
}
"InMemoryResourceClassLoader should return one resource" in {
import scala.collection.JavaConverters._
val cl = new InMemoryResourceClassLoader("reference.conf" -> "foo = ${bar}")
val url = new URL(null, "bytes:///reference.conf", (_: URL) => throw new IOException)
cl.findResource("reference.conf") must_== url
cl.getResource("reference.conf") must_== url
cl.getResources("reference.conf").asScala.toList must_== List(url)
}
"direct settings should have precedence over system properties when reading config.resource and config.file" in {
val userProps = new Properties()
userProps.put("config.resource", "application.from-user-props.res.conf")
userProps.put("config.file", "application.from-user-props.file.conf")
val direct = Map(
"config.resource" -> "application.from-direct.res.conf",
"config.file" -> "application.from-direct.file.conf",
)
val cl = new InMemoryResourceClassLoader(
"application.from-user-props.res.conf" -> "src = user-props",
"application.from-direct.res.conf" -> "src = direct",
)
val conf = Configuration.load(cl, userProps, direct, allowMissingApplicationConf = false)
conf.get[String]("src") must_== "direct"
}
"load from system properties when config.resource is not defined in direct settings" in {
val userProps = new Properties()
userProps.put("config.resource", "application.from-user-props.res.conf")
// Does not define config.resource nor config.file
val direct: Map[String, AnyRef] = Map.empty
val cl = new InMemoryResourceClassLoader(
"application.from-user-props.res.conf" -> "src = user-props"
)
val conf = Configuration.load(cl, userProps, direct, allowMissingApplicationConf = false)
conf.get[String]("src") must_== "user-props"
}
"validates reference.conf is self-contained" in {
val cl = new InMemoryResourceClassLoader("reference.conf" -> "foo = ${bar}")
Configuration.load(cl, new Properties(), Map.empty, true) must
throwA[PlayException]("Could not resolve substitution in reference.conf to a value")
}
"reference values from system properties" in {
val configuration = Configuration.load(Environment(new File("."), ClassLoader.getSystemClassLoader, Mode.Test))
val javaVersion = System.getProperty("java.specification.version")
val configJavaVersion = configuration.get[String]("test.system.property.java.spec.version")
configJavaVersion must beEqualTo(javaVersion)
}
"reference values from system properties when passing additional properties" in {
val configuration = Configuration.load(
ClassLoader.getSystemClassLoader,
new Properties(), // empty so that we can check that System Properties are still considered
directSettings = Map.empty,
allowMissingApplicationConf = true
)
val javaVersion = System.getProperty("java.specification.version")
val configJavaVersion = configuration.get[String]("test.system.property.java.spec.version")
configJavaVersion must beEqualTo(javaVersion)
}
"system properties override user-defined properties" in {
val userProperties = new Properties()
userProperties.setProperty("java.specification.version", "my java version")
val configuration = Configuration.load(
ClassLoader.getSystemClassLoader,
userProperties,
directSettings = Map.empty,
allowMissingApplicationConf = true
)
val javaVersion = System.getProperty("java.specification.version")
val configJavaVersion = configuration.get[String]("test.system.property.java.spec.version")
configJavaVersion must beEqualTo(javaVersion)
}
}
}
object ConfigurationSpec {
/** Allows loading in-memory resources. */
final class InMemoryResourceClassLoader(entries: (String, String)*) extends ClassLoader {
val bytes = entries.toMap.mapValues(_.getBytes(StandardCharsets.UTF_8)).toMap
override def findResource(name: String) = {
Objects.requireNonNull(name)
val spec = s"bytes:///$name"
bytes.get(name) match {
case None => null
case Some(bytes) => new URL(null, spec, (url: URL) => new BytesUrlConnection(url, bytes))
}
}
override def getResource(name: String) = findResource(name)
override def getResources(name: String) = {
findResource(name) match {
case null => Collections.emptyEnumeration()
case res1 => Collections.enumeration(Collections.singleton(res1))
}
}
}
final class BytesUrlConnection(url: URL, bytes: Array[Byte]) extends URLConnection(url) {
def connect() = ()
override def getInputStream = new ByteArrayInputStream(bytes)
}
}
| benmccann/playframework | core/play/src/test/scala/play/api/ConfigurationSpec.scala | Scala | apache-2.0 | 17,321 |
/*
* Copyright (c) 2019. Yuriy Stul
*/
package com.stulsoft.kafka.commit
import java.io.File
import com.typesafe.config.ConfigFactory
/** Application configuration
*
* @author Yuriy Stul
*/
object AppConfig {
// private lazy val config = ConfigFactory.parseFile(new File("app.conf"))
// .withFallback(ConfigFactory.load())
private lazy val config = ConfigFactory.load("app.conf")
private lazy val kafkaConfig = config.getConfig("kafka")
def kafkaServers(): String = kafkaConfig.getString("servers")
def kafkaAcks(): String = kafkaConfig.getString("acks")
def kafkaGroupId(): String = kafkaConfig.getString("groupId")
def kafkaTopic(): String = kafkaConfig.getString("topic")
}
| ysden123/poc | pkafka/kafka-commit/src/main/scala/com/stulsoft/kafka/commit/AppConfig.scala | Scala | mit | 714 |
package im.mange.common
import sys.process._
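/** Runs an external command as a background process, optionally from the given working directory. */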
class ProcessRunner(name: String, command: String, workingDirectory: String = "") {
private val processBuilder = workingDirectory match {
case "" => Process(command)
case _ => Process(command, new java.io.File(workingDirectory))
}
//TODO: replace null with Option
private var process: Process = null
def start() = { process = processBuilder.run(); this }
def start(logger: ProcessLogger) = { process = processBuilder.run(logger); this }
def stop(destroy: Boolean = true, waitForExitValue: Boolean = true) {
if (destroy) {
process.destroy()
println("### " + name + " destroyed")
}
if (waitForExitValue) {
println("### " + name + " exited with value: " + process.exitValue())
}
}
} | alltonp/driveby | src/main/scala/im/mange/common/ProcessRunner.scala | Scala | apache-2.0 | 788 |
/*
* Copyright 2012 Twitter Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.twitter.zipkin.collector.processor
import com.twitter.finagle.Service
import com.twitter.ostrich.stats.Stats
import com.twitter.util.Future
import com.twitter.zipkin.common.Span
import com.twitter.zipkin.gen
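/** Records server-side span duration (SERVER_RECV to SERVER_SEND) as Ostrich metrics, keyed by service name and span name. */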
class OstrichService(serviceStatsPrefix: String) extends Service[Span, Unit] {
def apply(span: Span): Future[Unit] = {
for {
start <- span.getAnnotation(gen.Constants.SERVER_RECV)
end <- span.getAnnotation(gen.Constants.SERVER_SEND)
} {
span.serviceNames.foreach(serviceName => {
Stats.addMetric(serviceStatsPrefix + serviceName, (end - start).toInt)
Stats.addMetric(serviceStatsPrefix + serviceName + "." + span.name, (end - start).toInt)
})
}
Future.Unit
}
}
| AnSavvides/zipkin | zipkin-collector-core/src/main/scala/com/twitter/zipkin/collector/processor/OstrichService.scala | Scala | apache-2.0 | 1,349 |
package eventstore
package operations
import eventstore.ReadEventError._
import eventstore.operations.Inspection.Decision.{ Stop, Fail }
import org.specs2.mock.Mockito
import org.specs2.mutable.Specification
import scala.util.{ Failure, Success }
class ReadEventInspectionSpec extends Specification with Mockito {
val inspection = ReadEventInspection(ReadEvent(EventStream.Id("test"))).pf
"ReadEventInspection" should {
"handle ReadEventCompleted" in {
inspection(Success(ReadEventCompleted(mock[Event]))) mustEqual Stop
}
"handle StreamNotFound" in {
inspection(Failure(StreamNotFound)) must beLike {
case Fail(_: StreamNotFoundException) => ok
}
}
"handle StreamDeleted" in {
inspection(Failure(StreamDeleted)) must beLike {
case Fail(_: StreamDeletedException) => ok
}
}
"handle EventNotFound" in {
inspection(Failure(EventNotFound)) must beLike {
case Fail(_: EventNotFoundException) => ok
}
}
"handle Error" in {
inspection(Failure(Error(None))) must beLike {
case Fail(_: ServerErrorException) => ok
}
}
"handle AccessDenied" in {
inspection(Failure(AccessDenied)) must beLike {
case Fail(_: AccessDeniedException) => ok
}
}
}
}
| pawelkaczor/EventStore.JVM | src/test/scala/eventstore/operations/ReadEventInspectionSpec.scala | Scala | bsd-3-clause | 1,304 |
package org.jetbrains.plugins.scala.codeInsight.template.macros
import com.intellij.codeInsight.CodeInsightBundle
import com.intellij.codeInsight.template._
import org.jetbrains.plugins.scala.codeInsight.template.impl.ScalaCodeContextType
import org.jetbrains.plugins.scala.codeInsight.template.util.MacroUtil
/**
* @author Roman.Shein
* @since 22.09.2015.
*/
class ScalaExpressionTypeMacro extends Macro {
override def calculateResult(params: Array[Expression], context: ExpressionContext): Result = {
if (params.length != 1) return null
MacroUtil.resultToScExpr(params.head.calculateResult(context), context).flatMap(_.getType().toOption).
map(myType => new ScalaTypeResult(myType)).orNull
}
override def getName: String = MacroUtil.scalaIdPrefix + "expressionType"
override def getPresentableName: String = MacroUtil.scalaPresentablePrefix + CodeInsightBundle.message("macro.expression.type")
override def isAcceptableInContext(context: TemplateContextType): Boolean = context.isInstanceOf[ScalaCodeContextType]
}
| double-y/translation-idea-plugin | src/org/jetbrains/plugins/scala/codeInsight/template/macros/ScalaExpressionTypeMacro.scala | Scala | apache-2.0 | 1,055 |