code (stringlengths 5-1M) | repo_name (stringlengths 5-109) | path (stringlengths 6-208) | language (stringclasses, 1 value) | license (stringclasses, 15 values) | size (int64, 5-1M)
---|---|---|---|---|---|
package com.goibibo.sqlshift.services
import com.whisk.docker.{DockerContainer, DockerKit, DockerReadyChecker}
/**
* Project: sqlshift
* Author: shivamsharma
* Date: 5/8/18.
*/
trait DockerZookeeperService extends DockerKit {
val ZK_PORT: Int = 2181
lazy val zkContainer: DockerContainer = DockerContainer("zookeeper:3.3.6")
.withPorts(ZK_PORT -> Some(ZK_PORT))
.withReadyChecker(DockerReadyChecker.LogLineContains("binding to port 0.0.0.0/0.0.0.0:2181"))
abstract override def dockerContainers: List[DockerContainer] = zkContainer :: super.dockerContainers
}
| goibibo/SqlShift | src/test/scala/com/goibibo/sqlshift/services/DockerZookeeperService.scala | Scala | mit | 607 |
object Test extends App {
class Foo
class Bar extends Foo
def overload(implicit foo: Foo): Unit = {}
def overload(implicit bar: Bar): Unit = {}
overload(new Bar)
}
| som-snytt/dotty | tests/pos/3190.scala | Scala | apache-2.0 | 176 |
package com.sksamuel.scapegoat.inspections
import com.sksamuel.scapegoat.{Levels, Inspection, Reporter}
import scala.reflect.runtime._
/** @author Stephen Samuel */
class JavaConversionsUse extends Inspection {
import universe._
override def traverser(reporter: Reporter) = new Traverser {
override def traverse(tree: Tree): Unit = {
tree match {
case Select(_, TermName("JavaConversions")) =>
reporter.warn("Java conversions", tree, Levels.Error, "Use of java conversions " + tree.toString().take(100))
case _ => super.traverse(tree)
}
}
}
}
| RichardBradley/scapegoat | src/main/scala/com/sksamuel/scapegoat/inspections/JavaConversionsUse.scala | Scala | apache-2.0 | 600 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.parser
import org.apache.spark.SparkFunSuite
import org.apache.spark.sql.catalyst.TableIdentifier
import org.apache.spark.sql.catalyst.plans.SQLHelper
import org.apache.spark.sql.internal.SQLConf
class TableIdentifierParserSuite extends SparkFunSuite with SQLHelper {
import CatalystSqlParser._
// Add "$elem$", "$value$" & "$key$"
// It is recommended to list them in alphabetical order.
val hiveNonReservedKeyword = Array(
"add",
"admin",
"after",
"all",
"alter",
"analyze",
"any",
"archive",
"array",
"as",
"asc",
"at",
"authorization",
"autocommit",
"before",
"between",
"bigint",
"binary",
"boolean",
"both",
"bucket",
"buckets",
"by",
"cascade",
"change",
"cluster",
"clustered",
"clusterstatus",
"collection",
"columns",
"comment",
"compact",
"compactions",
"compute",
"concatenate",
"continue",
"cost",
"create",
"cube",
"current_date",
"current_timestamp",
"cursor",
"data",
"databases",
"date",
"datetime",
"day",
"days",
"dbproperties",
"decimal",
"deferred",
"defined",
"delete",
"delimited",
"dependency",
"desc",
"describe",
"directories",
"directory",
"disable",
"distribute",
"double",
"drop",
"enable",
"escaped",
"exclusive",
"exists",
"explain",
"export",
"external",
"extract",
"false",
"fetch",
"fields",
"file",
"fileformat",
"first",
"float",
"for",
"format",
"formatted",
"functions",
"grant",
"group",
"grouping",
"hold_ddltime",
"hour",
"hours",
"idxproperties",
"ignore",
"import",
"in",
"index",
"indexes",
"inpath",
"inputdriver",
"inputformat",
"insert",
"int",
"into",
"is",
"isolation",
"items",
"jar",
"key_type",
"keys",
"last",
"lateral",
"leading",
"level",
"like",
"limit",
"lines",
"load",
"local",
"location",
"lock",
"locks",
"logical",
"long",
"mapjoin",
"materialized",
"metadata",
"microsecond",
"microseconds",
"millisecond",
"milliseconds",
"minus",
"minute",
"minutes",
"month",
"months",
"msck",
"no_drop",
"none",
"noscan",
"null",
"nulls",
"of",
"offline",
"offset",
"option",
"order",
"out",
"outer",
"outputdriver",
"outputformat",
"overwrite",
"owner",
"partition",
"partitioned",
"partitions",
"percent",
"pivot",
"plus",
"position",
"pretty",
"principals",
"procedure",
"protection",
"purge",
"query",
"range",
"read",
"readonly",
"reads",
"rebuild",
"recordreader",
"recordwriter",
"regexp",
"reload",
"rename",
"repair",
"replace",
"replication",
"restrict",
"revoke",
"rewrite",
"rlike",
"role",
"roles",
"rollup",
"row",
"rows",
"schemas",
"second",
"seconds",
"serde",
"serdeproperties",
"server",
"set",
"sets",
"shared",
"show",
"show_database",
"skewed",
"smallint",
"snapshot",
"sort",
"sorted",
"ssl",
"statistics",
"stored",
"streamtable",
"string",
"struct",
"table",
"tables",
"tblproperties",
"temporary",
"terminated",
"timestamp",
"tinyint",
"to",
"touch",
"trailing",
"transaction",
"transactions",
"trigger",
"trim",
"true",
"truncate",
"unarchive",
"undo",
"uniontype",
"unlock",
"unset",
"unsigned",
"update",
"uri",
"use",
"user",
"utc",
"utctimestamp",
"values",
"view",
"week",
"weeks",
"while",
"with",
"work",
"write",
"year",
"years")
val hiveStrictNonReservedKeyword = Seq(
"anti",
"cross",
"database",
"except",
"from",
"full",
"having",
"inner",
"intersect",
"join",
"left",
"natural",
"not",
"on",
"right",
"select",
"semi",
"table",
"to",
"union",
"where",
"with")
// All the keywords in `docs/sql-keywords.md` are listed below:
val allCandidateKeywords = Set(
"add",
"after",
"all",
"alter",
"analyze",
"and",
"anti",
"any",
"archive",
"array",
"as",
"asc",
"at",
"authorization",
"between",
"both",
"bucket",
"buckets",
"by",
"cache",
"cascade",
"case",
"cast",
"change",
"check",
"clear",
"cluster",
"clustered",
"codegen",
"collate",
"collection",
"column",
"columns",
"comment",
"commit",
"compact",
"compactions",
"compute",
"concatenate",
"constraint",
"cost",
"create",
"cross",
"cube",
"current",
"current_date",
"current_time",
"current_timestamp",
"current_user",
"data",
"database",
"databases",
"day",
"days",
"dbproperties",
"defined",
"delete",
"delimited",
"desc",
"describe",
"dfs",
"directories",
"directory",
"distinct",
"distribute",
"div",
"drop",
"else",
"end",
"escaped",
"except",
"exchange",
"exists",
"explain",
"export",
"extended",
"external",
"extract",
"false",
"fetch",
"fields",
"fileformat",
"first",
"following",
"for",
"foreign",
"format",
"formatted",
"from",
"full",
"function",
"functions",
"global",
"grant",
"group",
"grouping",
"having",
"hour",
"hours",
"if",
"ignore",
"import",
"in",
"index",
"indexes",
"inner",
"inpath",
"inputformat",
"insert",
"intersect",
"interval",
"into",
"is",
"items",
"join",
"keys",
"last",
"lateral",
"lazy",
"leading",
"left",
"like",
"limit",
"lines",
"list",
"load",
"local",
"location",
"lock",
"locks",
"logical",
"macro",
"map",
"microsecond",
"microseconds",
"millisecond",
"milliseconds",
"minus",
"minute",
"minutes",
"month",
"months",
"msck",
"natural",
"no",
"not",
"null",
"nulls",
"of",
"on",
"only",
"option",
"options",
"or",
"order",
"out",
"outer",
"outputformat",
"over",
"overlaps",
"overwrite",
"partition",
"partitioned",
"partitions",
"percent",
"pivot",
"position",
"preceding",
"primary",
"principals",
"purge",
"query",
"range",
"recordreader",
"recordwriter",
"recover",
"reduce",
"references",
"refresh",
"rename",
"repair",
"replace",
"reset",
"restrict",
"revoke",
"right",
"rlike",
"role",
"roles",
"rollback",
"rollup",
"row",
"rows",
"schema",
"second",
"seconds",
"select",
"semi",
"separated",
"serde",
"serdeproperties",
"session_user",
"set",
"sets",
"show",
"skewed",
"some",
"sort",
"sorted",
"start",
"statistics",
"stored",
"stratify",
"struct",
"substr",
"substring",
"table",
"tables",
"tablesample",
"tblproperties",
"temporary",
"terminated",
"then",
"to",
"touch",
"trailing",
"transaction",
"transactions",
"transform",
"true",
"truncate",
"unarchive",
"unbounded",
"uncache",
"union",
"unique",
"unlock",
"unset",
"use",
"user",
"using",
"values",
"view",
"week",
"weeks",
"when",
"where",
"window",
"with",
"year",
"years")
val reservedKeywordsInAnsiMode = Set(
"all",
"and",
"anti",
"any",
"as",
"authorization",
"both",
"case",
"cast",
"check",
"collate",
"column",
"constraint",
"create",
"cross",
"current_date",
"current_time",
"current_timestamp",
"current_user",
"day",
"distinct",
"else",
"end",
"except",
"false",
"fetch",
"for",
"foreign",
"from",
"full",
"grant",
"group",
"having",
"hour",
"in",
"inner",
"intersect",
"into",
"join",
"is",
"leading",
"left",
"minute",
"month",
"natural",
"not",
"null",
"on",
"only",
"or",
"order",
"outer",
"overlaps",
"primary",
"references",
"right",
"select",
"semi",
"session_user",
"minus",
"second",
"some",
"table",
"then",
"to",
"trailing",
"union",
"unique",
"user",
"using",
"when",
"where",
"with",
"year")
val nonReservedKeywordsInAnsiMode = allCandidateKeywords -- reservedKeywordsInAnsiMode
test("table identifier") {
// Regular names.
assert(TableIdentifier("q") === parseTableIdentifier("q"))
assert(TableIdentifier("q", Option("d")) === parseTableIdentifier("d.q"))
// Illegal names.
Seq("", "d.q.g", "t:", "${some.var.x}", "tab:1").foreach { identifier =>
intercept[ParseException](parseTableIdentifier(identifier))
}
}
test("quoted identifiers") {
assert(TableIdentifier("z", Some("x.y")) === parseTableIdentifier("`x.y`.z"))
assert(TableIdentifier("y.z", Some("x")) === parseTableIdentifier("x.`y.z`"))
assert(TableIdentifier("z", Some("`x.y`")) === parseTableIdentifier("```x.y```.z"))
assert(TableIdentifier("`y.z`", Some("x")) === parseTableIdentifier("x.```y.z```"))
assert(TableIdentifier("x.y.z", None) === parseTableIdentifier("`x.y.z`"))
}
test("table identifier - reserved/non-reserved keywords if ANSI mode enabled") {
withSQLConf(SQLConf.ANSI_SQL_PARSER.key -> "true") {
reservedKeywordsInAnsiMode.foreach { keyword =>
val errMsg = intercept[ParseException] {
parseTableIdentifier(keyword)
}.getMessage
assert(errMsg.contains("no viable alternative at input"))
assert(TableIdentifier(keyword) === parseTableIdentifier(s"`$keyword`"))
assert(TableIdentifier(keyword, Option("db")) === parseTableIdentifier(s"db.`$keyword`"))
}
nonReservedKeywordsInAnsiMode.foreach { keyword =>
assert(TableIdentifier(keyword) === parseTableIdentifier(s"$keyword"))
assert(TableIdentifier(keyword, Option("db")) === parseTableIdentifier(s"db.$keyword"))
}
}
}
test("table identifier - strict keywords") {
// SQL Keywords.
hiveStrictNonReservedKeyword.foreach { keyword =>
assert(TableIdentifier(keyword) === parseTableIdentifier(keyword))
assert(TableIdentifier(keyword) === parseTableIdentifier(s"`$keyword`"))
assert(TableIdentifier(keyword, Option("db")) === parseTableIdentifier(s"db.`$keyword`"))
}
}
test("table identifier - non reserved keywords") {
// Hive keywords are allowed.
hiveNonReservedKeyword.foreach { nonReserved =>
assert(TableIdentifier(nonReserved) === parseTableIdentifier(nonReserved))
}
}
test("SPARK-17364 table identifier - contains number") {
assert(parseTableIdentifier("123_") == TableIdentifier("123_"))
assert(parseTableIdentifier("1a.123_") == TableIdentifier("123_", Some("1a")))
// ".123" should not be treated as token of type DECIMAL_VALUE
assert(parseTableIdentifier("a.123A") == TableIdentifier("123A", Some("a")))
// ".123E3" should not be treated as token of type SCIENTIFIC_DECIMAL_VALUE
assert(parseTableIdentifier("a.123E3_LIST") == TableIdentifier("123E3_LIST", Some("a")))
// ".123D" should not be treated as token of type DOUBLE_LITERAL
assert(parseTableIdentifier("a.123D_LIST") == TableIdentifier("123D_LIST", Some("a")))
// ".123BD" should not be treated as token of type BIGDECIMAL_LITERAL
assert(parseTableIdentifier("a.123BD_LIST") == TableIdentifier("123BD_LIST", Some("a")))
}
test("SPARK-17832 table identifier - contains backtick") {
val complexName = TableIdentifier("`weird`table`name", Some("`d`b`1"))
assert(complexName === parseTableIdentifier("```d``b``1`.```weird``table``name`"))
assert(complexName === parseTableIdentifier(complexName.quotedString))
intercept[ParseException](parseTableIdentifier(complexName.unquotedString))
// Table identifiers containing consecutive backticks should be handled correctly.
val complexName2 = TableIdentifier("x``y", Some("d``b"))
assert(complexName2 === parseTableIdentifier(complexName2.quotedString))
}
}
| aosagie/spark | sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/TableIdentifierParserSuite.scala | Scala | apache-2.0 | 13,700 |
/*
* Copyright (c) 2011-2012 by Alexander Grünewald
*
* This file is part of gruenewa-commons, a collection of generally
* useful utility functions.
*
* gruenewa-commons is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package gruenewa
package object prelude {
/**
* Thrush combinator that enables F#-style
* pipeline (|>) syntax when in scope.
*/
final class Thrush[A](a: A) {
def |>[B](f: A => B): B = f(a)
}
@inline
implicit def |>[A](a: => A) = new Thrush(a)
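// Usage sketch (illustrative only, assuming this prelude is imported):
//   import gruenewa.prelude._
//   val shout = "hello" |> (_.trim) |> (_.toUpperCase)   // "HELLO"
// Each |> simply feeds the value on the left into the function on the right.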
/**
* Using statement that models an ARM block
* to auto-close resources.
*/
type Closable = { def close() }
@inline
def using[T <: Closable, A](resource: T)(block: T => A): A = {
try {
block(resource)
} finally {
if (resource != null) {
resource.close()
}
}
}
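// Usage sketch (hypothetical file name): the resource is closed even if the block throws.
//   using(new java.io.FileInputStream("data.txt")) { in =>
//     in.read()
//   }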
@inline
def wrapException[T](block: => T): Either[Throwable, T] =
try {
Right(block)
} catch {
case e => Left(e)
}
/**
* Very simple time measurement function.
*/
@inline
def time[T](block: => T) = {
val t0 = System.nanoTime
try {
block
} finally {
val t1 = System.nanoTime
printf("elapsed: %.6f ms\\n", 1e-6*(t1-t0))
}
}
/**
* Runs the given code block or throws
* an exception on timeout.
*/
@throws(classOf[java.util.concurrent.TimeoutException])
def timedRun[F](timeout: Long)(f: => F): F = {
import java.util.concurrent.{Callable, FutureTask, TimeUnit}
val task = new FutureTask(new Callable[F]() {
def call() = f
})
new Thread(task).start()
task.get(timeout, TimeUnit.MILLISECONDS)
}
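// Usage sketch: the block runs on a separate thread and task.get enforces the timeout,
// throwing java.util.concurrent.TimeoutException when it is exceeded. Note that the
// worker thread itself is not interrupted on timeout.
//   timedRun(100) { Thread.sleep(1000) }   // expected to time out after ~100 ms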
}
| gruenewa/gruenewa-misc | gruenewa-commons/src/main/scala/Prelude.scala | Scala | gpl-3.0 | 2,233 |
/*                     __                                               *\
**     ________ ___   / /  ___      __ ____  Scala.js sbt plugin        **
**    / __/ __// _ | / /  / _ | __ / // __/  (c) 2013, LAMP/EPFL        **
**  __\ \/ /__/ __ |/ /__/ __ |/_// /_\ \    http://scala-js.org/       **
** /____/\___/_/ |_/____/_/ | |__/ /____/                               **
**                          |/____/                                     **
\*                                                                      */
package org.scalajs.sbtplugin.cross
import sbt._
final class CrossClasspathDependency(
val project: CrossProject,
val configuration: Option[String]
) {
def jvm: ClasspathDependency = ClasspathDependency(project.jvm, configuration)
def js: ClasspathDependency = ClasspathDependency(project.js, configuration)
}
object CrossClasspathDependency {
final class Constructor(crossProject: CrossProject) {
def %(conf: Configuration): CrossClasspathDependency = %(conf.name)
def %(conf: String): CrossClasspathDependency =
new CrossClasspathDependency(crossProject, Some(conf))
}
}
| jmnarloch/scala-js | sbt-plugin/src/main/scala/scala/scalajs/sbtplugin/cross/CrossClasspathDependency.scala | Scala | bsd-3-clause | 1,134 |
package calc
object Calc9 extends App {
val stream = Stream.from(1).flatMap { _.toString }
println(stream.take(28383).last)
}
| ebowman/calc | src/main/scala/calc/Calc9.scala | Scala | unlicense | 133 |
package epic.parser
package projections
/*
Copyright 2012 David Hall
Licensed under the Apache License, Version 2.0 (the "License")
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
import projections.AnchoredForestProjector.ForestData
import breeze.collection.mutable.{TriangularArray, OpenAddressHashArray}
import epic.lexicon.Lexicon
import epic.constraints.ChartConstraints
/**
* Creates a grammar using only span marginals and unary rule marginals
* @author dlwh
*/
case class LabeledSpanProjector[L, W](topology: RuleTopology[L], threshold: Double = Double.NegativeInfinity) extends ChartProjector[L, W] {
type MyAnchoring = SpanAnchoring[L, W]
private def normalize(ruleScores: OpenAddressHashArray[Double], totals: OpenAddressHashArray[Double]):OpenAddressHashArray[Double] = {
if(ruleScores eq null) null
else {
val r = new OpenAddressHashArray[Double](ruleScores.length, Double.NegativeInfinity)
for( (rule, score) <- ruleScores.activeIterator) {
val parent = topology.parent(rule)
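// The marginal score is converted to a log-odds (logit) value; scores very close
// to 1 are clamped to a fixed value of 10 so the logit does not blow up.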
if(score > 0.9999999) {
r(rule) = 10
} else if(score > 0) {
r(rule) = math.log(score) - math.log1p(-score)
}
}
r
}
}
private def normalizeSpans(totals: OpenAddressHashArray[Double]):OpenAddressHashArray[Double] = {
if(totals eq null) null
else {
val r = new OpenAddressHashArray[Double](totals.length, Double.NegativeInfinity)
for( (parent, score) <- totals.activeIterator) {
if(score > 0.9999999) {
r(parent) = 10
} else if(score > 0) {
r(parent) = math.log(score) - math.log1p(-score)
}
}
r
}
}
protected def createAnchoring(charts: ParseMarginal[L, W], ruleData: ForestData, sentProb: Double) = {
val AnchoredForestProjector.ForestData(lexicalScores, unaryScores, totalsUnaries, binaryScores, totalsBinaries) = ruleData
val normUnaries:Array[OpenAddressHashArray[Double]] = for((ruleScores, totals) <- unaryScores zip totalsUnaries) yield {
normalize(ruleScores, totals)
}
val normSpans:Array[OpenAddressHashArray[Double]] = for( totals <- totalsBinaries) yield {
normalizeSpans(totals)
}
val sparsity = charts.anchoring.sparsityPattern
new SpanAnchoring(charts.topology, charts.lexicon, charts.words, sparsity, normSpans, normUnaries)
}
}
/**
* A SpanAnchoring just scores spans and unary rules. BinaryRules are all given score 0.0
* @param topology
* @param lexicon
* @param words
* @param spanScores
* @param unaryScores
* @tparam L
* @tparam W
*/
@SerialVersionUID(1L)
case class SpanAnchoring[L, W](topology: RuleTopology[L],
lexicon: Lexicon[L, W],
words: IndexedSeq[W],
sparsityPattern: ChartConstraints[L],
spanScores: Array[OpenAddressHashArray[Double]],
unaryScores: Array[OpenAddressHashArray[Double]]) extends UnrefinedGrammarAnchoring[L, W] {
def addConstraints(cs: ChartConstraints[L]) = copy(sparsityPattern = sparsityPattern & cs)
def scoreBinaryRule(begin: Int, split: Int, end: Int, rule: Int) = 0.0
def scoreUnaryRule(begin: Int, end: Int, rule: Int) = {
val forSpan = unaryScores(TriangularArray.index(begin, end))
if(forSpan eq null) Double.NegativeInfinity
else forSpan(rule)
}
def scoreSpan(begin: Int, end: Int, tag: Int) = {
val scores = spanScores(TriangularArray.index(begin, end))
if(scores ne null) scores(tag)
else Double.NegativeInfinity
}
}
| maxim-rabinovich/epic | src/main/scala/epic/parser/projections/LabeledSpanProjector.scala | Scala | apache-2.0 | 4,026 |
package reswing.reshapes.ui.panels
import scala.swing.BoxPanel
import scala.swing.Component
import scala.swing.Orientation
import scala.swing.ScrollPane
import rescala.default._
import reswing.reshapes.ReShapes
import reswing.reshapes.drawing.Command
import reswing.reshapes.util.ReactiveUtil.UnionEvent
import reswing.ReBoxPanel
import reswing.ReButton
/** The CommandPanel lists all executed commands and makes it possible to revert them */
class CommandPanel extends BoxPanel(Orientation.Vertical) {
def state = ReShapes.drawingSpaceState
val commands = Signal.dynamic { if (state() != null) state().commands() else List.empty } // #SIG
val buttonsEvents = Signal { // #SIG
commands() map { command =>
val button = new ReButton(command.description) // #IS( //#EVT )
(button: Component, button.clicked map { _: Any => command })
}
}
val revert = UnionEvent(Signal { // #SIG //#UE( //#EVT //#IF )
buttonsEvents() map { case (_, ev) => ev: Event[Command] }
})
val commandPanel = new ReBoxPanel(
orientation = Orientation.Vertical,
contents = Signal { (buttonsEvents() map { case (btn, _) => btn }): Seq[Component] }
) // #SIG //#IS( // )
contents += new ScrollPane {
contents = commandPanel
}
}
| guidosalva/REScala | Code/Examples/examples/src/main/scala/reswing/reshapes/ui/panels/CommandPanel.scala | Scala | apache-2.0 | 1,259 |
/*
* @author Philip Stutz
* @author Thomas Keller
*
* Copyright 2011 University of Zurich
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.signalcollect.features
import org.junit.runner.RunWith
import org.specs2.mock.Mockito
import org.specs2.mutable.SpecificationWithJUnit
import com.signalcollect.ExecutionConfiguration
import com.signalcollect.GlobalTerminationCondition
import com.signalcollect.Graph
import com.signalcollect.GraphBuilder
import com.signalcollect.SumOfStates
import com.signalcollect.configuration.ExecutionMode
import com.signalcollect.configuration.TerminationReason
import com.signalcollect.examples.PageRankEdge
import com.signalcollect.examples.PageRankVertex
import com.signalcollect.nodeprovisioning.Node
import com.signalcollect.nodeprovisioning.local.LocalNodeProvisioner
import org.specs2.runner.JUnitRunner
import com.typesafe.config.Config
import akka.actor.ActorRef
import com.signalcollect.configuration.ActorSystemRegistry
import akka.actor.Props
import com.signalcollect.nodeprovisioning.NodeActorCreator
import com.signalcollect.nodeprovisioning.DefaultNodeActor
@RunWith(classOf[JUnitRunner])
class ComputationTerminationSpec extends SpecificationWithJUnit with Mockito {
def createCircleGraph(vertices: Int, numberOfWorkers: Option[Int] = None): Graph[Any, Any] = {
val graph = {
if (numberOfWorkers.isEmpty) {
GraphBuilder.build
} else {
GraphBuilder.withNodeProvisioner(new LocalNodeProvisioner {
override def getNodes(akkaConfig: Config): Array[ActorRef] = {
val system = ActorSystemRegistry.retrieve("SignalCollect").getOrElse(throw new Exception("No actor system with name \"SignalCollect\" found!"))
if (system != null) {
val nodeControllerCreator = NodeActorCreator(0, None)
val nodeController = system.actorOf(Props[DefaultNodeActor].withCreator(nodeControllerCreator.create), name = "DefaultNodeActor")
Array[ActorRef](nodeController)
} else {
Array[ActorRef]()
}
}
}).build
}
}
val idSet = (1 to vertices).toSet
for (id <- idSet) {
graph.addVertex(new PageRankVertex(id))
}
for (id <- idSet) {
graph.addEdge(id, new PageRankEdge((id % vertices) + 1))
}
graph
}
sequential
"Steps limit" should {
"work for synchronous computations" in {
var allResultsCorrect = true
for (i <- 1 to 10) {
val graph = createCircleGraph(1000)
val execConfig = ExecutionConfiguration
.withSignalThreshold(0)
.withStepsLimit(1)
.withExecutionMode(ExecutionMode.Synchronous)
val info = graph.execute(execConfig)
val state = graph.forVertexWithId(1, (v: PageRankVertex) => v.state)
graph.shutdown
info.executionStatistics.terminationReason === TerminationReason.ComputationStepLimitReached
allResultsCorrect &= state === 0.2775
}
allResultsCorrect === true
}
}
"Convergence detection" should {
"work for asynchronous computations with one worker" in {
val graph = createCircleGraph(3, Some(1))
val info = graph.execute(ExecutionConfiguration.withSignalThreshold(0.0001))
val state = graph.forVertexWithId(1, (v: PageRankVertex) => v.state)
state > 0.99
val aggregate = graph.aggregate(new SumOfStates[Double]).get
if (info.executionStatistics.terminationReason != TerminationReason.Converged) {
println("Computation ended for the wrong reason: "+info.executionStatistics.terminationReason)
}
graph.shutdown
aggregate > 2.99 && info.executionStatistics.terminationReason == TerminationReason.Converged
}
}
"Global convergence" should {
"work for synchronous computations" in {
val graph = createCircleGraph(30)
val terminationCondition = new GlobalTerminationCondition(new SumOfStates[Double], 1, (sum: Option[Double]) =>
sum.isDefined && sum.get > 20.0 && sum.get < 29.0)
val execConfig = ExecutionConfiguration
.withSignalThreshold(0)
.withGlobalTerminationCondition(terminationCondition)
.withExecutionMode(ExecutionMode.Synchronous)
val info = graph.execute(execConfig)
val state = graph.forVertexWithId(1, (v: PageRankVertex) => v.state)
val aggregate = graph.aggregate(new SumOfStates[Double]).get
graph.shutdown
aggregate > 20.0 && aggregate < 29.0 && info.executionStatistics.terminationReason == TerminationReason.GlobalConstraintMet
}
"work for asynchronous computations" in {
val graph = createCircleGraph(100)
val terminationCondition = new GlobalTerminationCondition(new SumOfStates[Double], 1l, (sum: Option[Double]) =>
sum.isDefined && sum.get > 20.0)
val execConfig = ExecutionConfiguration
.withSignalThreshold(0)
.withGlobalTerminationCondition(terminationCondition)
val info = graph.execute(execConfig)
val state = graph.forVertexWithId(1, (v: PageRankVertex) => v.state)
val aggregate = graph.aggregate(new SumOfStates[Double]).get
if (aggregate <= 20.0) {
println("Computation ended before global condition was met.")
}
if (aggregate > 99.99999999) {
println("Computation converged completely instead of ending when the global constraint was met: "+aggregate)
}
if (info.executionStatistics.terminationReason != TerminationReason.GlobalConstraintMet) {
println("Computation ended for the wrong reason: "+info.executionStatistics.terminationReason)
}
graph.shutdown
aggregate > 20.0 && aggregate < 99.99999999 && info.executionStatistics.terminationReason == TerminationReason.GlobalConstraintMet
}
}
}
| gmazlami/dcop-maxsum | src/test/scala/com/signalcollect/features/ComputationTerminationSpec.scala | Scala | apache-2.0 | 6,351 |
import org.scalatest.{FunSuite, Matchers}
/**
* Created by inieto on 27/04/15.
*/
class _14_PartiallyAppliedFunctions extends FunSuite with Matchers {
test("A partially applied function is a function that you do not apply any or all the arguments, creating another function."
+ "This partially applied function doesn't apply any arguments.") {
def sum(a: Int, b: Int, c: Int) = a + b + c
val sum3 = sum _
sum3(1, 9, 7) should be(17)
sum(4, 5, 6) should be(15)
}
test("Partially applied functions can replace any number of arguments") {
def sum(a: Int, b: Int, c: Int) = a + b + c
val sumC = sum(1, 10, _: Int)
sumC(4) should be(15)
sum(4, 5, 6) should be(15)
}
}
| inieto/scala-47deg | ScalaExercises/src/test/scala-2.11/_14_PartiallyAppliedFunctions.scala | Scala | mit | 728 |
package teststate.core
import teststate.core.CoreExports._
import teststate.core.CoreExports2._
import teststate.data.Or
import teststate.typeclass.{ExecutionModel, ~~>}
case class Transformer[F [_], R , O , S , E ,
F2[_], R2, O2, S2, E2]
(actions : Actions[F, R, O, S, E] => Actions[F2, R2, O2, S2, E2],
invariants: Invariants [O, S, E] => Invariants [O2, S2, E2],
points : Points [O, S, E] => Points [O2, S2, E2],
arounds : Arounds [O, S, E] => Arounds [O2, S2, E2])
(implicit val f1: ExecutionModel[F], val f2: ExecutionModel[F2]) {
def mapF[X[_]](f: F2 ~~> X)(implicit x: ExecutionModel[X]) =
Transformer[F, R, O, S, E, X, R2, O2, S2, E2](
actions(_) trans f, invariants, points, arounds)
def mapR[X](f: X => R2) =
Transformer[F, R, O, S, E, F2, X, O2, S2, E2](
actions(_) mapR f, invariants, points, arounds)
def mapOS[X, Y](f: X => O2, g: Y => S2)(h: (Y, S2) => Y) =
Transformer[F, R, O, S, E, F2, R2, X, Y, E2](
actions (_).mapOS(f, g)(h),
invariants(_).mapOS(f, g),
points (_).mapOS(f, g),
arounds (_).mapOS(f, g))
def mapO[X](f: X => O2) =
Transformer[F, R, O, S, E, F2, R2, X, S2, E2](
actions (_) mapO f,
invariants(_) mapO f,
points (_) mapO f,
arounds (_) mapO f)
def mapS[X](g: X => S2)(h: (X, S2) => X) =
Transformer[F, R, O, S, E, F2, R2, O2, X, E2](
actions (_).mapS(g)(h),
invariants(_).mapS(g),
points (_).mapS(g),
arounds (_).mapS(g))
def mapE[X](f: E2 => X) =
Transformer[F, R, O, S, E, F2, R2, O2, S2, X](
actions (_) mapE f,
invariants(_) mapE f,
points (_) mapE f,
arounds (_) mapE f)
def pmapR[X](f: X => E2 Or R2) =
Transformer[F, R, O, S, E, F2, X, O2, S2, E2](
actions(_) pmapR f, invariants, points, arounds)
def pmapO[X](f: X => E2 Or O2) =
Transformer[F, R, O, S, E, F2, R2, X, S2, E2](
actions (_) pmapO f,
invariants(_) pmapO f,
points (_) pmapO f,
arounds (_) pmapO f)
// -------------------------------------------------------------------------------------------------------------------
def cmapF[X[_]](f: X ~~> F)(implicit x: ExecutionModel[X]) =
Transformer[X, R, O, S, E, F2, R2, O2, S2, E2](
_.trans(f) |> actions, invariants, points, arounds)
def cmapR[X](f: R => X) =
Transformer[F, X, O, S, E, F2, R2, O2, S2, E2](
_.mapR(f) |> actions, invariants, points, arounds)
def cmapOS[X, Y](f: O => X, g: S => Y)(h: (S, Y) => S) =
Transformer[F, R, X, Y, E, F2, R2, O2, S2, E2](
_.mapOS(f, g)(h) |> actions ,
_.mapOS(f, g) |> invariants,
_.mapOS(f, g) |> points ,
_.mapOS(f, g) |> arounds )
def cmapO[X](f: O => X) =
Transformer[F, R, X, S, E, F2, R2, O2, S2, E2](
_.mapO(f) |> actions ,
_.mapO(f) |> invariants,
_.mapO(f) |> points ,
_.mapO(f) |> arounds )
def cmapS[X](g: S => X)(h: (S, X) => S) =
Transformer[F, R, O, X, E, F2, R2, O2, S2, E2](
_.mapS(g)(h) |> actions ,
_.mapS(g) |> invariants,
_.mapS(g) |> points ,
_.mapS(g) |> arounds )
def cmapE[X](f: X => E) =
Transformer[F, R, O, S, X, F2, R2, O2, S2, E2](
_.mapE(f) |> actions ,
_.mapE(f) |> invariants,
_.mapE(f) |> points ,
_.mapE(f) |> arounds )
def cpmapR[X](f: R => E Or X) =
Transformer[F, X, O, S, E, F2, R2, O2, S2, E2](
_.pmapR(f) |> actions, invariants, points, arounds)
def cpmapO[X](f: O => E Or X) =
Transformer[F, R, X, S, E, F2, R2, O2, S2, E2](
_.pmapO(f) |> actions ,
_.pmapO(f) |> invariants,
_.pmapO(f) |> points ,
_.pmapO(f) |> arounds )
// object Auto {
// implicit def autoTransformActions [F[_], R, O, S, E, F2[_], R2, O2, S2, E2](a: Actions[F, R, O, S, E])(implicit t: Transformer[F, R, O, S, E, F2, R2, O2, S2, E2]): Actions [F2, R2, O2, S2, E2] = a.lift
// implicit def autoTransformInvariants[F[_], R, O, S, E, F2[_], R2, O2, S2, E2](a: Invariants[O, S, E]) (implicit t: Transformer[F, R, O, S, E, F2, R2, O2, S2, E2]): Invariants[O2, S2, E2] = a.lift
// implicit def autoTransformPoints [F[_], R, O, S, E, F2[_], R2, O2, S2, E2](a: Points [O, S, E]) (implicit t: Transformer[F, R, O, S, E, F2, R2, O2, S2, E2]): Points [O2, S2, E2] = a.lift
// implicit def autoTransformArounds [F[_], R, O, S, E, F2[_], R2, O2, S2, E2](a: Arounds [O, S, E]) (implicit t: Transformer[F, R, O, S, E, F2, R2, O2, S2, E2]): Arounds [O2, S2, E2] = a.lift
//
// implicit def autoTransformActions (x: Actions[F, R, O, S, E]): Actions [F2, R2, O2, S2, E2] = action (x)
// implicit def autoTransformInvariants(x: Invariants[O, S, E]) : Invariants[O2, S2, E2] = invariant(x)
// implicit def autoTransformPoints (x: Points [O, S, E]) : Points [O2, S2, E2] = point (x)
// implicit def autoTransformArounds (x: Arounds [O, S, E]) : Arounds [O2, S2, E2] = around (x)
// }
}
object Transformer {
def id[F[_], R, O, S, E](implicit f: ExecutionModel[F]) =
Transformer[F, R, O, S, E, F, R, O, S, E](
identity, identity, identity, identity)(f, f)
final class ActionOps[F[_], R, O, S, E](private val self: Actions[F, R, O, S, E]) extends AnyVal {
def lift[F2[_], R2, O2, S2, E2](implicit t: Transformer[F, R, O, S, E, F2, R2, O2, S2, E2]): Actions[F2, R2, O2, S2, E2] =
t.actions(self)
}
final class InvariantsOps[O, S, E](private val self: Invariants[O, S, E]) extends AnyVal {
def lift[F[_], R, F2[_], R2, O2, S2, E2](implicit t: Transformer[F, R, O, S, E, F2, R2, O2, S2, E2]): Invariants[O2, S2, E2] =
t.invariants(self)
}
final class PointsOps[O, S, E](private val self: Points[O, S, E]) extends AnyVal {
def lift[F[_], R, F2[_], R2, O2, S2, E2](implicit t: Transformer[F, R, O, S, E, F2, R2, O2, S2, E2]): Points[O2, S2, E2] =
t.points(self)
}
final class AroundsOps[O, S, E](private val self: Arounds[O, S, E]) extends AnyVal {
def lift[F[_], R, F2[_], R2, O2, S2, E2](implicit t: Transformer[F, R, O, S, E, F2, R2, O2, S2, E2]): Arounds[O2, S2, E2] =
t.arounds(self)
}
trait ToOps {
implicit def toActionTransformerOps[F[_], R, O, S, E](a: Actions[F, R, O, S, E]): ActionOps[F, R, O, S, E] = new ActionOps(a)
implicit def toInvariantsTransformerOps[O, S, E](a: Invariants[O, S, E]): InvariantsOps[O, S, E] = new InvariantsOps(a)
implicit def toPointsTransformerOps [O, S, E](a: Points [O, S, E]): PointsOps [O, S, E] = new PointsOps (a)
implicit def toAroundsTransformerOps [O, S, E](a: Arounds [O, S, E]): AroundsOps [O, S, E] = new AroundsOps (a)
}
}
| japgolly/test-state | core/shared/src/main/scala/teststate/core/Transformer.scala | Scala | apache-2.0 | 6,838 |
/*
* Copyright 2014-2021 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.atlas.chart.graphics
import java.awt.BasicStroke
import java.awt.Font
import java.awt.Stroke
import java.awt.image.BufferedImage
import java.util.concurrent.ConcurrentHashMap
import com.netflix.atlas.chart.util.Fonts
import com.netflix.iep.config.ConfigManager
object ChartSettings {
private val config = ConfigManager.dynamicConfig().getConfig("atlas.chart")
val defaultTheme: String = config.getString(s"theme.default")
private val themes = new ConcurrentHashMap[String, Theme]()
def theme(name: String): Theme = {
if (!config.hasPath(s"theme.$name")) {
throw new IllegalArgumentException(s"invalid theme name: '$name'")
} else {
themes.computeIfAbsent(name, n => {
val c = config.getConfig(s"theme.$n")
Theme(c)
})
}
}
/**
* For some of the font operations a graphics context is needed. This is a simple dummy instance
* that can be used for cases where we need to determine the size before the actual image object
* is created.
*/
val refImage = new BufferedImage(1, 1, BufferedImage.TYPE_INT_ARGB)
val refGraphics = refImage.createGraphics()
/** Dashed stroke typically used for grid lines. */
val dashedStroke: Stroke = {
new BasicStroke(
1.0f,
BasicStroke.CAP_BUTT,
BasicStroke.JOIN_MITER,
1.0f,
Array(1.0f, 1.0f),
0.0f
)
}
/**
* Base monospaced font used for graphics. Monospace is used to make the layout easier.
*/
val monospaceFont = Fonts.loadFont(config.getString("fonts.monospace"))
/** Small sized monospaced font. */
val smallFont = monospaceFont.deriveFont(10.0f)
/** Normal sized monospaced font. */
val normalFont = monospaceFont
/** Large sized monospaced font. */
val largeFont = monospaceFont.deriveFont(14.0f)
/** Dimensions for a character using the small font. */
val smallFontDims = dimensions(smallFont)
/** Dimensions for a character using the normal font. */
val normalFontDims = dimensions(normalFont)
/** Dimensions for a character using the large font. */
val largeFontDims = dimensions(largeFont)
/**
* Minimum width required for text elements. Value was chosen to allow typical messages to
* display with a reasonable level of wrapping.
*/
val minWidthForText = smallFontDims.width * "Warnings: abcdef".length
/**
* Minimum width required for legend entries with stats. Value was chosen to allow the typical legend with
* stats to show cleanly. It also keeps the cutoff below the level of sizes that are frequently
* used in practice.
*/
val minWidthForStats = smallFontDims.width * 45
/**
* Determine the dimensions for a single character using `font`. It is assumed that the font
* is monospaced.
*/
def dimensions(font: Font): Dimensions = {
refGraphics.setFont(font)
val m = refGraphics.getFontMetrics
Dimensions(m.stringWidth("X"), m.getHeight)
}
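// Sketch of expected usage (values are illustrative and platform-dependent):
//   val dims = ChartSettings.dimensions(ChartSettings.normalFont)   // e.g. Dimensions(7, 15)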
case class Dimensions(width: Int, height: Int)
}
| brharrington/atlas | atlas-chart/src/main/scala/com/netflix/atlas/chart/graphics/ChartSettings.scala | Scala | apache-2.0 | 3,597 |
package com.scalaAsm.x86
package Instructions
package General
// Description: Set Byte on Condition - below/not above or equal/carry (CF=1)
// Category: general/datamov
trait SETNAE extends InstructionDefinition {
val mnemonic = "SETNAE"
}
object SETNAE extends OneOperand[SETNAE] with SETNAEImpl
trait SETNAEImpl extends SETNAE {
implicit object _0 extends OneOp[rm8] {
val opcode: TwoOpcodes = (0x0F, 0x92) /+ 0
val format = RmFormat
}
}
| bdwashbu/scala-x86-inst | src/main/scala/com/scalaAsm/x86/Instructions/General/SETNAE.scala | Scala | apache-2.0 | 458 |
package com.searchlight.khronus.store
import com.searchlight.khronus.model.{ Metric, MetricMeasurement, _ }
import com.searchlight.khronus.util.{ Measurable, Settings, ConcurrencySupport }
import com.searchlight.khronus.util.log.Logging
import org.HdrHistogram.Histogram
import scala.collection.mutable
import scala.concurrent.duration._
import scala.concurrent.{ ExecutionContext, Future }
trait MetricMeasurementStoreSupport {
def metricStore: MetricMeasurementStore = CassandraMetricMeasurementStore
}
trait MetricMeasurementStore {
def storeMetricMeasurements(metricMeasurements: List[MetricMeasurement])
}
object CassandraMetricMeasurementStore extends MetricMeasurementStore with BucketSupport with MetaSupport with Logging with ConcurrencySupport with MonitoringSupport with TimeWindowsSupport with Measurable {
implicit val executionContext: ExecutionContext = executionContext("metric-receiver-worker")
private val rawDuration = 1 millis
private val storeGroupDuration = 5 seconds
def storeMetricMeasurements(metricMeasurements: List[MetricMeasurement]) = {
try {
store(metricMeasurements)
} catch {
case reason: Throwable ⇒ log.error("Failed receiving samples", reason)
}
}
private def store(metrics: List[MetricMeasurement]) = measureTime("measurementStore.store", "store metricMeasurements") {
log.info(s"Received samples of ${metrics.length} metrics")
val histos = mutable.Buffer[(Metric, () ⇒ HistogramBucket)]()
val counters = mutable.Buffer[(Metric, () ⇒ CounterBucket)]()
val now = System.currentTimeMillis()
metrics foreach (metricMeasurement ⇒ {
val metric = metricMeasurement.asMetric
val groupedMeasurements = metricMeasurement.measurements.groupBy(measurement ⇒ Timestamp(measurement.ts.getOrElse(now)).alignedTo(storeGroupDuration))
metric.mtype match {
case MetricType.Timer | MetricType.Gauge ⇒ histos ++= buildHistogramBuckets(metric, groupedMeasurements)
case MetricType.Counter ⇒ counters ++= buildCounterBuckets(metric, groupedMeasurements)
case _ ⇒ {
val msg = s"Discarding samples of $metric. Unknown metric type: ${metric.mtype}"
log.warn(msg)
}
}
})
val histogramsFuture = histogramBucketStore.store(histos, rawDuration)
val countersFuture = counterBucketStore.store(counters, rawDuration)
measureFutureTime("measurementStore.store.futures", "store metricMeasurements futures")(Future.sequence(Seq(histogramsFuture, countersFuture)))
}
private def buildHistogramBuckets(metric: Metric, groupedMeasurements: Map[Timestamp, List[Measurement]]): List[(Metric, () ⇒ HistogramBucket)] = {
track(metric)
groupedMeasurements.toList.map {
case (timestamp, measures) ⇒
(metric, () ⇒ {
val histogram = HistogramBucket.newHistogram(maxValue(measures))
val bucketNumber = timestamp.toBucketNumberOf(rawDuration)
measures.foreach(measure ⇒ record(metric, measure, histogram))
new HistogramBucket(bucketNumber, histogram)
})
}
}
private def maxValue(measurements: List[Measurement]) = {
var maxValue = 0L
measurements.foreach { measurement ⇒
if (measurement.values.nonEmpty) {
val value = measurement.values.max
if (value > maxValue) {
maxValue = value
}
}
}
maxValue
}
def record(metric: Metric, measure: Measurement, histogram: Histogram): Unit = {
skipNegativeValues(metric, measure.values).foreach(value ⇒ {
val highestTrackableValue = histogram.getHighestTrackableValue
if (value <= highestTrackableValue) histogram.recordValue(value)
else {
val exceeded = value - highestTrackableValue
log.warn(s"Sample of $metric has exceeded the highestTrackableValue of $highestTrackableValue by $exceeded. Truncating the excedent. Try changing the sampling unit or increasing the highestTrackableValue")
histogram.recordValue(highestTrackableValue)
}
})
}
private def buildCounterBuckets(metric: Metric, groupedMeasurements: Map[Timestamp, List[Measurement]]): List[(Metric, () ⇒ CounterBucket)] = {
track(metric)
groupedMeasurements.toList.map {
case (timestamp, measures) ⇒
(metric, () ⇒ {
val bucketNumber = timestamp.toBucketNumberOf(rawDuration)
val counts = measures.map(measure ⇒ skipNegativeValues(metric, measure.values).sum).sum
new CounterBucket(bucketNumber, counts)
})
}
}
private def track(metric: Metric) = measureTime("measurementStore.track", "track metric") {
metaStore.snapshot.get(metric) collect { case (timestamp, active) ⇒ metaStore.notifyMetricMeasurement(metric, active) } getOrElse {
log.debug(s"Got a new metric: $metric. Will store metadata for it")
storeMetadata(metric)
}
}
private def storeMetadata(metric: Metric) = measureFutureTime("measurementStore.storeMetadata", "store metadata") {
metaStore.insert(metric)
}
private def skipNegativeValues(metric: Metric, values: Seq[Long]): Seq[Long] = {
val (invalidValues, okValues) = values.partition(value ⇒ value < 0)
if (invalidValues.nonEmpty)
log.warn(s"Skipping invalid values for metric $metric: $invalidValues")
okValues
}
private def alreadyProcessed(metric: Metric, rawBucketNumber: BucketNumber) = {
//get the bucket number in the smallest window duration
val measureBucket = rawBucketNumber ~ smallestWindow.duration
//get the current tick. The delay helps tolerate slightly out-of-sync clocks between nodes (another node may already have started processing the tick)
if (Tick.alreadyProcessed(rawBucketNumber)) {
log.warn(s"Measurements for $metric marked to be reprocessed because their bucket number ($measureBucket) is less than or equal to the current bucket tick (${Tick().bucketNumber})")
}
false
}
} | despegar/khronus | khronus-core/src/main/scala/com/searchlight/khronus/store/MetricMeasurementStore.scala | Scala | apache-2.0 | 5,975 |
package org.jetbrains.plugins.scala.testingSupport.scalatest.scala2_11.scalatest3_0_1
import org.jetbrains.plugins.scala.testingSupport.scalatest.staticStringTest._
class Scalatest2_11_3_0_1_StaticStringTest extends Scalatest2_11_3_0_1_Base
with FeatureSpecStaticStringTest
with FlatSpecStaticStringTest
with FreeSpecStaticStringTest
with FunSpecStaticStringTest
with FunSuiteStaticStringTest
with PropSpecStaticStringTest
with WordSpecStaticStringTest
with MethodsStaticStringTest
| JetBrains/intellij-scala | scala/scala-impl/test/org/jetbrains/plugins/scala/testingSupport/scalatest/scala2_11/scalatest3_0_1/Scalatest2_11_3_0_1_StaticStringTest.scala | Scala | apache-2.0 | 500 |
package com.softwaremill.akka.stream.throttle
import com.softwaremill.akka.stream.throttle.IntervalBasedThrottlerSettings._
import org.scalatest.{FlatSpec, Matchers}
import scala.concurrent.duration._
class IntervalBasedThrottlerSettingsSpec extends FlatSpec with Matchers {
it should "return correct minimum interval between events" in {
assertInterval(2.perSecond, 500.millis)
assertInterval(20.perSecond, 50.millis)
assertInterval(200.per(1.second), 5.millis)
assertInterval(120.per(60.seconds), 500.millis)
assertInterval(120.per(1.minute), 500.millis)
assertInterval(120.per(1.minute), 500.millis)
assertInterval(1000.perSecond, 1.millis)
assertInterval(1000.perSecond, 1.millis)
}
private def assertInterval(ts: IntervalBasedThrottlerSettings, expectedInterval: FiniteDuration): Unit = {
ts.interval.compare(expectedInterval) shouldBe 0
}
}
| mkubala/akka-stream-throttle | src/test/scala/com/softwaremill/akka/stream/throttle/IntervalBasedThrottlerSettingsSpec.scala | Scala | gpl-2.0 | 897 |
/*
---------------------------------------------------------------------------
This software is released under a BSD license, adapted from
http://opensource.org/licenses/bsd-license.php
Copyright (c) 2010 Brian M. Clapper. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the names "clapper.org", "Scalasti", nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
---------------------------------------------------------------------------
*/
import org.scalatest.FunSuite
import org.clapper.argot._
/** Tests Argot flag option parsing.
*/
class ArgotFlagTest extends FunSuite {
import ArgotConverters._
test("flag option success") {
val parser = new ArgotParser("test")
val flag = parser.flag[Boolean](List("y", "on"), List("n", "off"),
"Do something")
val data = List(
(true, Array("-y")),
(true, Array("--on")),
(false, Array("-n")),
(false, Array("--off")),
(true, Array("-y", "-n", "--on"))
)
for ((expected, args) <- data) {
parser.reset()
parser.parse(args)
assertResult(expected, args.mkString("[", ", ", "]") + " -> " + expected) {
flag.value.get
}
}
}
test("flag option failure") {
val parser = new ArgotParser("test")
val flag = parser.flag[Boolean](List("y", "on"), List("n", "off"),
"Do something")
intercept[ArgotUsageException] {
parser.parse(Array("-f"))
}
}
test("integer flag") {
val parser = new ArgotParser("test")
val flag = parser.flag[Int](List("y", "on"), List("n", "off"), "toggle") {
(onOff, opt) =>
import scala.math
val currentValue = opt.value.getOrElse(0)
val newValue = if (onOff) currentValue + 1 else currentValue - 1
math.max(0, newValue)
}
val data = List(
(Some(3), Array("--on", "--on", "--on")),
(None, Array.empty[String]),
(Some(0), Array("-y", "-y", "-n", "-n", "-n", "-n")),
(Some(1), Array("-y", "-y", "-y", "-n", "-n"))
)
for ((expected, args) <- data) {
parser.reset()
parser.parse(args)
assertResult(expected, args.mkString("[", ", ", "]") + " -> " + expected) {
flag.value
}
}
}
test("custom type flag") {
class MyFlag(val counter: Int)
val parser = new ArgotParser("test")
val flag = parser.flag[MyFlag](List("y"), List("n"), "a toggle") {
(onOff, opt) =>
import scala.math
val currentValue = opt.value.getOrElse(new MyFlag(0))
val newValue = if (onOff) currentValue.counter + 1
else currentValue.counter - 1
new MyFlag(math.max(0, newValue))
}
val data = List(
(Some(3), Array("-y", "-y", "-y")),
(None, Array.empty[String]),
(Some(0), Array("-y", "-y", "-n", "-n", "-n", "-n"))
)
for ((expected, args) <- data) {
parser.reset()
parser.parse(args)
assertResult(expected, args.mkString("[", ", ", "]") + " -> " + expected) {
flag.value.map(_.counter)
}
}
}
}
| bmc/argot | src/test/scala/org/clapper/argot/ArgotParser/flag.scala | Scala | bsd-3-clause | 4,466 |
package cats.derived
import cats.Show
import shapeless._
import shapeless.labelled._
trait ShowPretty[A] extends Show[A] {
def showLines(a: A): List[String]
def show(a: A): String = showLines(a).mkString("\n")
}
object ShowPretty {
implicit def fromShow[A](implicit s: Show[A]): ShowPretty[A] =
new ShowPretty[A] {
override def showLines(a: A): List[String] =
s.show(a).split("\\n").toList
}
}
trait MkShowPretty[A] extends ShowPretty[A]
object MkShowPretty extends MkShowPrettyDerivation {
def apply[A](implicit showPretty: MkShowPretty[A]): MkShowPretty[A] =
showPretty
}
trait MkShowPrettyDerivation extends MkShowPretty1 {
implicit val emptyProductDerivedShowPretty: MkShowPretty[HNil] =
instance(_ => Nil)
implicit def productDerivedShowPretty[K <: Symbol, V, T <: HList](
implicit key: Witness.Aux[K],
showV: ShowPretty[V] OrElse MkShowPretty[V],
showT: MkShowPretty[T]
): MkShowPretty[FieldType[K, V] :: T] = instance { fields =>
val fieldName = key.value.name
val fieldValueLines = showV.unify.showLines(fields.head)
val nextFields = showT.showLines(fields.tail)
val fieldValue = {
val head = fieldValueLines.headOption.mkString
if (nextFields.isEmpty || fieldValueLines.size > 1) head
else s"$head,"
}
val remainingLines =
if (fieldValueLines.size > 1) {
val tail = fieldValueLines.tail
if (nextFields.isEmpty) tail
else tail.init ++ tail.lastOption.map(s => s"$s,")
} else Nil
List(s"$fieldName = $fieldValue") ++ remainingLines ++ nextFields
}
implicit def emptyCoproductDerivedShowPretty: MkShowPretty[CNil] =
instance(_ => Nil)
}
trait MkShowPretty1 extends MkShowPretty2 {
implicit def coproductDerivedShowPretty[K <: Symbol, V, T <: Coproduct](
implicit key: Witness.Aux[K],
showV: ShowPretty[V] OrElse MkShowPretty[V],
showT: MkShowPretty[T]
): MkShowPretty[FieldType[K, V] :+: T] = instance {
case Inl(l) => showV.unify.showLines(l)
case Inr(r) => showT.showLines(r)
}
}
trait MkShowPretty2 extends MkShowPretty3 {
implicit def genericDerivedShowPrettyProduct[A, R <: HList](
implicit repr: LabelledGeneric.Aux[A, R],
t: Typeable[A],
s: Lazy[MkShowPretty[R]]
): MkShowPretty[A] = instance { a =>
val name = t.describe.takeWhile(_ != '[')
val contentLines = s.value.showLines(repr.to(a))
val contents = contentLines.map(s => s" $s")
List(s"$name(") ++ contents ++ List(")")
}
}
trait MkShowPretty3 {
protected def instance[A](f: A => List[String]): MkShowPretty[A] =
new MkShowPretty[A] {
def showLines(a: A): List[String] = f(a)
}
implicit def genericDerivedShowPrettyCoproduct[A, R <: Coproduct](
implicit repr: LabelledGeneric.Aux[A, R],
s: Lazy[MkShowPretty[R]]
): MkShowPretty[A] =
instance(a => s.value.showLines(repr.to(a)))
}
| milessabin/kittens | core/src/main/scala/cats/derived/showPretty.scala | Scala | apache-2.0 | 2,926 |
package es.weso.wiFetcher.entities
import es.weso.wiFetcher.entities.IndicatorType._
import es.weso.wiFetcher.entities.IndicatorHighLow._
import java.util.Date
import es.weso.wiFetcher.entities.traits.Component
import scala.collection.mutable.HashMap
import scala.collection.mutable.ListBuffer
case class Indicator(
val id: String = null,
val indicatorType: IndicatorType = null,
val labels: HashMap[String, String] = new HashMap[String, String](),
val comments: HashMap[String, String] = new HashMap[String, String](),
var intervalStarts: Int = 0,
var interfalFinishes: Int = 0,
var countriesCoverage: Int = 0,
val weight: Double = 0.0,
val highLow: IndicatorHighLow = null,
val source: String = "",
val component: Component = null,
val providers:ListBuffer[Provider] = ListBuffer.empty,
val republish : Boolean) {
override def equals(o: Any) = o match {
case that: Indicator => that.id.equalsIgnoreCase(this.id)
case _ => false
}
override def hashCode(): Int = {
id.hashCode()
}
} | weso/wiFetcher | app/es/weso/wiFetcher/entities/Indicator.scala | Scala | apache-2.0 | 1,033 |
package com.gjos.scala.swoc.protocol
object Move {
FastMove.init()
def apply(moveType: MoveType, from: Location, to: Location): Move =
if (moveType == MoveType.Pass) 0 else FastMove.encode(moveType)(from)(to)
def moveType(m: Move): MoveType = FastMove.moveType(m)
def from(m: Move): Location = FastMove.from(m)
def to(m: Move): Location = FastMove.to(m)
def toString(m: Move) = {
val mt = moveType(m)
val mv = if (mt == MoveType.Pass) "P" else if (mt == MoveType.Attack) "A" else "D"
Location.label(from(m)) + s"--$mv-->" + Location.label(to(m))
}
} | Oduig/swoc2014 | Greedy/src/main/scala/com/gjos/scala/swoc/protocol/Move.scala | Scala | apache-2.0 | 583 |
package de.htwg.zeta.server.model.modelValidator.validator.rules.nullChecks
import de.htwg.zeta.common.models.project.instance.GraphicalDslInstance
import de.htwg.zeta.server.model.modelValidator.validator.rules.ModelRule
/**
* This file was created by Tobias Droth as part of his master thesis at HTWG Konstanz (03/2017 - 09/2017).
*/
class NodeAttributesNoNullValues extends ModelRule {
override val name: String = getClass.getSimpleName
override val description: String = "A value of the attribute list inside a node is Null."
override val possibleFix: String = "Remove the Null value."
override def check(model: GraphicalDslInstance): Boolean = !model.nodes.flatMap(_.attributeValues.values).contains(null) // scalastyle:ignore null
}
| Zeta-Project/zeta | api/server/app/de/htwg/zeta/server/model/modelValidator/validator/rules/nullChecks/NodeAttributesNoNullValues.scala | Scala | bsd-2-clause | 752 |
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.accounts.frs102.boxes
import org.mockito.Mockito._
import uk.gov.hmrc.ct.accounts.{MockFrs102AccountsRetriever, AccountsMoneyValidationFixture}
import uk.gov.hmrc.ct.accounts.frs102.retriever.Frs102AccountsBoxRetriever
import uk.gov.hmrc.ct.box.CtValidation
class AC114Spec extends AccountsMoneyValidationFixture[Frs102AccountsBoxRetriever] with MockFrs102AccountsRetriever {
override def setUpMocks() = {
super.setUpMocks()
import boxRetriever._
when(ac42()).thenReturn(AC42(Some(100)))
when(ac43()).thenReturn(AC43(Some(43)))
}
testAccountsMoneyValidationWithMin("AC114", 0, AC114.apply)
}
| hmrc/ct-calculations | src/test/scala/uk/gov/hmrc/ct/accounts/frs102/boxes/AC114Spec.scala | Scala | apache-2.0 | 1,249 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.integration.torch
import com.intel.analytics.bigdl.nn.{Sequential, SpatialDivisiveNormalization}
import com.intel.analytics.bigdl.tensor.Tensor
import com.intel.analytics.bigdl.utils.RandomGenerator._
import scala.util.Random
@com.intel.analytics.bigdl.tags.Serial
class SpatialDivisiveNormalizationSpec extends TorchSpec {
"A SpatialDivisiveNormalization 3D input" should "generate correct output and grad" in {
torchCheck()
val seed = 100
RNG.setSeed(seed)
val layer = new SpatialDivisiveNormalization[Double]()
val model = new Sequential[Double]()
model.add(layer)
Random.setSeed(3)
val input = Tensor[Double](1, 5, 5).apply1(e => Random.nextDouble())
val output = model.updateOutput(input).toTensor[Double]
val gradOutput = Tensor[Double]().resizeAs(output).apply1(e => Random.nextDouble())
val gradInput = model.backward(input, gradOutput)
val code = "torch.manualSeed(" + seed + ")\n" +
"""layer = nn.SpatialDivisiveNormalization()
model = nn.Sequential()
model:add(layer)
model:zeroGradParameters()
output = model:forward(input)
gradInput = model:backward(input, gradOutput)
"""
val (luaTime, torchResult) = TH.run(code,
Map("input" -> input, "gradOutput" -> gradOutput),
Array("output", "gradInput")
)
val luaOutput = torchResult("output").asInstanceOf[Tensor[Double]]
val luaGradInput = torchResult("gradInput").asInstanceOf[Tensor[Double]]
output should be(luaOutput)
gradInput should be(luaGradInput)
}
"A SpatialDivisiveNormalization" should "generate correct output and grad" in {
torchCheck()
val seed = 100
RNG.setSeed(seed)
val layer = new SpatialDivisiveNormalization[Double]()
val model = new Sequential[Double]()
model.add(layer)
Random.setSeed(3)
val input = Tensor[Double](3, 1, 5, 5).apply1(e => Random.nextDouble())
val output = model.updateOutput(input).toTensor[Double]
val gradOutput = Tensor[Double]().resizeAs(output).apply1(e => Random.nextDouble())
val gradInput = model.backward(input, gradOutput)
val code = "torch.manualSeed(" + seed + ")\n" +
"""layer = nn.SpatialDivisiveNormalization()
model = nn.Sequential()
model:add(layer)
model:zeroGradParameters()
output = model:forward(input)
gradInput = model:backward(input, gradOutput)
"""
val (luaTime, torchResult) = TH.run(code,
Map("input" -> input, "gradOutput" -> gradOutput),
Array("output", "gradInput")
)
val luaOutput = torchResult("output").asInstanceOf[Tensor[Double]]
val luaGradInput = torchResult("gradInput").asInstanceOf[Tensor[Double]]
output should be(luaOutput)
gradInput should be(luaGradInput)
}
"A SpatialDivisiveNormalization(4)" should "generate correct output and grad" in {
torchCheck()
val seed = 100
RNG.setSeed(seed)
val layer = new SpatialDivisiveNormalization[Double](4)
val model = new Sequential[Double]()
model.add(layer)
Random.setSeed(3)
val input = Tensor[Double](3, 4, 5, 5).apply1(e => Random.nextDouble())
val output = model.updateOutput(input).toTensor[Double]
val gradOutput = Tensor[Double]().resizeAs(output).apply1(e => Random.nextDouble())
val gradInput = model.backward(input, gradOutput)
val code = "torch.manualSeed(" + seed + ")\n" +
"""layer = nn.SpatialDivisiveNormalization(4)
model = nn.Sequential()
model:add(layer)
model:zeroGradParameters()
output = model:forward(input)
gradInput = model:backward(input, gradOutput)
"""
val (luaTime, torchResult) = TH.run(code,
Map("input" -> input, "gradOutput" -> gradOutput),
Array("output", "gradInput")
)
val luaOutput = torchResult("output").asInstanceOf[Tensor[Double]]
val luaGradInput = torchResult("gradInput").asInstanceOf[Tensor[Double]]
output should be(luaOutput)
gradInput should be(luaGradInput)
}
"SpatialDivisiveNormalization(4, kernel)" should "generate correct output and grad" in {
torchCheck()
val seed = 100
RNG.setSeed(seed)
val kernel = Tensor[Double](7).apply1(e => Random.nextDouble())
val model = new SpatialDivisiveNormalization[Double](4, kernel)
Random.setSeed(3)
val input = Tensor[Double](3, 4, 5, 5).apply1(e => Random.nextDouble())
val output = model.updateOutput(input)
val gradOutput = Tensor[Double]().resizeAs(output).apply1(e => Random.nextDouble())
val gradInput = model.backward(input, gradOutput)
val code = "torch.manualSeed(" + seed + ")\n" +
"""model = nn.SpatialDivisiveNormalization(4, kernel)
model:zeroGradParameters()
output = model:forward(input)
gradInput = model:backward(input, gradOutput)
"""
val (luaTime, torchResult) = TH.run(code,
Map("input" -> input, "gradOutput" -> gradOutput, "kernel" -> kernel),
Array("output", "gradInput")
)
val luaOutput = torchResult("output").asInstanceOf[Tensor[Double]]
val luaGradInput = torchResult("gradInput").asInstanceOf[Tensor[Double]]
output should be(luaOutput)
gradInput should be(luaGradInput)
}
}
| zhangxiaoli73/BigDL | spark/dl/src/test/scala/com/intel/analytics/bigdl/integration/torch/SpatialDivisiveNormalizationSpec.scala | Scala | apache-2.0 | 5,854 |
package defend.shard
import akka.actor.{ ActorLogging, ActorRef, Props }
import akka.persistence._
import defend.model.DefenceTower
import defend.shard.MessageLostTracker.LastMessageId
import defend.ui.StatusKeeper
import pl.project13.scala.rainbow._
class MessageLostTracker(tower: DefenceTower, statusKeeper: ActorRef) extends PersistentActor with ActorLogging {
def persistenceId: String = s"messageLostTracker-${tower.name}"
private var lastMessageId = 0
val snapshotEvery: Int = 30
private val created = System.currentTimeMillis()
override def receiveRecover: Receive = {
case LastMessageId(id) => lastMessageId = id
case SnapshotOffer(criteria: SnapshotMetadata, LastMessageId(id)) =>
lastMessageId = id
case RecoveryCompleted =>
println(s"Recover completed for $persistenceId in ${System.currentTimeMillis() - created}ms".green)
log.info("Recover completed for {} in {}ms", persistenceId, System.currentTimeMillis() - created)
}
override protected def onRecoveryFailure(cause: Throwable, event: Option[Any]): Unit = {
super.onRecoveryFailure(cause, event)
println(s"Recovery for $persistenceId failed: ${cause.getMessage}".red)
log.error(cause, "Recovery failed!")
}
override def receiveCommand: Receive = {
case m @ LastMessageId(id) =>
log.debug("received LastMessage({})", id)
persist(m) {
x =>
if (id - lastMessageId > 1) {
statusKeeper ! StatusKeeper.Protocol.LostMessages(tower, id - lastMessageId, System.currentTimeMillis())
}
lastMessageId = id
}
if (lastMessageId % snapshotEvery == 0) {
log.debug("Saving snapshot")
saveSnapshot(m)
}
case SaveSnapshotSuccess(metadata) =>
      log.info("Snapshot saved successfully")
deleteMessages(metadata.sequenceNr - 1)
case SaveSnapshotFailure(metadata, cause) =>
log.error(cause, s"Snapshot not saved: ${cause.getMessage}")
println(s"Snapshot for $persistenceId not saved: $cause".red)
}
}
object MessageLostTracker {
def props(tower: DefenceTower, statusKeeper: ActorRef): Props = {
Props(new MessageLostTracker(tower, statusKeeper))
}
case class LastMessageId(id: Int)
}
| otrebski/reactive-missile-defend | src/main/scala/defend/shard/MessageLostTracker.scala | Scala | apache-2.0 | 2,239 |
package com.twitter.finagle.mux.transport
import com.twitter.finagle.{Path, tracing, Dtab, Dentry}
import com.twitter.io.Charsets
import com.twitter.util.Time
import com.twitter.util.TimeConversions.intToTimeableNumber
import org.jboss.netty.buffer.ChannelBuffers
import org.junit.runner.RunWith
import org.scalatest.FunSuite
import org.scalatest.junit.{AssertionsForJUnit, JUnitRunner}
import scala.collection.mutable
@RunWith(classOf[JUnitRunner])
class MessageTest extends FunSuite with AssertionsForJUnit {
import Message._
def buf(n: Int) = ChannelBuffers.wrappedBuffer((0 until n).toArray.map(_.toByte))
val body = buf(4)
val goodTags = Seq(8388607, 1, 123)
val goodVersions = Seq(100: Short, 200: Short, 300: Short)
val goodTraceIds = Seq(None, Some(tracing.Trace.nextId))
val goodBufs = Seq(ChannelBuffers.EMPTY_BUFFER, buf(1), buf(4), buf(100))
  val goodStrings = Seq("", "Hello, world!", "☺☹")
val goodKeys = goodStrings map { s =>
val bytes = s.getBytes(Charsets.Utf8)
ChannelBuffers.wrappedBuffer(bytes)
}
val goodDentries = Seq("/a=>/b", "/foo=>/$/inet/twitter.com/80") map(Dentry.read)
val goodDtabs = goodDentries.permutations map { ds => Dtab(ds.toIndexedSeq) }
val goodDests = Seq("/", "/okay", "/foo/bar/baz") map(Path.read)
val goodDurationLeases = Seq(Message.Tlease.MinLease, Message.Tlease.MaxLease)
val goodTimeLeases = Seq(Time.epoch, Time.now, Time.now + 5.minutes)
val goodContexts =
Seq() ++ (for { k <- goodKeys; v <- goodBufs } yield (k, v)).combinations(2).toSeq
test("d(e(m)) == m") {
val ms = mutable.Buffer[Message]()
ms ++= (for {
tag <- goodTags
version <- goodVersions
ctx <- goodContexts
} yield Tinit(tag, version, ctx))
ms ++= (for {
tag <- goodTags
version <- goodVersions
ctx <- goodContexts
} yield Rinit(tag, version, ctx))
ms ++= (for {
tag <- goodTags
traceId <- goodTraceIds
body <- goodBufs
} yield Treq(tag, traceId, body))
ms ++= (for {
tag <- goodTags
body <- goodBufs
} yield RreqOk(tag, body))
ms ++= (for {
tag <- goodTags
} yield Tdrain(tag))
ms ++= (for {
tag <- goodTags
reason <- goodStrings
} yield Tdiscarded(tag, reason))
ms ++= (for {
tag <- goodTags
ctx <- goodContexts
dest <- goodDests
dtab <- goodDtabs
body <- goodBufs
} yield Tdispatch(tag, ctx, dest, dtab, body))
ms ++= (for {
tag <- goodTags
ctx <- goodContexts
body <- goodBufs
} yield RdispatchOk(tag, ctx, body))
ms ++= (for {
tag <- goodTags
ctx <- goodContexts
err <- goodStrings
} yield RdispatchError(tag, ctx, err))
ms ++= (for {
tag <- goodTags
ctx <- goodContexts
} yield RdispatchNack(tag, ctx))
ms ++= (for {
lease <- goodDurationLeases
} yield Tlease(lease))
ms ++= (for {
lease <- goodTimeLeases
} yield Tlease(lease))
def assertEquiv(a: Message, b: Message) = (a, b) match {
case (Tdispatch(tag1, ctxs1, dst1, dtab1, req1),
Tdispatch(tag2, ctxs2, dst2, dtab2, req2)) =>
assert(
tag1 == tag2 && ctxs1 == ctxs2 && dst1 == dst2 &&
Equiv[Dtab].equiv(dtab1, dtab2) && req1 == req2)
case (a, b) => assert(a == b)
}
// Debugging tip: in an error message, 'm' is the RHS.
for (m <- ms)
assertEquiv(decode(encode(m)), m)
}
test("not encode invalid messages") {
assert(intercept[BadMessageException] {
encode(Treq(-1, Some(tracing.Trace.nextId), body))
} == BadMessageException("invalid tag number -1"))
assert(intercept[BadMessageException] {
encode(Treq(1 << 24, Some(tracing.Trace.nextId), body))
} == BadMessageException("invalid tag number 16777216"))
}
test("not decode invalid messages") {
assert(intercept[BadMessageException] {
decode(ChannelBuffers.EMPTY_BUFFER)
} == BadMessageException("short message"))
assert(intercept[BadMessageException] {
decode(ChannelBuffers.wrappedBuffer(Array[Byte](0, 0, 0, 1)))
} == BadMessageException("unknown message type: 0 [tag=1]"))
}
test("decode fragments") {
val msgs = Seq(
Tdispatch(Message.Tags.setMsb(goodTags.head),
goodContexts.head,
goodDests.head,
Dtab.empty,
goodBufs.head),
RdispatchOk(Message.Tags.setMsb(goodTags.last),
goodContexts.last,
goodBufs.last))
for (m <- msgs) {
assert(decode(encode(m)) == Fragment(m.typ, m.tag, m.buf))
}
}
test("extract control messages") {
val tag = 0
val buf = ChannelBuffers.EMPTY_BUFFER
assert(ControlMessage.unapply(Treq(tag, None, buf)) == None)
assert(ControlMessage.unapply(RreqOk(0, buf)) == None)
assert(ControlMessage.unapply(Tdispatch(tag, Seq.empty, Path.empty, Dtab.empty, buf)) == None)
assert(ControlMessage.unapply(RdispatchOk(tag, Seq.empty, buf)) == None)
assert(ControlMessage.unapply(Tdrain(tag)) == Some(tag))
assert(ControlMessage.unapply(Rdrain(tag)) == Some(tag))
assert(ControlMessage.unapply(Tping(tag)) == Some(tag))
assert(ControlMessage.unapply(Rping(tag)) == Some(tag))
assert(ControlMessage.unapply(Tdiscarded(tag, "")) == Some(tag))
assert(ControlMessage.unapply(Tlease(0, 0L)) == Some(tag))
}
}
| lukiano/finagle | finagle-mux/src/test/scala/com/twitter/finagle/mux/transport/MessageTest.scala | Scala | apache-2.0 | 5,366 |
/*
* Copyright 2017 Datamountaineer.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.datamountaineer.streamreactor.connect.elastic6.indexname
import scala.annotation.tailrec
class InvalidCustomIndexNameException(message: String) extends RuntimeException(message)
case class CustomIndexName(fragments: Vector[IndexNameFragment]) {
override def toString: String = fragments.map(_.getFragment).mkString
}
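// Illustrative example (assumed input; also assuming '{' and '}' are DateTimeFragment.OpeningChar /
// ClosingChar): parsing "kafka-index_{YYYY-MM-dd}" would yield
//   CustomIndexName(Vector(TextFragment("kafka-index_"), DateTimeFragment("YYYY-MM-dd")))
// whose toString renders the date-time fragment using the current date.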
object CustomIndexName {
@tailrec
private def parseIndexName(remainingChars: Vector[Char], currentFragment: StringBuilder, results: Vector[Option[IndexNameFragment]]): Vector[IndexNameFragment] =
remainingChars match {
case head +: rest => head match {
case DateTimeFragment.OpeningChar =>
val (dateTimeFormat, afterDateTimeFormatIncludingClosingChar) = rest.span { _ != DateTimeFragment.ClosingChar }
val afterDateTimeFormat = afterDateTimeFormatIncludingClosingChar.tail
val maybeCurrentFragment = currentFragment.mkString.toOption
val maybeDateTimeFormat = dateTimeFormat.mkString.toOption
val newResultsWithDateTimeFragment = results :+ maybeCurrentFragment.map(TextFragment.apply) :+ maybeDateTimeFormat.map(DateTimeFragment(_))
parseIndexName(afterDateTimeFormat, new StringBuilder, newResultsWithDateTimeFragment)
case DateTimeFragment.ClosingChar => throw new InvalidCustomIndexNameException(s"Found closing '${DateTimeFragment.ClosingChar}' but no opening character")
case anyOtherChar => parseIndexName(rest, currentFragment.append(anyOtherChar), results)
}
case Vector() =>
val maybeCurrentFragment = currentFragment.mkString.toOption
(results :+ maybeCurrentFragment.map(TextFragment.apply)).flatten
}
def parseIndexName(indexName: String): CustomIndexName =
CustomIndexName(parseIndexName(indexName.toVector, new StringBuilder, Vector.empty))
}
| datamountaineer/stream-reactor | kafka-connect-elastic6/src/main/scala/com/datamountaineer/streamreactor/connect/elastic6/indexname/CustomIndexName.scala | Scala | apache-2.0 | 2,484 |
package ruby.models
import com.bryzek.apidoc.generator.v0.models.InvocationForm
import org.scalatest.{ShouldMatchers, FunSpec}
class ExampleUnionTypesSpec extends FunSpec with ShouldMatchers {
private lazy val service = models.TestHelper.parseFile(s"/examples/apidoc-example-union-types.json")
it("generates expected code for ruby client") {
RubyClientGenerator.invoke(InvocationForm(service = service)) match {
case Left(errors) => fail(errors.mkString(", "))
case Right(sourceFiles) => {
sourceFiles.size shouldBe 1
models.TestHelper.assertEqualsFile("/example-union-types-ruby-client.txt", sourceFiles.head.contents)
}
}
}
}
| Seanstoppable/apidoc-generator | ruby-generator/src/test/scala/models/ExampleUnionTypesSpec.scala | Scala | mit | 681 |
/*
* The Bluejelly project, Copyright 2012.
*
* This source code is distributed under the terms of
* the BSD license, see the LICENSE file for details.
*/
package bluejelly.asm
import scala.annotation.tailrec
import scala.language.postfixOps
import scala.util.control.Exception.catching
import scala.util.parsing.combinator.JavaTokenParsers
import scala.util.parsing.input.Positional
/**
* Parse an assembler module.
* @author ppedemon
*/
object Parser extends JavaTokenParsers {
// Precondition: s is of the form "..."
def unquote(s:String) = s.substring(1, s.length - 1)
  // Deal with escape sequences, except \\uHHHH which must be handled
  // separately in a preprocessing stage (see UnicodeFilter)
  def esc = """(\\[btnfr"'\\]|\\[0-3]?[0-7]{1,2})"""
val escMap = Map(
'b' -> 8.toChar,
    't' -> '\t',
    'n' -> '\n',
    'f' -> '\f',
    'r' -> '\r',
    '"' -> '"' ,
    '\'' -> '\'',
    '\\' -> '\\')
@tailrec
private def unesc(s:String,r:StringBuilder=new StringBuilder):String = {
@tailrec
def oct(s:String,n:Int=0):(String,Int) =
if (s.isEmpty) (s,n) else
s.head match {
case x if x.isDigit => oct(s.tail, (n<<3)|(x.toInt & 0xf))
case _ => (s,n)
}
def esc(s:String) = (s.tail, escMap(s.head))
if (s.isEmpty) r.toString else
s.head match {
      case '\\' => s.tail.head match {
case c if c.isDigit =>
val (rest,n) = oct(s.tail)
unesc(rest, r+n.toChar)
case _ =>
val (rest,c) = esc(s.tail)
unesc(rest, r+c)
}
case c => unesc(s.tail, r+c)
}
}
  def id = """[a-zA-Z_\$][\w\$]*"""
  def simpleIdent = id.r
  override val whiteSpace = """([ \t\x0B\f\r\n]|#[^\n]*)*""".r
  override def ident = (id + "(\\." + id + ")*").r
  override def floatingPointNumber = """-?(\d+(\.\d*)|\d*\.\d+)([eE][+-]?\d+)?""".r
  override def stringLiteral =
    ("\"" + """([^"\p{Cntrl}\\]|""" + esc + ")*" + "\"").r ^^
    { s => unesc(unquote(s)) }
  def charLiteral:Parser[Char] =
    ("""'([^'\p{Cntrl}\\]|""" + esc + ")'").r ^^
{ s => unesc(unquote(s)) charAt 0 }
def hexNum = "0(x|X)".r ~> """[a-fA-F0-9]+""".r ^^ {Integer.parseInt(_,16)}
def octNum = "0(o|O)".r ~> """[0-7]+""".r ^^ {Integer.parseInt(_,8)}
def pint:Parser[Int] = hexNum | octNum | (wholeNumber ^^ {_.toInt})
def pdbl:Parser[Double] =
floatingPointNumber ^^ {_.toDouble} | wholeNumber ^^ {_.toDouble}
def intint:((Int,Int) => Instr) => Parser[Instr] = f =>
pint ~ ("," ~> pint) ^^ {case x ~ y => f(x,y)}
def intid:((Int,String) => Instr) => Parser[Instr] = f =>
pint ~ ("," ~> ident) ^^ {case x ~ y => f(x,y)}
def idint:((String,Int) => Instr) => Parser[Instr] = f =>
ident ~ ("," ~> pint) ^^ {case x ~ y => f(x,y)}
// Shorthand for positioned combinator
def $[T <: Positional](p:Parser[T]) = positioned(p)
def table = Map(
"enter" -> success(Enter),
"ret" -> success(Return),
"raise" -> success(Raise),
"catch" -> success(Catch),
"jmp" -> (ident ^^ {new Jump(_)}),
"evalvar" -> intid (new EvalVar(_,_)),
"retcon" -> intint (new RetCon(_,_)),
"retint" -> (pint ^^ {new RetInt(_)}),
"retstr" -> (stringLiteral ^^ {new RetStr(_)}),
"retdbl" -> (pdbl ^^ {new RetDbl(_)}),
"retchr" -> (charLiteral ^^ {new RetChr(_)}),
"stack" -> (pint ^^ {new StackCheck(_)}),
"dumpstack" -> (stringLiteral ^^ {new DumpStack(_)}),
"pushvar" -> (pint ^^ {new PushVar(_)}),
"pushint" -> (pint ^^ {new PushInt(_)}),
"pushdbl" -> (pdbl ^^ {new PushDbl(_)}),
"pushstr" -> (stringLiteral ^^ {new PushStr(_)}),
"pushchr" -> (charLiteral ^^ {new PushChr(_)}),
"pushcode" -> (ident ^^ {new PushCode(_)}),
"pushcont" -> (ident ^^ {new PushCont(_)}),
"slide" -> intint (new Slide(_,_)),
"mkapp" -> (pint ^^ {new MkApp(_)}),
"mknapp" -> (pint ^^ {new MkNapp(_)}),
"newapp" -> success(AllocApp),
"newnapp" -> success(AllocNapp),
"packapp" -> intint (new PackApp(_,_)),
"packnapp" -> intint (new PackNapp(_,_)),
"mkcon" -> intint (new MkTyCon(_,_)),
"newcon" -> (pint ^^ {new AllocTyCon(_)}),
"packcon" -> intint (new PackTyCon(_,_)),
"matchcon" -> genMatch[Int](pint, new MatchCon(_,_)),
"matchint" -> genMatch[Int](pint, new MatchInt(_,_)),
"matchstr" -> genMatch[String](stringLiteral, new MatchStr(_,_)),
"matchdbl" -> genMatch[Double](pdbl, new MatchDbl(_,_)),
"matchchr" -> genMatch[Char](charLiteral, new MatchChr(_,_))
)
// Parse a single simple instruction (this excludes match instructions)
def instr:Parser[Instr] = {
def handler(i:String):PartialFunction[Throwable,Parser[Instr]] = {
case _ => failure("Invalid instruction: " + i)
}
$(ident >> {instr => catching (handler(instr)) (table(instr))})
}
// Parse an instruction block
def block:Parser[Block] = (instr+) ^^ {new Block(_)}
// Parse a case alternative
def alt[T](p:Parser[T]):Parser[Alt[T]] =
(".case" ~> p <~ ":") ~ block ^^ {case v ~ b => new Alt(v,b)}
// Parse a sequence of one or more case alternatives
def manyAlts[T](p:Parser[T]):Parser[List[Alt[T]]] = alt(p)*
// Parse a default alternative
def deflt:Parser[Block] = ".default" ~ ":" ~> block
// Generic match instruction parser
def genMatch[T](p:Parser[T],f:(List[Alt[T]],Option[Block]) => Instr):Parser[Instr] = {
manyAlts(p) ~ (deflt?) <~ ".end" ^^ {case as ~ d => f(as,d)}
}
// Parse function arguments: arity and matcher flag
def funArgs:Parser[(Int,Boolean)] = {
def arg:Parser[Any] = ("arity" ~ "=" ~> pint) | ("matcher" ^^^ true)
"[" ~> repsep(arg,",") <~ "]" ^^ { as =>
var arity = 0
var matcher = false
for (a <- as) a match {
case x:Int => arity = x
case x:Boolean => matcher = x
}
(arity,matcher)
}
}
// Parse a function header
def funHeader:Parser[~[String,(Int,Boolean)]] =
simpleIdent ~! (funArgs?) ^^ {case n~a => new ~(n,a.getOrElse((0,false)))} |
funArgs ~ simpleIdent ^^ {case a~n => new ~(n,a)}
// Parse a complete function
def fun:Parser[Function] =
$((".fun" ~> funHeader <~ ":") ~ block <~ ".end" ^^
{case n ~ a ~ b => new Function(n, a._1, a._2, b)})
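  // Illustrative usage sketch (an assumption, not taken from the original project): the concrete
  // syntax accepted above can be fed to the standard parseAll driver inherited from JavaTokenParsers:
  //
  //   val src = """.module Demo:
  //                  .fun main[arity=1]:
  //                    stack 2
  //                    pushvar 0
  //                    ret
  //                  .end
  //                .end"""
  //   Parser.parseAll(Parser.module, src) match {
  //     case Parser.Success(m, _) => println("parsed: " + m)
  //     case failure              => println(failure)
  //   }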
// Parse a whole module
def module:Parser[Module] = {
(".module" ~> ident <~ ":") ~ (fun*) <~ ".end" ^^ {
case n ~ funs => new Module(n, funs)
}
}
}
| ppedemon/Bluejelly | bluejelly-asm/src/main/scala/bluejelly/asm/Parser.scala | Scala | bsd-3-clause | 6,562 |
package time
import text.StringOption
/**
* @author K.Sakamoto
* Created on 15/10/19
*/
trait TimeExtractor {
def extract(text: StringOption): Seq[TimeTmp]
def extractUnionTime(text: StringOption): TimeTmp = {
val timeRanges: Seq[TimeTmp] = extract(text)
TimeMerger.union(timeRanges)
}
def extractIntersectTime(text: StringOption): TimeTmp = {
val timeRanges: Seq[TimeTmp] = extract(text)
TimeMerger.intersect(timeRanges)
}
}
| ktr-skmt/FelisCatusZero | src/main/scala/time/TimeExtractor.scala | Scala | apache-2.0 | 467 |
package com.wavesplatform
import com.wavesplatform.account.{Address, AddressOrAlias, KeyPair}
import com.wavesplatform.block.{Block, MicroBlock}
import com.wavesplatform.common.state.ByteStr
import com.wavesplatform.common.utils._
import com.wavesplatform.history.DefaultBaseTarget
import com.wavesplatform.lang.script.Script
import com.wavesplatform.lang.v1.compiler.Terms.FUNCTION_CALL
import com.wavesplatform.protobuf.block.PBBlocks
import com.wavesplatform.state.StringDataEntry
import com.wavesplatform.transaction.Asset.{IssuedAsset, Waves}
import com.wavesplatform.transaction.assets.IssueTransaction
import com.wavesplatform.transaction.lease.{LeaseCancelTransaction, LeaseTransaction}
import com.wavesplatform.transaction.smart.{InvokeScriptTransaction, SetScriptTransaction}
import com.wavesplatform.transaction.transfer.TransferTransaction
import com.wavesplatform.transaction.{DataTransaction, Transaction, TxVersion}
import com.wavesplatform.utils._
import org.scalacheck.Gen
trait BlocksTransactionsHelpers { self: TransactionGen =>
object QuickTX {
val FeeAmount = 400000
def transfer(
from: KeyPair,
to: AddressOrAlias = accountGen.sample.get.toAddress,
amount: Long = smallFeeGen.sample.get,
timestamp: Gen[Long] = timestampGen
): Gen[Transaction] =
for {
timestamp <- timestamp
} yield TransferTransaction.selfSigned(1.toByte, from, to, Waves, amount, Waves, FeeAmount, ByteStr.empty, timestamp).explicitGet()
def transferV2(
from: KeyPair,
to: AddressOrAlias = accountGen.sample.get.toAddress,
amount: Long = smallFeeGen.sample.get,
timestamp: Gen[Long] = timestampGen
): Gen[Transaction] =
for {
timestamp <- timestamp
} yield TransferTransaction.selfSigned(2.toByte, from, to, Waves, amount, Waves, FeeAmount, ByteStr.empty, timestamp).explicitGet()
def transferAsset(
asset: IssuedAsset,
from: KeyPair,
to: AddressOrAlias = accountGen.sample.get.toAddress,
amount: Long = smallFeeGen.sample.get,
timestamp: Gen[Long] = timestampGen
): Gen[Transaction] =
for {
timestamp <- timestamp
} yield TransferTransaction.selfSigned(1.toByte, from, to, asset, amount, Waves, FeeAmount, ByteStr.empty, timestamp).explicitGet()
def lease(
from: KeyPair,
to: AddressOrAlias = accountGen.sample.get.toAddress,
amount: Long = smallFeeGen.sample.get,
timestamp: Gen[Long] = timestampGen
): Gen[LeaseTransaction] =
for {
timestamp <- timestamp
} yield LeaseTransaction.selfSigned(1.toByte, from, to, amount, FeeAmount, timestamp).explicitGet()
def leaseCancel(from: KeyPair, leaseId: ByteStr, timestamp: Gen[Long] = timestampGen): Gen[LeaseCancelTransaction] =
for {
timestamp <- timestamp
} yield LeaseCancelTransaction.selfSigned(1.toByte, from, leaseId, FeeAmount, timestamp).explicitGet()
def data(from: KeyPair, dataKey: String, timestamp: Gen[Long] = timestampGen): Gen[DataTransaction] =
for {
timestamp <- timestamp
} yield DataTransaction.selfSigned(1.toByte, from, List(StringDataEntry(dataKey, Gen.numStr.sample.get)), FeeAmount, timestamp).explicitGet()
def nftIssue(from: KeyPair, timestamp: Gen[Long] = timestampGen): Gen[IssueTransaction] =
for {
timestamp <- timestamp
} yield IssueTransaction(
TxVersion.V1,
from.publicKey,
"test".utf8Bytes,
Array.emptyByteArray,
1,
0,
reissuable = false,
script = None,
100000000L,
timestamp
).signWith(from.privateKey)
def setScript(from: KeyPair, script: Script, timestamp: Gen[Long] = timestampGen): Gen[SetScriptTransaction] =
for {
timestamp <- timestamp
} yield SetScriptTransaction.selfSigned(1.toByte, from, Some(script), FeeAmount, timestamp).explicitGet()
def invokeScript(
from: KeyPair,
dapp: Address,
call: FUNCTION_CALL,
payments: Seq[InvokeScriptTransaction.Payment] = Nil,
timestamp: Gen[Long] = timestampGen
): Gen[InvokeScriptTransaction] =
for {
timestamp <- timestamp
} yield InvokeScriptTransaction.selfSigned(1.toByte, from, dapp, Some(call), payments, FeeAmount * 2, Waves, timestamp).explicitGet()
}
object UnsafeBlocks {
def unsafeChainBaseAndMicro(
totalRefTo: ByteStr,
base: Seq[Transaction],
micros: Seq[Seq[Transaction]],
signer: KeyPair,
version: Byte,
timestamp: Long
): (Block, Seq[MicroBlock]) = {
val block = unsafeBlock(totalRefTo, base, signer, version, timestamp)
val microBlocks = micros
.foldLeft((block, Seq.empty[MicroBlock])) {
case ((lastTotal, allMicros), txs) =>
val (newTotal, micro) = unsafeMicro(totalRefTo, lastTotal, txs, signer, version, timestamp)
(newTotal, allMicros :+ micro)
}
._2
(block, microBlocks)
}
def unsafeMicro(
totalRefTo: ByteStr,
prevTotal: Block,
txs: Seq[Transaction],
signer: KeyPair,
version: TxVersion,
ts: Long
): (Block, MicroBlock) = {
val newTotalBlock = unsafeBlock(totalRefTo, prevTotal.transactionData ++ txs, signer, version, ts)
val unsigned = new MicroBlock(version, signer.publicKey, txs, prevTotal.id(), newTotalBlock.signature, ByteStr.empty)
val signature = crypto.sign(signer.privateKey, unsigned.bytes())
val signed = unsigned.copy(signature = signature)
(newTotalBlock, signed)
}
def unsafeBlock(
reference: ByteStr,
txs: Seq[Transaction],
signer: KeyPair,
version: Byte,
timestamp: Long,
bTarget: Long = DefaultBaseTarget
): Block = {
val unsigned: Block = Block.create(
version = version,
timestamp = timestamp,
reference = reference,
baseTarget = bTarget,
generationSignature = com.wavesplatform.history.generationSignature,
generator = signer.publicKey,
featureVotes = Seq.empty,
rewardVote = -1L,
transactionData = txs
)
val toSign =
if (version < Block.ProtoBlockVersion) unsigned.bytes()
else PBBlocks.protobuf(unsigned).header.get.toByteArray
unsigned.copy(signature = crypto.sign(signer.privateKey, toSign))
}
}
}
| wavesplatform/Waves | node/src/test/scala/com/wavesplatform/BlocksTransactionsHelpers.scala | Scala | mit | 6,508 |
/***
* Copyright 2014 Rackspace US, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.rackspace.com.papi.components.checker
import com.rackspace.cloud.api.wadl.Converters._
import org.junit.runner.RunWith
import org.scalatest.FlatSpec
import org.scalatestplus.junit.JUnitRunner
@RunWith(classOf[JUnitRunner])
class GivenAWadlWithRolesAtResourceLevel extends FlatSpec with RaxRolesBehaviors {
val description = "Wadl With Roles At Resource Level"
val validator = Validator((localWADLURI,
<application xmlns="http://wadl.dev.java.net/2009/02" xmlns:rax="http://docs.rackspace.com/api">
<resources base="https://test.api.openstack.com">
<resource path="/a" rax:roles="a:admin">
<method name="POST"/>
<method name="GET"/>
<method name="PUT" rax:roles="a:observer"/>
<method name="DELETE" rax:roles="a:observer a:admin a:creator"/>
</resource>
</resources>
</application>)
, configWithRolesEnabled)
  // When a single rax:roles value is set at the resource level but not at the method level
it should behave like accessIsAllowed(validator, "GET", "/a", List("a:admin"), description)
it should behave like accessIsForbidden(validator, "GET", "/a", List("a:observer"), description)
it should behave like accessIsForbidden(validator, "GET", "/a", List("b:observer"), description)
it should behave like accessIsForbidden(validator, "GET", "/a", List("b:creator"), description)
it should behave like accessIsForbiddenWhenNoXRoles(validator, "GET", "/a", description)
it should behave like accessIsAllowed(validator, "POST", "/a", List("a:admin"), description)
it should behave like accessIsForbidden(validator, "POST", "/a", List("a:observer"), description)
it should behave like accessIsForbiddenWhenNoXRoles(validator, "POST", "/a", description)
// PUT has resource level a:admin, method level a:observer
it should behave like accessIsAllowed(validator, "PUT", "/a", List("a:admin"), description)
it should behave like accessIsAllowed(validator, "PUT", "/a", List("a:observer"), description)
it should behave like accessIsAllowed(validator, "PUT", "/a", List("a:observer", "a:admin"), description)
it should behave like accessIsForbidden(validator, "PUT", "/a", List("a:bar"), description)
it should behave like accessIsForbidden(validator, "PUT", "/a", List(), description)
it should behave like accessIsForbidden(validator, "PUT", "/a", List("a:observe"), description)
it should behave like accessIsForbiddenWhenNoXRoles(validator, "PUT", "/a", description)
// DELETE has resource level a:admin, method level a:observer and a:admin
it should behave like accessIsAllowed(validator, "DELETE", "/a", List("a:admin"), description)
it should behave like accessIsAllowed(validator, "DELETE", "/a", List("a:observer"), description)
it should behave like accessIsAllowed(validator, "DELETE", "/a", List("a:observer", "a:admin"), description)
it should behave like accessIsAllowed(validator, "DELETE", "/a", List("a:creator"), description)
it should behave like accessIsForbidden(validator, "DELETE", "/a", List("a:bar"), description)
it should behave like accessIsForbidden(validator, "DELETE", "/a", List(), description)
it should behave like accessIsForbidden(validator, "DELETE", "/a", List("a:observe"), description)
it should behave like accessIsForbiddenWhenNoXRoles(validator, "DELETE", "/a", description)
}
| rackerlabs/api-checker | core/src/test/scala/com/rackspace/com/papi/components/checker/GivenAWadlWithRolesAtResourceLevel.scala | Scala | apache-2.0 | 3,980 |
package fi.tp.experimental.pokerhands
import fi.tp.experimental.pokerhands.onepass.RecursiveOnePassScalaPokerHandComparator
import fi.tp.experimental.pokerhands.javainvokers.{ArrayOnePassJavaPokerHandComparatorInvoker, ObjectOrientedMultiPassJavaPokerHandComparatorInvoker}
import fi.tp.experimental.pokerhands.multipass.ParallelMultipassScalaPokerHandComparator
import org.scalatest.FunSuite
import scala.io.Source
class InputTextFilePokerHandComparatorTest extends FunSuite with GenericPokerHandComparatorTest {
test("Run tests from file with RecursiveOnePassScalaPokerHandComparator") {
def comparatorFunction = RecursiveOnePassScalaPokerHandComparator.compareHands _
runTestsFromFile(comparatorFunction _)
}
test("Run tests from file with ArrayOnePassJavaPokerHandComparator") {
def comparatorFunction = ArrayOnePassJavaPokerHandComparatorInvoker.compareHands _
runTestsFromFile(comparatorFunction _)
}
test("Run tests from file with ParallelMultipassScalaPokerHandComparator") {
def comparatorFunction = ParallelMultipassScalaPokerHandComparator.compareHands _
runTestsFromFile(comparatorFunction _)
}
test("Run tests from file with ObjectOrientedMultipassJavaPokerHandComparator") {
def comparatorFunction = ObjectOrientedMultiPassJavaPokerHandComparatorInvoker.compareHands _
runTestsFromFile(comparatorFunction _)
}
private def runTestsFromFile(compareFunction: () => (PokerHand, PokerHand) => Int) = {
Source.fromFile("src/test/resources/testhands1.csv").getLines().foreach(line => {
testHands(compareFunction())(line)
})
}
def testHands(compareFunction: (PokerHand, PokerHand) => Int)(handsLine: String) = {
println("Running test " + handsLine)
// parse line in the format As Kc Td 9c 8h > Kh 7c 5d 4h 2s # simple high
val handsWithoutComment = handsLine.split("#")
val comment = handsWithoutComment.reverse.head
val handsAndExpectedResult = handsWithoutComment.head
val hands = handsAndExpectedResult.split("[<>=]")
if (hands.length != 2) {
        println("Skipping line because it couldn't be split into hands.\n")
} else {
val handString1 = hands.head
val handString2 = hands.reverse.head
val expectedResult = handsAndExpectedResult.charAt(handString1.length)
val hand1: PokerHand = PokerHandConverter.handFromString(handString1)
val hand2: PokerHand = PokerHandConverter.handFromString(handString2)
runTestsForHands(compareFunction)(hand1, hand2, expectedResult, comment)
}
}
}
| taitopilvipete/experimental | src/test/scala/fi/tp/experimental/pokerhands/InputTextFilePokerHandComparatorTest.scala | Scala | apache-2.0 | 2,548 |
import stainless.annotation._
object Issue1167 {
@opaque @inlineOnce def f(x: Int): Int = 0
def test = assert(f(10) == f(10))
}
| epfl-lara/stainless | frontends/benchmarks/verification/valid/MicroTests/Issue1167.scala | Scala | apache-2.0 | 133 |
package BIDMach.models
import BIDMat.{Mat,SBMat,CMat,DMat,FMat,IMat,HMat,GMat,GIMat,GSMat,SMat,SDMat}
import BIDMat.MatFunctions._
import BIDMat.SciFunctions._
import BIDMach.datasources._
import BIDMach.updaters._
import BIDMach.mixins._
import BIDMach._
/**
* Basic DNN class. Learns a supervised map from input blocks to output (target) data blocks. There are currently 4 layer types:
- InputLayer: just a placeholder for the first layer which is loaded with input data blocks. No learnable params.
- FCLayer: Fully-Connected Linear layer. Has a matrix of learnable params which is the input-output map.
- RectLayer: Rectifying one-to-one layer. No params.
- GLMLayer: a one-to-one layer with GLM mappings (linear, logistic, abs-logistic and SVM). No learnable params.
*
* The network topology is specified by opts.layers which is a sequence of "LayerSpec" objects. There is a LayerSpec
* Class for each Layer class, which holds the params for defining that layer. Currently only two LayerSpec types need params:
 - FC: holds the output dimension of the FCLayer (the input dimension is set by the previous layer).
- GLM: holds the links matrix (integer specs for loss types, see GLM), for the output of that layer. Its size should match the
* number of targets.
*/
class DNN(override val opts:DNN.Opts = new DNN.Options) extends Model(opts) {
var layers:Array[Layer] = null
override def init() = {
if (refresh) {
mats = datasource.next;
var nfeats = mats(0).nrows;
datasource.reset;
layers = new Array[Layer](opts.layers.length);
var imodel = 0;
val nmodelmats = opts.layers.count(_ match {case x:DNN.ModelLayerSpec => true; case _ => false});
setmodelmats(Array[Mat](nmodelmats));
updatemats = new Array[Mat](nmodelmats);
for (i <- 0 until opts.layers.length) {
opts.layers(i) match {
case fcs:DNN.FC => {
layers(i) = new FCLayer(imodel);
modelmats(imodel) = if (useGPU) gnormrnd(0, 1, fcs.outsize, nfeats) else normrnd(0, 1, fcs.outsize, nfeats);
updatemats(imodel) = if (useGPU) gzeros(fcs.outsize, nfeats) else zeros(fcs.outsize, nfeats);
nfeats = fcs.outsize;
imodel += 1;
}
case rls:DNN.Rect => {
layers(i) = new RectLayer;
}
case ils:DNN.Input => {
layers(i) = new InputLayer;
}
case ols:DNN.GLM => {
layers(i) = new GLMLayer(ols.links);
}
}
if (i > 0) layers(i).input = layers(i-1)
}
}
}
def doblock(gmats:Array[Mat], ipass:Int, i:Long):Unit = {
layers(0).data = gmats(0);
layers(layers.length-1).target = gmats(1);
var i = 1
while (i < layers.length) {
layers(i).forward;
i += 1;
}
while (i > 1) {
i -= 1;
layers(i).backward;
}
}
def evalblock(mats:Array[Mat], ipass:Int, here:Long):FMat = {
layers(0).data = gmats(0);
val targ = gmats(1);
layers(layers.length-1).data = targ;
var i = 1;
while (i < layers.length) {
layers(i).forward;
i += 1;
}
if (putBack >= 0) {targ <-- layers(layers.length-1).data}
layers(layers.length-1).score
}
class Layer {
var data:Mat = null;
var target:Mat = null;
var deriv:Mat = null;
var input:Layer = null;
def forward = {};
def backward = {};
def score:FMat = zeros(1,1);
}
class FCLayer(val imodel:Int) extends Layer {
override def forward = {
data = modelmats(imodel) * input.data;
}
override def backward = {
if (imodel > 0) input.deriv = modelmats(imodel) ^* deriv;
updatemats(imodel) = deriv *^ input.data;
}
}
class RectLayer extends Layer {
override def forward = {
data = max(input.data, 0f)
}
override def backward = {
      input.deriv = deriv ∘ (input.data > 0f);
}
}
class InputLayer extends Layer {
}
class GLMLayer(val links:IMat) extends Layer {
val ilinks = if (useGPU) GIMat(links) else links;
var totflops = 0L;
for (i <- 0 until links.length) {
totflops += GLM.linkArray(links(i)).fnflops
}
override def forward = {
data = input.data + 0f
GLM.preds(data, data, ilinks, totflops)
}
override def backward = {
input.deriv = data + 1f
GLM.derivs(data, target, input.deriv, ilinks, totflops)
if (deriv.asInstanceOf[AnyRef] != null) {
        input.deriv ~ input.deriv ∘ deriv;
}
}
override def score:FMat = {
val v = GLM.llfun(data, target, ilinks, totflops);
FMat(mean(v, 2));
}
}
}
object DNN {
trait Opts extends Model.Opts {
var layers:Seq[LayerSpec] = null;
var links:IMat = null;
}
class Options extends Opts {}
class LayerSpec {}
class ModelLayerSpec extends LayerSpec{}
class FC(val outsize:Int) extends ModelLayerSpec {}
class Rect extends LayerSpec {}
class Input extends LayerSpec {}
class GLM(val links:IMat) extends LayerSpec {}
/**
* Build a stack of layers. layer(0) is an input layer, layer(n-1) is a GLM layer.
* Intermediate layers are FC alternating with Rect, starting and ending with FC.
* First FC layer width is given as an argument, then it tapers off by taper.
*/
def dlayers(depth0:Int, width:Int, taper:Float, ntargs:Int, opts:Opts) = {
val depth = (depth0/2)*2 + 1; // Round up to an odd number of layers
val layers = new Array[LayerSpec](depth);
var w = width
for (i <- 1 until depth - 2) {
if (i % 2 == 1) {
layers(i) = new FC(w);
w = (taper*w).toInt;
} else {
layers(i) = new Rect;
}
}
layers(0) = new Input;
layers(depth-2) = new FC(ntargs);
layers(depth-1) = new GLM(opts.links);
opts.layers = layers
layers
}
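  /* Illustrative sketch (values assumed, not taken from the original code): for dlayers(5, 200, 0.5f, 1, opts)
   * the stack built above is
   *
   *   Array(new Input,            // layer 0: placeholder filled with the input data block
   *         new FC(200),          // layer 1: 200 hidden units
   *         new Rect,             // layer 2: rectifier
   *         new FC(1),            // layer 3: maps to the single target row (ntargs = 1)
   *         new GLM(opts.links))  // layer 4: output links/loss
   *
   * which matches the LayerSpec topology described in the class comment.
   */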
def learner(mat0:Mat, targ:Mat, d:Int) = {
class xopts extends Learner.Options with DNN.Opts with MatDS.Opts with ADAGrad.Opts
val opts = new xopts
if (opts.links == null) opts.links = izeros(1,targ.nrows)
opts.links.set(d)
opts.batchSize = math.min(100000, mat0.ncols/30 + 1)
dlayers(3, 0, 1f, targ.nrows, opts) // default to a 3-layer network
val nn = new Learner(
new MatDS(Array(mat0, targ), opts),
new DNN(opts),
null,
new ADAGrad(opts),
opts)
(nn, opts)
}
def learner(fnames:List[(Int)=>String], d:Int) = {
class xopts extends Learner.Options with DNN.Opts with SFilesDS.Opts with ADAGrad.Opts
val opts = new xopts
opts.dim = d
opts.fnames = fnames
opts.batchSize = 100000;
opts.eltsPerSample = 500;
implicit val threads = threadPool(4);
val ds = new SFilesDS(opts)
// dlayers(3, 0, 1f, targ.nrows, opts) // default to a 3-layer network
val nn = new Learner(
ds,
new DNN(opts),
null,
new ADAGrad(opts),
opts)
(nn, opts)
}
class LearnOptions extends Learner.Options with DNN.Opts with MatDS.Opts with ADAGrad.Opts with L1Regularizer.Opts
// This function constructs a learner and a predictor.
def learner(mat0:Mat, targ:Mat, mat1:Mat, preds:Mat, d:Int):(Learner, LearnOptions, Learner, LearnOptions) = {
val mopts = new LearnOptions;
val nopts = new LearnOptions;
mopts.lrate = 1f
mopts.batchSize = math.min(10000, mat0.ncols/30 + 1)
mopts.autoReset = false
if (mopts.links == null) mopts.links = izeros(targ.nrows,1)
nopts.links = mopts.links
mopts.links.set(d)
nopts.batchSize = mopts.batchSize
nopts.putBack = 1
dlayers(3, 0, 1f, targ.nrows, mopts) // default to a 3-layer network
val model = new DNN(mopts)
val mm = new Learner(
new MatDS(Array(mat0, targ), mopts),
model,
Array(new L1Regularizer(mopts)),
new ADAGrad(mopts), mopts)
val nn = new Learner(
new MatDS(Array(mat1, preds), nopts),
model,
null,
null,
nopts)
(mm, mopts, nn, nopts)
}
}
| bikash/BIDMach | src/main/scala/BIDMach/models/DNN.scala | Scala | bsd-3-clause | 8,304 |
package demo
package routes
import demo.components.{ReactPopoverInfo, ReactPopoverDemo}
import demo.pages.ReactPopoverPage
import japgolly.scalajs.react.extra.router.RouterConfigDsl
object ReactPopoverRouteModule {
case object Info extends LeftRoute("Info", "info", () => ReactPopoverInfo())
case object Demo extends LeftRoute("Demo", "demo", () => ReactPopoverDemo())
val menu: List[LeftRoute] = List(Info,Demo)
val routes = RouterConfigDsl[LeftRoute].buildRule { dsl =>
import dsl._
menu.map(i =>
staticRoute(i.route, i) ~> renderR(r => ReactPopoverPage(i, r))
).reduce(_ | _)
}
}
| elacin/scalajs-react-components | demo/src/main/scala/demo/routes/ReactPopoverRouteModule.scala | Scala | apache-2.0 | 620 |
import twitter4j._
import scala.collection.mutable.HashMap
import scala.collection.JavaConversions._
import scala.io._
import scalax.chart.api._
// You do not need to change this class, just add your own Twitter keys.
class TweetStreamer(manipulator: TweetManipulator) {
val config = new twitter4j.conf.ConfigurationBuilder()
    .setOAuthConsumerKey("FtiwSLnEDmrzNvotmYJPjijC1") // enter your 4 Twitter "keys" here
.setOAuthConsumerSecret("mFaUqPQI2zMOU3c0WIFJdtggakqv2yTVsVle50kserVOXR0Mcg")
.setOAuthAccessToken("3921389776-RqGNPA1bUrFchPU94D7YWKBUW9Y5uLm46JrjU7T")
.setOAuthAccessTokenSecret("n9HAMIB26IuUogmTfR9Iavg5RsOINqrld1OsMCfrpSL8F")
.build
def simpleStatusListener = new StatusListener() {
def onStatus(status: Status) {
val tweet = status.getText
manipulator.process(tweet)
}
def onDeletionNotice(statusDeletionNotice: StatusDeletionNotice) {}
def onTrackLimitationNotice(numberOfLimitedStatuses: Int) {}
def onException(ex: Exception) { ex.printStackTrace }
def onScrubGeo(arg0: Long, arg1: Long) {}
def onStallWarning(warning: StallWarning) {}
}
}
trait TweetManipulator {
def process(tweet: String)
def outputFinal
}
// Example of a tweet processor
object TrivialManipulator extends TweetManipulator {
def process(tweet: String) = {
println("-------------------")
println(tweet)
}
def outputFinal = println("Finished streaming")
}
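// Additional illustrative manipulator (not part of the original exercises; the name and behaviour
// are assumptions): counts hashtags, showing how the TweetManipulator trait can be implemented.
object HashtagManipulator extends TweetManipulator {
  private val counts = HashMap[String, Int]()
  def process(tweet: String) = {
    // tally every token that starts with '#'
    for (tag <- tweet.split("\\s+") if tag.startsWith("#") && tag.length > 1) {
      counts.put(tag, counts.getOrElse(tag, 0) + 1)
    }
  }
  // print the ten most frequent hashtags seen so far
  def outputFinal = counts.toList.sortBy(-_._2).take(10).foreach(println)
}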
// Example of a simple task
object NalogaDemo {
def main(args: Array[String]) {
val tS = new TweetStreamer(TrivialManipulator)
val twitterStream = new TwitterStreamFactory(tS.config).getInstance
twitterStream.addListener(tS.simpleStatusListener)
twitterStream.sample
Thread.sleep(10000)
TrivialManipulator.outputFinal
twitterStream.cleanUp
twitterStream.shutdown
}
}
// Naloga 1
object EnglishManipulator extends TweetManipulator {
lazy val dic = Source.fromFile("resources/wordsEn.txt").getLines.toList
def isEnglish(l: List[String]): Boolean = {
l.length / 2 <= l.count(p => dic.contains(p.toLowerCase))
}
def isEnglish(word: String):Boolean = dic.contains(word)
  // my eyes hurt from all the variables :(
var numTweets = 0
var numEnTweets = 0
def process(tweet: String) = {
val l = tweet.split("\\s+").toList.map(x => SentimentManipulator.removePunc(x))
if(isEnglish(l)) {
println("-------------------")
println(tweet)
numTweets = numTweets + 1
numEnTweets = numEnTweets + 1
}
else {
numTweets = numTweets + 1
}
}
  // print the ratio of English tweets
def outputFinal = {
println("Number of English Tweets: " + numEnTweets)
println("Number of all Tweets: " + numTweets)
println("Ratio of English Tweets: " + (numEnTweets.toDouble / numTweets)*100 + "%")
}
}
object Naloga1 {
def main(args: Array[String]) {
val tS = new TweetStreamer(EnglishManipulator)
val twitterStream = new TwitterStreamFactory(tS.config).getInstance
twitterStream.addListener(tS.simpleStatusListener)
twitterStream.sample
Thread.sleep(10000)
EnglishManipulator.outputFinal
twitterStream.cleanUp
twitterStream.shutdown
}
}
// Naloga 2
object FreqManipulator extends TweetManipulator {
lazy val commons = Source.fromFile("resources/commonEng.txt").getLines.toList
var myMap = HashMap[String,Int]()
def isCommon(w: String): Boolean = commons.contains(w)
  // sort by frequency (descending) and take the top 100
def mostCommon: List[(String, Int)] = myMap.toArray.sortBy(_._2).toList.reverse.take(100)
def process(tweet: String) = {
val l = tweet.split("\\s+").toList.map(x => SentimentManipulator.removePunc(x))
    // go through every word of the tweet and keep those that are English, not common, and at least 3 characters long
for (word <- l if EnglishManipulator.isEnglish(word) && !isCommon(word) && word.length >= 3) {
if(myMap.contains(word)) myMap.put((word), myMap(word)+1)
else myMap.put(word, 1)
}
}
def outputFinal = {
mostCommon.foreach(println)
}
}
object Naloga2 {
def main(args: Array[String]) {
val tS = new TweetStreamer(FreqManipulator)
val twitterStream = new TwitterStreamFactory(tS.config).getInstance
twitterStream.addListener(tS.simpleStatusListener)
twitterStream.sample
Thread.sleep(10000)
FreqManipulator.outputFinal
twitterStream.cleanUp
twitterStream.shutdown
}
}
// Naloga 3
object SentimentManipulator extends TweetManipulator {
val startingTime = System.nanoTime()
  // parse the sentiment file into a list of (word, score) tuples
lazy val sentiments:List[(String,Int)] = {
Source.fromFile("resources/sentiment.txt").getLines.toList.map(x => {
val split = x.split("\\s+") match {
case Array(str1,str2) => (str1,str2.toInt)
case Array(str1,str2,str3) => (str1+str2, str3.toInt)
case Array(str1,str2,str3,str4) => (str1+str2+str3,str4.toInt)
case _ => throw new Exception
}
split
})
}
  // build a HashMap from the list of tuples
def buildMap:HashMap[String,Int] = {
var myMap = HashMap[String,Int]()
for(t <- sentiments) {
myMap.put(t._1,t._2)
}
myMap
}
lazy val myMap = buildMap
def removePunc(s:String):String = s.replaceAll("""[\p{Punct}&&[^']]""", "")
def sentimentValue(tweet: List[String]):Int = tweet.foldLeft(0)((acc, c) => if(myMap.contains(c)) acc + myMap(c) else acc)
def average(c:List[Int]):Double = c.foldLeft(0)((acc,c) => acc +c) / c.length.toDouble
var chartMap = HashMap[Int,List[Int]]()
def process(tweet: String) = {
val l = tweet.split(" ").toList.map(x => removePunc(x))
      // process only English tweets (fewer neutral words, so the chart values come out a bit higher)
if(EnglishManipulator.isEnglish(l)) {
val curent = sentimentValue(l)
val seconds = (System.nanoTime() - startingTime) / 1000000000.0
val minutes = (seconds / 60).toInt
if(chartMap.contains(minutes)) chartMap.put(minutes, (chartMap(minutes)):::List(curent))
else chartMap.put(minutes, List(curent))
}
}
  // build a list of (minute, average sentiment value in that minute) tuples
def pripraviIzpis(map:HashMap[Int,List[Int]]):List[(Int,Double)] = {
map.toList.map(x => (x._1.toInt, BigDecimal(average(x._2)).setScale(2, BigDecimal.RoundingMode.HALF_UP).toDouble)).toArray.sortBy(_._1).toList
}
def outputFinal = {
val izpis = pripraviIzpis(chartMap)
val chart = XYLineChart(izpis)
chart.saveAsPNG("resources/chart.png")
chart.show()
}
}
object Naloga3 {
def main(args: Array[String]) {
val tS = new TweetStreamer(SentimentManipulator)
val twitterStream = new TwitterStreamFactory(tS.config).getInstance
twitterStream.addListener(tS.simpleStatusListener)
twitterStream.sample
Thread.sleep(3600000)
SentimentManipulator.outputFinal
twitterStream.cleanUp
twitterStream.shutdown
}
}
| Meemaw/scalaProgramming | week9/src/main/scala/Main.scala | Scala | mit | 6,914 |
package com.minalien.mffs.proxy
import net.minecraftforge.common.DimensionManager
/**
* Functionality common to both Server & Client.
*/
class CommonProxy {
def registerRenderers() {}
def isServer = !DimensionManager.getWorld(0).isRemote
}
| Vexatos/MFFS | src/main/scala/com/minalien/mffs/proxy/CommonProxy.scala | Scala | gpl-3.0 | 248 |
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package views.pages.amends
import forms.AmendPensionsTakenBeforeForm
import org.jsoup.Jsoup
import testHelpers.ViewSpecHelpers.CommonViewSpecHelper
import testHelpers.ViewSpecHelpers.amends.AmendIP14PensionsTakenBeforeViewSpecMessages
import uk.gov.hmrc.play.views.html.helpers.{ErrorSummary, FormWithCSRF}
import views.html.pages.amends.amendIP14PensionsTakenBefore
class AmendIP14PensionsTakenBeforeViewSpec extends CommonViewSpecHelper with AmendIP14PensionsTakenBeforeViewSpecMessages{
implicit val errorSummary: ErrorSummary = app.injector.instanceOf[ErrorSummary]
implicit val formWithCSRF: FormWithCSRF = app.injector.instanceOf[FormWithCSRF]
"the AmendIP14PensionsTakenBeforeView" should{
val pensionsForm = AmendPensionsTakenBeforeForm.amendPensionsTakenBeforeForm.bind(Map("amendedPensionsTakenBefore" -> "Yes",
"amendedPensionsTakenBeforeAmt" -> "12345",
"protectionType" -> "ip2014",
"status" -> "open"))
lazy val view = application.injector.instanceOf[amendIP14PensionsTakenBefore]
lazy val doc = Jsoup.parse(view.apply(pensionsForm).body)
val errorForm = AmendPensionsTakenBeforeForm.amendPensionsTakenBeforeForm.bind(Map.empty[String, String])
lazy val errorView = application.injector.instanceOf[amendIP14PensionsTakenBefore]
lazy val errorDoc = Jsoup.parse(errorView.apply(errorForm).body)
lazy val form = doc.select("form")
"have the correct title" in{
doc.title() shouldBe plaPensionsTakenBeforeTitle
}
"have the correct and properly formatted header"in{
doc.select("h1").text shouldBe plaPensionsTakenBeforeTitle
}
"have the right explanatory messages" in{
doc.select("h2").text shouldBe plaIP14PensionsTakenBeforeQuestion
doc.select("summary").text shouldBe plaPensionsTakenBeforeHelp
doc.select("p").eq(1).text shouldBe plaPensionsTakenBeforeParaOne
doc.select("p").eq(2).text shouldBe plaPensionsTakenBeforeParaTwo
doc.select("p").eq(3).text shouldBe plaPensionsTakenBeforeParaThree
}
"have a hidden menu with the correct list values" in{
doc.select("li").eq(0).text shouldBe plaPensionsTakenBeforeStepOne
doc.select("li").eq(1).text shouldBe plaIP14PensionsTakenBeforeStepTwo
doc.select("li").eq(2).text shouldBe plaPensionsTakenBeforeStepThree
doc.select("li").eq(3).text shouldBe plaPensionsTakenBeforeBulletOne
doc.select("li").eq(4).text shouldBe plaPensionsTakenBeforeBulletTwo
}
"have a help link redirecting to the right place" in{
doc.getElementsByTag("a").text shouldBe plaPensionsTakenBeforeHelpLinkText
doc.getElementsByTag("a").attr("href") shouldBe plaPensionsTakenBeforeHelpLinkLocation
}
"have a valid form" in{
form.attr("method") shouldBe "POST"
form.attr("action") shouldBe controllers.routes.AmendsController.submitAmendPensionsTakenBefore().url
form.select("legend.visually-hidden").text() shouldBe plaPensionsTakenBeforeLegendText
    }
    "have a £ symbol present" in{
      doc.select(".poundSign").text shouldBe "£"
}
"have a pair of yes/no buttons" in{
doc.select("[for=amendedPensionsTakenBefore-yes]").text shouldBe plaBaseYes
doc.select("input#amendedPensionsTakenBefore-yes").attr("type") shouldBe "radio"
doc.select("[for=amendedPensionsTakenBefore-no]").text shouldBe plaBaseNo
doc.select("input#amendedPensionsTakenBefore-no").attr("type") shouldBe "radio"
}
"have a continue button" in{
doc.select("button").text shouldBe plaBaseChange
doc.select("button").attr("type") shouldBe "submit"
}
"display the correct errors appropriately" in{
errorForm.hasErrors shouldBe true
errorDoc.select("h2.h3-heading").text shouldBe plaBaseErrorSummaryLabel
errorDoc.select("span.error-notification").text shouldBe errorRequired
}
"not have errors on valid pages" in{
pensionsForm.hasErrors shouldBe false
doc.select("span.error-notification").text shouldBe ""
}
}
}
| hmrc/pensions-lifetime-allowance-frontend | test/views/pages/amends/AmendIP14PensionsTakenBeforeViewSpec.scala | Scala | apache-2.0 | 4,637 |
package mongodbapi
import org.specs2.matcher.Matcher
import org.specs2.mutable.Specification
import reactivemongo.bson.{BSONArray, BSONValue, BSONDocument}
trait BSONMatchers {
self: Specification =>
def toList(doc: BSONDocument): List[(String, BSONValue)] = {
def toList(bson: BSONValue, prefix: String): List[(String, BSONValue)] = bson match {
case doc: BSONDocument =>
doc.elements.flatMap {
case (key, value) => toList(value, prefix + "." + key)
}.toList
case arr: BSONArray =>
arr.values.zipWithIndex.flatMap {
case (value, idx) => toList(value, prefix + "." + idx)
}.toList
case other =>
List(prefix -> other)
}
toList(doc, "")
}
/**
  * BSONDocuments are 'ordered': in MongoDB, { aap: 1, noot: 1 } is not equal to { noot: 1, aap: 1 }.
  * With ReactiveMongo you rarely run into this, because the BSONHandlers take care of (de)serialization.
  *
  * In unit tests, however, we sometimes do not want to see this difference. A Map[String, ...] is not
  * ordered, so its equals method behaves in the conceptually expected way.
*
* TODO: this will only work for BSONDocuments, not BSONArray with objects within it
*/
def bsonEqualTo(doc2: BSONDocument): Matcher[BSONDocument] = {
((doc1: BSONDocument) => toList(doc1)) ^^ beEqualTo(toList(doc2))
}
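  // Usage sketch (illustrative) inside a Specification that mixes in this trait:
  //   actualDocument must bsonEqualTo(expectedDocument)
  // The matcher compares the two documents after flattening them into (path, value) pairs via toList.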
}
| kamilafsar/mongodb-api | src/test/scala/mongodbapi/BSONMatchers.scala | Scala | mit | 1,354 |
import scala.reflect.io.Streamable
import scala.tools.asm.{ClassWriter, ClassReader}
import scala.tools.asm.tree.ClassNode
import scala.tools.partest._
import scala.tools.partest.BytecodeTest.modifyClassFile
import java.io.{FileOutputStream, FileInputStream, File}
object Test extends DirectTest {
def code = ???
def compileCode(code: String) = {
val classpath = List(sys.props("partest.lib"), testOutput.path) mkString sys.props("path.separator")
compileString(newCompiler("-cp", classpath, "-d", testOutput.path))(code)
}
def app = """
object O {
new test.Annotated
}
"""
def show(): Unit = {
compileCode(app)
modifyClassFile(new File(testOutput.toFile, "test/Annotated.class")) {
(cn: ClassNode) =>
// As investigated https://github.com/scala/bug/issues/2464#issuecomment-292371985
// classfiles in the wild sometimes lack the required InnerClass attribute for nested enums that
// are referenced in an annotation. I don't know what compiler or bytecode processor leaves things
// that way, but this test makes sure we don't crash.
cn.innerClasses.clear()
cn
}
compileCode(app)
}
}
| martijnhoekstra/scala | test/files/run/t2464/Test.scala | Scala | apache-2.0 | 1,195 |
package gapt.proofs
/**
* Class for convenient construction of proofs.
 * Allows you to write proofs post-order style (à la Bussproofs). Example:
* <pre>
* (ProofBuilder
* c LogicalAxiom(A)
* c LogicalAxiom(B)
* u (WeakeningLeftRule(_, C))
 *   b (AndRightRule(_,_, And(A, B)))
* qed)
* </pre>
* The constructor is private, so the only way to instantiate this class is by using the ProofBuilder object.
* This means that the stack will always be empty in the beginning.
*
* @param proofStack
*/
class ProofBuilder[+Proof]( private val proofStack: List[Proof] ) {
/**
* Pushes a proof onto the stack.
*
* @param proof An LKProof.
* @return
*/
def c[P >: Proof]( proof: P ): ProofBuilder[P] = new ProofBuilder( proof :: proofStack )
/**
* Applies a unary inference to the top element of the proof stack.
*
* @param inference A function LKProof => LKProof.
* @return
*/
def u[P >: Proof]( inference: Proof => P ): ProofBuilder[P] = proofStack match {
case p :: rest => new ProofBuilder( inference( p ) :: rest )
case _ => throw new Exception( "Cannot apply unary inference to empty stack." )
}
/**
* Applies a binary inference to the top two elements of the proof stack.
*
* @param inference A function (LKProof, LKProof) => LKProof
* @return
*/
def b[P >: Proof]( inference: ( Proof, Proof ) => P ): ProofBuilder[P] = proofStack match {
case Nil => throw new Exception( "Cannot apply binary inference to empty stack." )
case _ :: Nil => throw new Exception( "Cannot apply binary inference to stack with only one element." )
case p2 :: p1 :: rest => new ProofBuilder( inference( p1, p2 ) :: rest )
}
/**
* Applies a ternary inference to the top three elements of the proof stack.
*
* @param inference A function (NDProof, NDProof, NDProof) => NDProof
* @return
*/
def t[P >: Proof]( inference: ( Proof, Proof, Proof ) => P ): ProofBuilder[P] = proofStack match {
case Nil => throw new Exception( "Cannot apply ternary inference to empty stack." )
case _ :: Nil => throw new Exception( "Cannot apply ternary inference to stack with only one element." )
case _ :: _ :: Nil => throw new Exception( "Cannot apply ternary inference to stack with only two elements." )
case p3 :: p2 :: p1 :: rest => new ProofBuilder( inference( p1, p2, p3 ) :: rest )
}
/**
* If there is exactly one proof on the stack, returns that proof.
*
* @return
*/
def qed: Proof = proofStack match {
case Nil => throw new Exception( "Proof stack is empty." )
case p :: Nil => p
case _ => throw new Exception( "There is more than one proof on the stack." )
}
}
object ProofBuilder extends ProofBuilder[Nothing]( Nil )
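// --- Illustrative sketch (not part of the original gapt sources) ------------------------
// The scaladoc above shows the infix style with LK rules; this minimal example uses plain
// strings in place of proofs to make the post-order stack discipline concrete.
object ProofBuilderExample {
  val result: String =
    ProofBuilder
      .c( "A" )
      .c( "B" )
      .u( ( p: String ) => s"weaken($p)" )
      .b( ( l: String, r: String ) => s"and($l, $r)" )
      .qed
  // `u` rewrites the top of the stack and `b` pops two entries, with the more recently
  // pushed proof becoming the right argument, so: result == "and(A, weaken(B))"
}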
| gapt/gapt | core/src/main/scala/gapt/proofs/ProofBuilder.scala | Scala | gpl-3.0 | 2,832 |
/* __ *\\
** ________ ___ / / ___ Scala API **
** / __/ __// _ | / / / _ | (c) 2006-2016, LAMP/EPFL **
** __\\ \\/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ **
** /____/\\___/_/ |_/____/_/ | | **
** |/ **
\\* */
package scala
package collection
package convert
import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc }
/** Defines converter methods from Scala to Java collections. */
trait AsJavaConverters {
import Wrappers._
/**
* Converts a Scala `Iterator` to a Java `Iterator`.
*
* The returned Java `Iterator` is backed by the provided Scala `Iterator` and any side-effects of
* using it via the Java interface will be visible via the Scala interface and vice versa.
*
* If the Scala `Iterator` was previously obtained from an implicit or explicit call of
* `[[JavaConverters.asScalaIterator]](java.util.Iterator)` then the original Java `Iterator` will
* be returned.
*
* @param i The Scala `Iterator` to be converted.
* @return A Java `Iterator` view of the argument.
*/
def asJavaIterator[A](i: Iterator[A]): ju.Iterator[A] = i match {
case null => null
case JIteratorWrapper(wrapped) => wrapped.asInstanceOf[ju.Iterator[A]]
case _ => IteratorWrapper(i)
}
/**
* Converts a Scala `Iterator` to a Java `Enumeration`.
*
* The returned Java `Enumeration` is backed by the provided Scala `Iterator` and any side-effects
* of using it via the Java interface will be visible via the Scala interface and vice versa.
*
* If the Scala `Iterator` was previously obtained from an implicit or explicit call of
* `[[JavaConverters.enumerationAsScalaIterator]](java.util.Enumeration)` then the original Java
* `Enumeration` will be returned.
*
* @param i The Scala `Iterator` to be converted.
* @return A Java `Enumeration` view of the argument.
*/
def asJavaEnumeration[A](i: Iterator[A]): ju.Enumeration[A] = i match {
case null => null
case JEnumerationWrapper(wrapped) => wrapped.asInstanceOf[ju.Enumeration[A]]
case _ => IteratorWrapper(i)
}
/**
* Converts a Scala `Iterable` to a Java `Iterable`.
*
* The returned Java `Iterable` is backed by the provided Scala `Iterable` and any side-effects of
* using it via the Java interface will be visible via the Scala interface and vice versa.
*
* If the Scala `Iterable` was previously obtained from an implicit or explicit call of
* `[[JavaConverters.iterableAsScalaIterable]](java.lang.Iterable)` then the original Java
* `Iterable` will be returned.
*
* @param i The Scala `Iterable` to be converted.
* @return A Java `Iterable` view of the argument.
*/
def asJavaIterable[A](i: Iterable[A]): jl.Iterable[A] = i match {
case null => null
case JIterableWrapper(wrapped) => wrapped.asInstanceOf[jl.Iterable[A]]
case _ => IterableWrapper(i)
}
/**
* Converts a Scala `Iterable` to an immutable Java `Collection`.
*
* If the Scala `Iterable` was previously obtained from an implicit or explicit call of
* `[[JavaConverters.collectionAsScalaIterable]](java.util.Collection)` then the original Java
* `Collection` will be returned.
*
* @param i The Scala `Iterable` to be converted.
* @return A Java `Collection` view of the argument.
*/
def asJavaCollection[A](i: Iterable[A]): ju.Collection[A] = i match {
case null => null
case JCollectionWrapper(wrapped) => wrapped.asInstanceOf[ju.Collection[A]]
case _ => new IterableWrapper(i)
}
/**
* Converts a Scala mutable `Buffer` to a Java List.
*
* The returned Java List is backed by the provided Scala `Buffer` and any side-effects of using
* it via the Java interface will be visible via the Scala interface and vice versa.
*
* If the Scala `Buffer` was previously obtained from an implicit or explicit call of
* `[[JavaConverters.asScalaBuffer]](java.util.List)` then the original Java `List` will be
* returned.
*
* @param b The Scala `Buffer` to be converted.
* @return A Java `List` view of the argument.
*/
def bufferAsJavaList[A](b: mutable.Buffer[A]): ju.List[A] = b match {
case null => null
case JListWrapper(wrapped) => wrapped
case _ => new MutableBufferWrapper(b)
}
/**
* Converts a Scala mutable `Seq` to a Java `List`.
*
* The returned Java `List` is backed by the provided Scala `Seq` and any side-effects of using it
* via the Java interface will be visible via the Scala interface and vice versa.
*
* If the Scala `Seq` was previously obtained from an implicit or explicit call of
* `[[JavaConverters.asScalaBuffer]](java.util.List)` then the original Java `List` will be
* returned.
*
* @param s The Scala `Seq` to be converted.
* @return A Java `List` view of the argument.
*/
def mutableSeqAsJavaList[A](s: mutable.Seq[A]): ju.List[A] = s match {
case null => null
case JListWrapper(wrapped) => wrapped
case _ => new MutableSeqWrapper(s)
}
/**
* Converts a Scala `Seq` to a Java `List`.
*
* The returned Java `List` is backed by the provided Scala `Seq` and any side-effects of using it
* via the Java interface will be visible via the Scala interface and vice versa.
*
* If the Scala `Seq` was previously obtained from an implicit or explicit call of
* `[[JavaConverters.asScalaBuffer]](java.util.List)` then the original Java `List` will be
* returned.
*
* @param s The Scala `Seq` to be converted.
* @return A Java `List` view of the argument.
*/
def seqAsJavaList[A](s: Seq[A]): ju.List[A] = s match {
case null => null
case JListWrapper(wrapped) => wrapped.asInstanceOf[ju.List[A]]
case _ => new SeqWrapper(s)
}
/**
* Converts a Scala mutable `Set` to a Java `Set`.
*
* The returned Java `Set` is backed by the provided Scala `Set` and any side-effects of using it
* via the Java interface will be visible via the Scala interface and vice versa.
*
* If the Scala `Set` was previously obtained from an implicit or explicit call of
* `[[JavaConverters.asScalaSet]](java.util.Set)` then the original Java `Set` will be returned.
*
* @param s The Scala mutable `Set` to be converted.
* @return A Java `Set` view of the argument.
*/
def mutableSetAsJavaSet[A](s: mutable.Set[A]): ju.Set[A] = s match {
case null => null
case JSetWrapper(wrapped) => wrapped
case _ => new MutableSetWrapper(s)
}
/**
* Converts a Scala `Set` to a Java `Set`.
*
* The returned Java `Set` is backed by the provided Scala `Set` and any side-effects of using it
* via the Java interface will be visible via the Scala interface and vice versa.
*
* If the Scala `Set` was previously obtained from an implicit or explicit call of
* `[[JavaConverters.asScalaSet]](java.util.Set)` then the original Java `Set` will be returned.
*
* @param s The Scala `Set` to be converted.
* @return A Java `Set` view of the argument.
*/
def setAsJavaSet[A](s: Set[A]): ju.Set[A] = s match {
case null => null
case JSetWrapper(wrapped) => wrapped
case _ => new SetWrapper(s)
}
/**
* Converts a Scala mutable `Map` to a Java `Map`.
*
* The returned Java `Map` is backed by the provided Scala `Map` and any side-effects of using it
* via the Java interface will be visible via the Scala interface and vice versa.
*
* If the Scala `Map` was previously obtained from an implicit or explicit call of
* `[[JavaConverters.mapAsScalaMap]](java.util.Map)` then the original Java `Map` will be
* returned.
*
* @param m The Scala mutable `Map` to be converted.
* @return A Java `Map` view of the argument.
*/
def mutableMapAsJavaMap[A, B](m: mutable.Map[A, B]): ju.Map[A, B] = m match {
case null => null
case JMapWrapper(wrapped) => wrapped
case _ => new MutableMapWrapper(m)
}
/**
* Converts a Scala mutable `Map` to a Java `Dictionary`.
*
* The returned Java `Dictionary` is backed by the provided Scala `Dictionary` and any
* side-effects of using it via the Java interface will be visible via the Scala interface and
* vice versa.
*
* If the Scala `Dictionary` was previously obtained from an implicit or explicit call of
* `[[JavaConverters.dictionaryAsScalaMap]](java.util.Dictionary)` then the original Java
* `Dictionary` will be returned.
*
* @param m The Scala `Map` to be converted.
* @return A Java `Dictionary` view of the argument.
*/
def asJavaDictionary[A, B](m: mutable.Map[A, B]): ju.Dictionary[A, B] = m match {
case null => null
case JDictionaryWrapper(wrapped) => wrapped
case _ => new DictionaryWrapper(m)
}
/**
* Converts a Scala `Map` to a Java `Map`.
*
* The returned Java `Map` is backed by the provided Scala `Map` and any side-effects of using it
* via the Java interface will be visible via the Scala interface and vice versa.
*
* If the Scala `Map` was previously obtained from an implicit or explicit call of
* `[[JavaConverters.mapAsScalaMap]](java.util.Map)` then the original Java `Map` will be
* returned.
*
* @param m The Scala `Map` to be converted.
* @return A Java `Map` view of the argument.
*/
def mapAsJavaMap[A, B](m: Map[A, B]): ju.Map[A, B] = m match {
case null => null
case JMapWrapper(wrapped) => wrapped.asInstanceOf[ju.Map[A, B]]
case _ => new MapWrapper(m)
}
/**
* Converts a Scala mutable `concurrent.Map` to a Java `ConcurrentMap`.
*
* The returned Java `ConcurrentMap` is backed by the provided Scala `concurrent.Map` and any
* side-effects of using it via the Java interface will be visible via the Scala interface and
* vice versa.
*
* If the Scala `concurrent.Map` was previously obtained from an implicit or explicit call of
* `[[JavaConverters.mapAsScalaConcurrentMap]](java.util.concurrent.ConcurrentMap)` then the
* original Java `ConcurrentMap` will be returned.
*
* @param m The Scala `concurrent.Map` to be converted.
* @return A Java `ConcurrentMap` view of the argument.
*/
def mapAsJavaConcurrentMap[A, B](m: concurrent.Map[A, B]): juc.ConcurrentMap[A, B] = m match {
case null => null
case JConcurrentMapWrapper(wrapped) => wrapped
case _ => new ConcurrentMapWrapper(m)
}
}
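// --- Illustrative sketch (not part of the original file) --------------------------------
// The "original collection is returned" guarantee documented above, shown for iterators.
// The Java-to-Scala step uses JavaConverters.asScalaIterator, which the scaladoc above
// already references.
private[convert] object AsJavaConvertersExample extends AsJavaConverters {
  def roundTrip(i: ju.Iterator[String]): Boolean =
    // wrapping the Java iterator and converting back unwraps to the original instance
    asJavaIterator(scala.collection.JavaConverters.asScalaIterator(i)) eq i
}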
| felixmulder/scala | src/library/scala/collection/convert/AsJavaConverters.scala | Scala | bsd-3-clause | 11,214 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.carbondata.spark.testsuite.detailquery
import org.apache.spark.sql.Row
import org.scalatest.BeforeAndAfterAll
import org.apache.carbondata.core.constants.CarbonCommonConstants
import org.apache.carbondata.core.util.CarbonProperties
import org.apache.spark.sql.test.util.QueryTest
/**
* Test Class for verifying NO_DICTIONARY_COLUMN feature.
*/
class NO_DICTIONARY_COL_TestCase extends QueryTest with BeforeAndAfterAll {
override def beforeAll {
//For the Hive table creation and data loading
sql("drop table if exists filtertestTable")
sql("drop table if exists NO_DICTIONARY_HIVE_6")
sql("drop table if exists NO_DICTIONARY_CARBON_6")
sql("drop table if exists NO_DICTIONARY_CARBON_7")
sql(
"create table NO_DICTIONARY_HIVE_6(empno string,empname string,designation string,doj " +
"Timestamp,workgroupcategory int, " +
"workgroupcategoryname string,deptno int, deptname string, projectcode int, " +
"projectjoindate Timestamp,projectenddate Timestamp,attendance int, "
+ "utilization int,salary int) row format delimited fields terminated by ',' " +
"tblproperties(\\"skip.header.line.count\\"=\\"1\\") " +
""
)
sql(
s"load data local inpath '$resourcesPath/datawithoutheader.csv' into table " +
"NO_DICTIONARY_HIVE_6"
);
//For Carbon cube creation.
sql("CREATE TABLE NO_DICTIONARY_CARBON_6 (empno string, " +
"doj Timestamp, workgroupcategory Int, empname String,workgroupcategoryname String, " +
"deptno Int, deptname String, projectcode Int, projectjoindate Timestamp, " +
"projectenddate Timestamp, designation String,attendance Int,utilization " +
"Int,salary Int) STORED BY 'org.apache.carbondata.format' " +
"TBLPROPERTIES('DICTIONARY_EXCLUDE'='empname,designation')"
)
sql(
s"LOAD DATA LOCAL INPATH '$resourcesPath/data.csv' INTO TABLE NO_DICTIONARY_CARBON_6 " +
"OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\\"')"
)
sql("CREATE TABLE NO_DICTIONARY_CARBON_7 (empno string, " +
"doj Timestamp, workgroupcategory Int, empname String,workgroupcategoryname String, " +
"deptno Int, deptname String, projectcode Int, projectjoindate Timestamp, " +
"projectenddate Timestamp, designation String,attendance Int,utilization " +
"Int,salary Int) STORED BY 'org.apache.carbondata.format' " +
"TBLPROPERTIES('DICTIONARY_EXCLUDE'='empno,empname,designation')"
)
sql(
s"LOAD DATA LOCAL INPATH '$resourcesPath/data.csv' INTO TABLE NO_DICTIONARY_CARBON_7 " +
"OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '\\"')"
)
sql("CREATE TABLE filtertestTable (ID string,date Timestamp, country String, " +
"name String, phonetype String, serialname String, salary Int) " +
"STORED BY 'org.apache.carbondata.format' " + "TBLPROPERTIES('DICTIONARY_EXCLUDE'='ID')"
)
CarbonProperties.getInstance()
.addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "yyyy-MM-dd HH:mm:ss")
sql(
s"LOAD DATA LOCAL INPATH '$resourcesPath/data2.csv' INTO TABLE filtertestTable OPTIONS"+
s"('DELIMITER'= ',', " +
s"'FILEHEADER'= '')"
)
}
test("Count (*) with filter") {
sql("select * from NO_DICTIONARY_CARBON_6 where empno > '11' and empno <= '30'")
checkAnswer(
sql("select count(*) from NO_DICTIONARY_CARBON_6 where empno='11'"),
Seq(Row(1))
)
}
test("Detail Query with NO_DICTIONARY_COLUMN Compare With HIVE RESULT") {
checkAnswer(
sql("select empno from NO_DICTIONARY_CARBON_6"),
Seq(Row("11"), Row("12"), Row("13"), Row("14"), Row("15"), Row("16"), Row("17"), Row("18"), Row("19"), Row("20"))
)
}
test("Detail Query with NO_DICTIONARY_COLUMN with Like range filter") {
checkAnswer(
sql("select empno from NO_DICTIONARY_CARBON_7 where empno like '12%'"),
Seq(Row("12"))
)
}
test("Detail Query with NO_DICTIONARY_COLUMN with greater than range filter") {
checkAnswer(
sql("select empno from NO_DICTIONARY_CARBON_7 where empno>'19'"),
Seq(Row("20"))
)
}
test("Detail Query with NO_DICTIONARY_COLUMN with in filter Compare With HIVE RESULT") {
checkAnswer(
sql("select empno from NO_DICTIONARY_CARBON_6 where empno in('11','12','13')"),
Seq(Row("11"), Row("12"), Row("13"))
)
}
test("Detail Query with NO_DICTIONARY_COLUMN with not in filter Compare With HIVE RESULT") {
checkAnswer(
sql("select empno from NO_DICTIONARY_CARBON_6 where empno not in('11','12','13','14','15','16','17')"),
Seq(Row("18"), Row("19"), Row("20"))
)
}
test("Detail Query with NO_DICTIONARY_COLUMN with equals filter Compare With HIVE RESULT") {
checkAnswer(
sql("select empno from NO_DICTIONARY_CARBON_6 where empno='17'"),
Seq(Row("17"))
)
}
test("Detail Query with NO_DICTIONARY_COLUMN with IS NOT NULL filter") {
checkAnswer(
sql("select id from filtertestTable where id is not null"),
Seq(Row("4"),Row("6"),Row("abc"))
)
}
test("filter with arithmetic expression") {
checkAnswer(
sql("select id from filtertestTable " + "where id+2 = 6"),
Seq(Row("4"))
)
}
test("Detail Query with NO_DICTIONARY_COLUMN with equals multiple filter Compare With HIVE " +
"RESULT"
) {
checkAnswer(
sql("select empno,empname,workgroupcategory from NO_DICTIONARY_CARBON_6 where empno='17'"),
sql("select empno,empname,workgroupcategory from NO_DICTIONARY_HIVE_6 where empno='17'")
)
}
test("ORDER Query with NO_DICTIONARY_COLUMN Compare With HIVE RESULT") {
checkAnswer(
sql("select empno from NO_DICTIONARY_HIVE_6 order by empno"),
sql("select empno from NO_DICTIONARY_CARBON_6 order by empno")
)
}
//TODO need to add filter test cases for no dictionary columns
//
// test("Filter Query with NO_DICTIONARY_COLUMN and DICTIONARY_COLUMN Compare With HIVE
// RESULT") {
//
// checkAnswer(
// sql("select empno from NO_DICTIONARY_HIVE_6 where empno=15 and deptno=12"),
// sql("select empno from NO_DICTIONARY_CARBON_6 where empno=15 and deptno=12"))
// }
test("Distinct Query with NO_DICTIONARY_COLUMN Compare With HIVE RESULT") {
checkAnswer(
sql("select count(distinct empno) from NO_DICTIONARY_HIVE_6"),
sql("select count(distinct empno) from NO_DICTIONARY_CARBON_6")
)
}
test("Sum Query with NO_DICTIONARY_COLUMN Compare With HIVE RESULT") {
checkAnswer(
sql("select sum(empno) from NO_DICTIONARY_HIVE_6"),
sql("select sum(empno) from NO_DICTIONARY_CARBON_6")
)
}
test("average Query with NO_DICTIONARY_COLUMN Compare With HIVE RESULT") {
checkAnswer(
sql("select avg(empno) from NO_DICTIONARY_HIVE_6"),
sql("select avg(empno) from NO_DICTIONARY_CARBON_6")
)
}
test("Multiple column group Query with NO_DICTIONARY_COLUMN Compare With HIVE RESULT") {
checkAnswer(
sql(
"select empno,empname,workgroupcategory from NO_DICTIONARY_HIVE_6 group by empno,empname," +
"workgroupcategory"
),
sql(
"select empno,empname,workgroupcategory from NO_DICTIONARY_CARBON_6 group by empno," +
"empname,workgroupcategory"
)
)
}
test("Multiple column Detail Query with NO_DICTIONARY_COLUMN Compare With HIVE RESULT") {
checkAnswer(
sql("select empno,empname,workgroupcategory from NO_DICTIONARY_HIVE_6"),
sql("select empno,empname,workgroupcategory from NO_DICTIONARY_CARBON_6 ")
)
}
override def afterAll {
sql("drop table if exists filtertestTable")
sql("drop table if exists NO_DICTIONARY_HIVE_6")
sql("drop table if exists NO_DICTIONARY_CARBON_6")
sql("drop table if exists NO_DICTIONARY_CARBON_7")
//sql("drop cube NO_DICTIONARY_CARBON_1")
}
} | ravipesala/incubator-carbondata | integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/detailquery/HighCardinalityDataTypesTestCase.scala | Scala | apache-2.0 | 8,736 |
package us.blelbinha.scalaredisexample
object Main {
def main(args: Array[String]) {
}
}
| ExNexu/scala-redis-example | src/main/scala/us/blelbinha/scalaredisexample/Main.scala | Scala | bsd-3-clause | 95 |
package com.jeffrey.ocr
import scala.collection.mutable
trait Digit {
val marks: Set[DigitMark]
val number: Option[Int]
val isLegal = true
def alternatives = Digit.getAlternatives(this)
def toChar = {
number match {
case Some(n) => (48 + n).toChar
case None => '?'
}
}
override def toString = {
(if (marks.contains(TopMark)) " _ " else " ") + "\\n" +
(if (marks.contains(TopLeftMark)) "|" else " ") +
(if (marks.contains(MiddleMark)) "_" else " ") +
(if (marks.contains(TopRightMark)) "|" else " ") + "\\n" +
(if (marks.contains(BottomLeftMark)) "|" else " ") +
(if (marks.contains(BottomMark)) "_" else " ") +
(if (marks.contains(BottomRightMark)) "|" else " ") + "\\n"
}
}
case object Zero extends Digit {
val number = Some(0)
val marks = Set[DigitMark](
TopMark, TopRightMark, BottomRightMark, BottomMark,
BottomLeftMark, TopLeftMark
)
}
case object One extends Digit {
val number = Some(1)
val marks = Set[DigitMark](TopRightMark, BottomRightMark)
}
case object Two extends Digit {
val number = Some(2)
val marks = Set[DigitMark](TopMark, TopRightMark, MiddleMark, BottomLeftMark, BottomMark)
}
case object Three extends Digit {
val number = Some(3)
val marks = Set[DigitMark](TopMark, TopRightMark, MiddleMark, BottomRightMark, BottomMark)
}
case object Four extends Digit {
val number = Some(4)
val marks = Set[DigitMark](TopLeftMark, TopRightMark, MiddleMark, BottomRightMark)
}
case object Five extends Digit {
val number = Some(5)
val marks = Set[DigitMark](TopMark, TopLeftMark, MiddleMark, BottomRightMark, BottomMark)
}
case object Six extends Digit {
val number = Some(6)
val marks = Set[DigitMark](TopMark, TopLeftMark, MiddleMark, BottomLeftMark, BottomRightMark, BottomMark)
}
case object Seven extends Digit {
val number = Some(7)
val marks = Set[DigitMark](TopMark, TopRightMark, BottomRightMark)
}
case object Eight extends Digit {
val number = Some(8)
val marks = Set[DigitMark](TopMark, TopLeftMark, MiddleMark, TopRightMark, BottomLeftMark, BottomRightMark,
BottomMark)
}
case object Nine extends Digit {
val number = Some(9)
val marks = Set[DigitMark](TopMark, TopLeftMark, MiddleMark, TopRightMark, BottomRightMark, BottomMark)
}
case class InvalidDigit(marks: Set[DigitMark]) extends Digit {
val number = None
override val isLegal = false
}
object Digit {
val allDigits = Set[Digit](Zero, One, Two, Three, Four, Five, Six, Seven, Eight, Nine)
def apply(lines: List[String]) = {
val marks = parseDigitMarks(lines)
allDigits.find(_.marks == marks).getOrElse(InvalidDigit(marks))
}
private def parseDigitMarks(lines: List[String]): Set[DigitMark] = {
val marks = new mutable.HashSet[DigitMark]
if (lines(0)(1) == '_')
marks += TopMark
if (lines(1)(0) == '|')
marks += TopLeftMark
if (lines(1)(2) == '|')
marks += TopRightMark
if (lines(1)(1) == '_')
marks += MiddleMark
if (lines(2)(0) == '|')
marks += BottomLeftMark
if (lines(2)(2) == '|')
marks += BottomRightMark
if (lines(2)(1) == '_')
marks += BottomMark
    marks.toSet
}
/**
* Return a list of any digits that the given digit can mutate into
* with only one digit mark change
*/
def getAlternatives(digit: Digit): List[Digit] = {
allDigits.filter(d => {
val intersect = (d.marks intersect digit.marks)
math.abs(intersect.size - math.max(d.marks.size, digit.marks.size)) == 1
}).toList
}
}
trait DigitMark
case object TopMark extends DigitMark
case object TopRightMark extends DigitMark
case object BottomRightMark extends DigitMark
case object BottomMark extends DigitMark
case object BottomLeftMark extends DigitMark
case object TopLeftMark extends DigitMark
case object MiddleMark extends DigitMark
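// --- Illustrative sketch (not part of the original file) --------------------------------
// Digit.apply expects the three-line, three-character OCR cell format read by
// parseDigitMarks above; this shows a "one" cell and its single-mark-change alternatives.
object DigitExample {
  val one: Digit = Digit(List(
    "   ",
    "  |",
    "  |"))
  // one == One, one.toChar == '1', and one.alternatives == List(Seven)
}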
| jbarrus/xtrac-dojo | src/main/scala/com/jeffrey/ocr/Digit.scala | Scala | mit | 4,170 |
package org.scalatra
import org.scalatra.test.scalatest.ScalatraFunSuite
import skinny.micro.SkinnyMicroServlet
import scala.collection.mutable.Map
trait AttributesTest {
this: ScalatraFunSuite =>
trait AttributesServlet extends SkinnyMicroServlet {
def attributesMap: Map[String, Any]
get("/attributes-test") {
attributesMap("one") = "1"
attributesMap("two") = "2"
attributesMap("three") = "3"
attributesMap -= "two"
attributesMap foreach { case (k, v) => response.setHeader(k, v.toString) }
}
}
test("apply should set request attribute") {
get("/attributes-test") {
header("one") should equal("1")
header("three") should equal("3")
}
}
test("-= should remove request attribute") {
get("/attributes-test") {
header("two") should equal(null)
}
}
}
| xerial/skinny-micro | micro/src/test/scala/org/scalatra/AttributesTest.scala | Scala | bsd-2-clause | 845 |
package com.adamek.example.configuration
import org.springframework.context.annotation.Configuration
import org.springframework.web.servlet.config.annotation.{EnableWebMvc, ResourceHandlerRegistry, WebMvcConfigurerAdapter}
@Configuration
@EnableWebMvc
class WebConfig extends WebMvcConfigurerAdapter {
/**
   * This is just for testing purposes. In a real case a view resolver should be
   * used for serving HTML content.
*/
override def addResourceHandlers(registry: ResourceHandlerRegistry) {
registry.addResourceHandler("/**").addResourceLocations("/");
}
}
| marekadamek/spring4-scala-websocket | src/main/scala/com/adamek/example/configuration/WebConfig.scala | Scala | mit | 574 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.streaming
import java.io.File
import java.nio.ByteBuffer
import scala.collection.mutable.ArrayBuffer
import scala.concurrent.duration._
import scala.language.{implicitConversions, postfixOps}
import scala.util.Random
import org.apache.hadoop.conf.Configuration
import org.scalatest.{BeforeAndAfter, Matchers}
import org.scalatest.concurrent.Eventually._
import org.apache.spark.{Logging, SparkConf, SparkException, SparkFunSuite}
import org.apache.spark.storage.StreamBlockId
import org.apache.spark.streaming.receiver.BlockManagerBasedStoreResult
import org.apache.spark.streaming.scheduler._
import org.apache.spark.streaming.util._
import org.apache.spark.streaming.util.WriteAheadLogSuite._
import org.apache.spark.util.{Clock, ManualClock, SystemClock, Utils}
class ReceivedBlockTrackerSuite
extends SparkFunSuite with BeforeAndAfter with Matchers with Logging {
val hadoopConf = new Configuration()
val akkaTimeout = 10 seconds
val streamId = 1
var allReceivedBlockTrackers = new ArrayBuffer[ReceivedBlockTracker]()
var checkpointDirectory: File = null
var conf: SparkConf = null
before {
conf = new SparkConf().setMaster("local[2]").setAppName("ReceivedBlockTrackerSuite")
checkpointDirectory = Utils.createTempDir()
}
after {
allReceivedBlockTrackers.foreach { _.stop() }
Utils.deleteRecursively(checkpointDirectory)
}
test("block addition, and block to batch allocation") {
val receivedBlockTracker = createTracker(setCheckpointDir = false)
    receivedBlockTracker.isWriteAheadLogEnabled should be (false) // should be disabled by default
receivedBlockTracker.getUnallocatedBlocks(streamId) shouldEqual Seq.empty
val blockInfos = generateBlockInfos()
blockInfos.map(receivedBlockTracker.addBlock)
// Verify added blocks are unallocated blocks
receivedBlockTracker.getUnallocatedBlocks(streamId) shouldEqual blockInfos
receivedBlockTracker.hasUnallocatedReceivedBlocks should be (true)
// Allocate the blocks to a batch and verify that all of them have been allocated
receivedBlockTracker.allocateBlocksToBatch(1)
receivedBlockTracker.getBlocksOfBatchAndStream(1, streamId) shouldEqual blockInfos
receivedBlockTracker.getBlocksOfBatch(1) shouldEqual Map(streamId -> blockInfos)
receivedBlockTracker.getUnallocatedBlocks(streamId) shouldBe empty
receivedBlockTracker.hasUnallocatedReceivedBlocks should be (false)
// Allocate no blocks to another batch
receivedBlockTracker.allocateBlocksToBatch(2)
receivedBlockTracker.getBlocksOfBatchAndStream(2, streamId) shouldBe empty
receivedBlockTracker.getBlocksOfBatch(2) shouldEqual Map(streamId -> Seq.empty)
// Verify that older batches have no operation on batch allocation,
// will return the same blocks as previously allocated.
receivedBlockTracker.allocateBlocksToBatch(1)
receivedBlockTracker.getBlocksOfBatchAndStream(1, streamId) shouldEqual blockInfos
blockInfos.map(receivedBlockTracker.addBlock)
receivedBlockTracker.allocateBlocksToBatch(2)
receivedBlockTracker.getBlocksOfBatchAndStream(2, streamId) shouldBe empty
receivedBlockTracker.getUnallocatedBlocks(streamId) shouldEqual blockInfos
}
test("recovery and cleanup with write ahead logs") {
val manualClock = new ManualClock
// Set the time increment level to twice the rotation interval so that every increment creates
// a new log file
def incrementTime() {
val timeIncrementMillis = 2000L
manualClock.advance(timeIncrementMillis)
}
// Generate and add blocks to the given tracker
def addBlockInfos(tracker: ReceivedBlockTracker): Seq[ReceivedBlockInfo] = {
val blockInfos = generateBlockInfos()
blockInfos.map(tracker.addBlock)
blockInfos
}
    // Print the data present in the write ahead log files in the log directory
def printLogFiles(message: String) {
val fileContents = getWriteAheadLogFiles().map { file =>
(s"\n>>>>> $file: <<<<<\n${getWrittenLogData(file).mkString("\n")}")
}.mkString("\n")
logInfo(s"\n\n=====================\n$message\n$fileContents\n=====================\n")
}
// Set WAL configuration
conf.set("spark.streaming.driver.writeAheadLog.rollingIntervalSecs", "1")
require(WriteAheadLogUtils.getRollingIntervalSecs(conf, isDriver = true) === 1)
// Start tracker and add blocks
val tracker1 = createTracker(clock = manualClock)
tracker1.isWriteAheadLogEnabled should be (true)
val blockInfos1 = addBlockInfos(tracker1)
tracker1.getUnallocatedBlocks(streamId).toList shouldEqual blockInfos1
// Verify whether write ahead log has correct contents
val expectedWrittenData1 = blockInfos1.map(BlockAdditionEvent)
getWrittenLogData() shouldEqual expectedWrittenData1
getWriteAheadLogFiles() should have size 1
incrementTime()
    // Restart the tracker without recovering from the WAL and verify that the list of unallocated blocks is empty
val tracker1_ = createTracker(clock = manualClock, recoverFromWriteAheadLog = false)
tracker1_.getUnallocatedBlocks(streamId) shouldBe empty
tracker1_.hasUnallocatedReceivedBlocks should be (false)
// Restart tracker and verify recovered list of unallocated blocks
val tracker2 = createTracker(clock = manualClock, recoverFromWriteAheadLog = true)
val unallocatedBlocks = tracker2.getUnallocatedBlocks(streamId).toList
unallocatedBlocks shouldEqual blockInfos1
unallocatedBlocks.foreach { block =>
block.isBlockIdValid() should be (false)
}
// Allocate blocks to batch and verify whether the unallocated blocks got allocated
val batchTime1 = manualClock.getTimeMillis()
tracker2.allocateBlocksToBatch(batchTime1)
tracker2.getBlocksOfBatchAndStream(batchTime1, streamId) shouldEqual blockInfos1
tracker2.getBlocksOfBatch(batchTime1) shouldEqual Map(streamId -> blockInfos1)
// Add more blocks and allocate to another batch
incrementTime()
val batchTime2 = manualClock.getTimeMillis()
val blockInfos2 = addBlockInfos(tracker2)
tracker2.allocateBlocksToBatch(batchTime2)
tracker2.getBlocksOfBatchAndStream(batchTime2, streamId) shouldEqual blockInfos2
// Verify whether log has correct contents
val expectedWrittenData2 = expectedWrittenData1 ++
Seq(createBatchAllocation(batchTime1, blockInfos1)) ++
blockInfos2.map(BlockAdditionEvent) ++
Seq(createBatchAllocation(batchTime2, blockInfos2))
getWrittenLogData() shouldEqual expectedWrittenData2
// Restart tracker and verify recovered state
incrementTime()
val tracker3 = createTracker(clock = manualClock, recoverFromWriteAheadLog = true)
tracker3.getBlocksOfBatchAndStream(batchTime1, streamId) shouldEqual blockInfos1
tracker3.getBlocksOfBatchAndStream(batchTime2, streamId) shouldEqual blockInfos2
tracker3.getUnallocatedBlocks(streamId) shouldBe empty
// Cleanup first batch but not second batch
val oldestLogFile = getWriteAheadLogFiles().head
incrementTime()
tracker3.cleanupOldBatches(batchTime2, waitForCompletion = true)
    // Verify that the batch allocations have been cleaned, and the action has been written to the log
tracker3.getBlocksOfBatchAndStream(batchTime1, streamId) shouldEqual Seq.empty
getWrittenLogData(getWriteAheadLogFiles().last) should contain(createBatchCleanup(batchTime1))
// Verify that at least one log file gets deleted
eventually(timeout(10 seconds), interval(10 millisecond)) {
getWriteAheadLogFiles() should not contain oldestLogFile
}
printLogFiles("After clean")
// Restart tracker and verify recovered state, specifically whether info about the first
// batch has been removed, but not the second batch
incrementTime()
val tracker4 = createTracker(clock = manualClock, recoverFromWriteAheadLog = true)
tracker4.getUnallocatedBlocks(streamId) shouldBe empty
tracker4.getBlocksOfBatchAndStream(batchTime1, streamId) shouldBe empty // should be cleaned
tracker4.getBlocksOfBatchAndStream(batchTime2, streamId) shouldEqual blockInfos2
}
test("disable write ahead log when checkpoint directory is not set") {
// When checkpoint is disabled, then the write ahead log is disabled
val tracker1 = createTracker(setCheckpointDir = false)
tracker1.isWriteAheadLogEnabled should be (false)
}
test("parallel file deletion in FileBasedWriteAheadLog is robust to deletion error") {
conf.set("spark.streaming.driver.writeAheadLog.rollingIntervalSecs", "1")
require(WriteAheadLogUtils.getRollingIntervalSecs(conf, isDriver = true) === 1)
val addBlocks = generateBlockInfos()
val batch1 = addBlocks.slice(0, 1)
val batch2 = addBlocks.slice(1, 3)
val batch3 = addBlocks.slice(3, addBlocks.length)
assert(getWriteAheadLogFiles().length === 0)
// list of timestamps for files
val t = Seq.tabulate(5)(i => i * 1000)
writeEventsManually(getLogFileName(t(0)), Seq(createBatchCleanup(t(0))))
assert(getWriteAheadLogFiles().length === 1)
// The goal is to create several log files which should have been cleaned up.
// If we face any issue during recovery, because these old files exist, then we need to make
// deletion more robust rather than a parallelized operation where we fire and forget
val batch1Allocation = createBatchAllocation(t(1), batch1)
writeEventsManually(getLogFileName(t(1)), batch1.map(BlockAdditionEvent) :+ batch1Allocation)
writeEventsManually(getLogFileName(t(2)), Seq(createBatchCleanup(t(1))))
val batch2Allocation = createBatchAllocation(t(3), batch2)
writeEventsManually(getLogFileName(t(3)), batch2.map(BlockAdditionEvent) :+ batch2Allocation)
writeEventsManually(getLogFileName(t(4)), batch3.map(BlockAdditionEvent))
// We should have 5 different log files as we called `writeEventsManually` with 5 different
// timestamps
assert(getWriteAheadLogFiles().length === 5)
// Create the tracker to recover from the log files. We're going to ask the tracker to clean
// things up, and then we're going to rewrite that data, and recover using a different tracker.
// They should have identical data no matter what
val tracker = createTracker(recoverFromWriteAheadLog = true, clock = new ManualClock(t(4)))
def compareTrackers(base: ReceivedBlockTracker, subject: ReceivedBlockTracker): Unit = {
subject.getBlocksOfBatchAndStream(t(3), streamId) should be(
base.getBlocksOfBatchAndStream(t(3), streamId))
subject.getBlocksOfBatchAndStream(t(1), streamId) should be(
base.getBlocksOfBatchAndStream(t(1), streamId))
subject.getBlocksOfBatchAndStream(t(0), streamId) should be(Nil)
}
// ask the tracker to clean up some old files
tracker.cleanupOldBatches(t(3), waitForCompletion = true)
assert(getWriteAheadLogFiles().length === 3)
val tracker2 = createTracker(recoverFromWriteAheadLog = true, clock = new ManualClock(t(4)))
compareTrackers(tracker, tracker2)
// rewrite first file
writeEventsManually(getLogFileName(t(0)), Seq(createBatchCleanup(t(0))))
assert(getWriteAheadLogFiles().length === 4)
// make sure trackers are consistent
val tracker3 = createTracker(recoverFromWriteAheadLog = true, clock = new ManualClock(t(4)))
compareTrackers(tracker, tracker3)
// rewrite second file
writeEventsManually(getLogFileName(t(1)), batch1.map(BlockAdditionEvent) :+ batch1Allocation)
assert(getWriteAheadLogFiles().length === 5)
// make sure trackers are consistent
val tracker4 = createTracker(recoverFromWriteAheadLog = true, clock = new ManualClock(t(4)))
compareTrackers(tracker, tracker4)
}
/**
   * Create a tracker object with the optionally provided clock. Use a fake clock if you
   * want to control time by manually incrementing it to test log cleanup.
*/
def createTracker(
setCheckpointDir: Boolean = true,
recoverFromWriteAheadLog: Boolean = false,
clock: Clock = new SystemClock): ReceivedBlockTracker = {
val cpDirOption = if (setCheckpointDir) Some(checkpointDirectory.toString) else None
val tracker = new ReceivedBlockTracker(
conf, hadoopConf, Seq(streamId), clock, recoverFromWriteAheadLog, cpDirOption)
allReceivedBlockTrackers += tracker
tracker
}
/** Generate blocks infos using random ids */
def generateBlockInfos(): Seq[ReceivedBlockInfo] = {
List.fill(5)(ReceivedBlockInfo(streamId, Some(0L), None,
BlockManagerBasedStoreResult(StreamBlockId(streamId, math.abs(Random.nextInt)), Some(0L))))
}
/**
* Write received block tracker events to a file manually.
*/
def writeEventsManually(filePath: String, events: Seq[ReceivedBlockTrackerLogEvent]): Unit = {
val writer = HdfsUtils.getOutputStream(filePath, hadoopConf)
events.foreach { event =>
val bytes = Utils.serialize(event)
writer.writeInt(bytes.size)
writer.write(bytes)
}
writer.close()
}
/** Get all the data written in the given write ahead log file. */
def getWrittenLogData(logFile: String): Seq[ReceivedBlockTrackerLogEvent] = {
getWrittenLogData(Seq(logFile))
}
/** Get the log file name for the given log start time. */
def getLogFileName(time: Long, rollingIntervalSecs: Int = 1): String = {
checkpointDirectory.toString + File.separator + "receivedBlockMetadata" +
File.separator + s"log-$time-${time + rollingIntervalSecs * 1000}"
}
/**
* Get all the data written in the given write ahead log files. By default, it will read all
* files in the test log directory.
*/
def getWrittenLogData(logFiles: Seq[String] = getWriteAheadLogFiles)
: Seq[ReceivedBlockTrackerLogEvent] = {
logFiles.flatMap {
file => new FileBasedWriteAheadLogReader(file, hadoopConf).toSeq
}.flatMap { byteBuffer =>
val validBuffer = if (WriteAheadLogUtils.isBatchingEnabled(conf, isDriver = true)) {
Utils.deserialize[Array[Array[Byte]]](byteBuffer.array()).map(ByteBuffer.wrap)
} else {
Array(byteBuffer)
}
validBuffer.map(b => Utils.deserialize[ReceivedBlockTrackerLogEvent](b.array()))
}.toList
}
/** Get all the write ahead log files in the test directory */
def getWriteAheadLogFiles(): Seq[String] = {
import ReceivedBlockTracker._
val logDir = checkpointDirToLogDir(checkpointDirectory.toString)
getLogFilesInDirectory(logDir).map { _.toString }
}
/** Create batch allocation object from the given info */
def createBatchAllocation(time: Long, blockInfos: Seq[ReceivedBlockInfo])
: BatchAllocationEvent = {
BatchAllocationEvent(time, AllocatedBlocks(Map((streamId -> blockInfos))))
}
/** Create batch clean object from the given info */
def createBatchCleanup(time: Long, moreTimes: Long*): BatchCleanupEvent = {
BatchCleanupEvent((Seq(time) ++ moreTimes).map(Time.apply))
}
implicit def millisToTime(milliseconds: Long): Time = Time(milliseconds)
implicit def timeToMillis(time: Time): Long = time.milliseconds
}
| chenc10/Spark-PAF | streaming/src/test/scala/org/apache/spark/streaming/ReceivedBlockTrackerSuite.scala | Scala | apache-2.0 | 15,902 |
package com.twitter.finatra.multiserver.Add1HttpServer
import com.twitter.adder.thriftscala.Adder
import com.twitter.finagle.http.Request
import com.twitter.finatra.http.Controller
import com.twitter.util.Future
import javax.inject.Inject
class Add1Controller @Inject()(
adder: Adder[Future])
extends Controller {
get("/add1") { request: Request =>
val num = request.getIntParam("num")
adder.add1(num)
}
get("/add1String") { request: Request =>
val num = request.getParam("num")
adder.add1String(num)
}
get("/slowAdd1") { request: Request =>
val num = request.getParam("num")
adder.add1Slowly(num)
}
get("/errorAdd1") { request: Request =>
val num = request.getParam("num")
adder.add1AlwaysError(num)
}
} | syamantm/finatra | inject-thrift-client-http-mapper/src/test/scala/com/twitter/finatra/multiserver/Add1HttpServer/Add1Controller.scala | Scala | apache-2.0 | 762 |
package org.jetbrains.plugins.scala
package refactoring.introduceField
import java.io.File
import com.intellij.openapi.util.io.FileUtil
import com.intellij.openapi.util.text.StringUtil
import com.intellij.openapi.vfs.{CharsetToolkit, LocalFileSystem}
import com.intellij.testFramework.UsefulTestCase
import org.jetbrains.plugins.scala.base.ScalaLightPlatformCodeInsightTestCaseAdapter
import org.jetbrains.plugins.scala.extensions.PsiElementExt
import org.jetbrains.plugins.scala.lang.lexer.ScalaTokenTypes
import org.jetbrains.plugins.scala.lang.psi.api.ScalaFile
import org.jetbrains.plugins.scala.lang.psi.api.expr.ScExpression
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.typedef.ScTemplateDefinition
import org.jetbrains.plugins.scala.lang.psi.types.ScType
import org.jetbrains.plugins.scala.lang.psi.types.api.Int
import org.jetbrains.plugins.scala.lang.refactoring.introduceField.{IntroduceFieldContext, IntroduceFieldSettings, ScalaIntroduceFieldFromExpressionHandler}
import org.jetbrains.plugins.scala.lang.refactoring.util.ScalaRefactoringUtil.getExpressionWithTypes
import org.jetbrains.plugins.scala.project.ProjectContext
import org.jetbrains.plugins.scala.util.ScalaUtils
import org.junit.Assert._
/**
* Nikolay.Tropin
* 7/17/13
*/
abstract class IntroduceFieldTestBase() extends ScalaLightPlatformCodeInsightTestCaseAdapter {
private val startMarker = "/*start*/"
private val endMarker = "/*end*/"
private val replaceAllMarker = "/*replaceAll*/"
private val initInDeclarationMarker = "/*initInDeclaration*/"
private val initLocallyMarker = "/*initLocally*/"
private val selectedClassNumberMarker = "/*selectedClassNumber = "
def folderPath: String = baseRootPath() + "introduceField/"
implicit def projectContext: ProjectContext = getProjectAdapter
protected def doTest(scType: ScType = Int) {
val filePath = folderPath + getTestName(false) + ".scala"
val file = LocalFileSystem.getInstance.findFileByPath(filePath.replace(File.separatorChar, '/'))
assert(file != null, "file " + filePath + " not found")
var fileText = StringUtil.convertLineSeparators(FileUtil.loadFile(new File(file.getCanonicalPath), CharsetToolkit.UTF8))
val startOffset = fileText.indexOf(startMarker)
assert(startOffset != -1, "Not specified start marker in test case. Use /*start*/ in scala file for this.")
fileText = fileText.replace(startMarker, "")
val endOffset = fileText.indexOf(endMarker)
assert(endOffset != -1, "Not specified end marker in test case. Use /*end*/ in scala file for this.")
fileText = fileText.replace(endMarker, "")
configureFromFileTextAdapter(getTestName(false) + ".scala", fileText)
val scalaFile = getFileAdapter.asInstanceOf[ScalaFile]
val editor = getEditorAdapter
editor.getSelectionModel.setSelection(startOffset, endOffset)
var res: String = null
val lastPsi = scalaFile.findElementAt(scalaFile.getText.length - 1)
val replaceAll = fileText.contains(replaceAllMarker)
val initInDecl = if (fileText.contains(initInDeclarationMarker)) Some(true)
else if (fileText.contains(initLocallyMarker)) Some(false)
else None
val selectedClassNumber = fileText.indexOf(selectedClassNumberMarker) match {
case -1 => 0
case idx: Int => fileText.charAt(idx + selectedClassNumberMarker.length).toString.toInt
}
//start to inline
try {
val handler = new ScalaIntroduceFieldFromExpressionHandler
val Some((expr, types)) = getExpressionWithTypes(scalaFile, startOffset, endOffset)(getProjectAdapter, editor)
val aClass = expr.parents.toList.filter(_.isInstanceOf[ScTemplateDefinition])(selectedClassNumber).asInstanceOf[ScTemplateDefinition]
val ifc = new IntroduceFieldContext[ScExpression](getProjectAdapter, editor, scalaFile, expr, types, aClass)
val settings = new IntroduceFieldSettings[ScExpression](ifc)
settings.replaceAll = replaceAll
initInDecl.foreach(settings.initInDeclaration = _)
settings.defineVar = true
settings.name = "i"
settings.scType = scType
ScalaUtils.runWriteActionDoNotRequestConfirmation(new Runnable {
def run() {
handler.runRefactoring(ifc, settings)
UsefulTestCase.doPostponedFormatting(getProjectAdapter)
}
}, getProjectAdapter, "Test")
res = scalaFile.getText.substring(0, lastPsi.getTextOffset).trim
}
catch {
case e: Exception => assert(assertion = false, message = e.getMessage + "\\n" + e.getStackTrace.map(_.toString).mkString(" \\n"))
}
val text = lastPsi.getText
val output = lastPsi.getNode.getElementType match {
case ScalaTokenTypes.tLINE_COMMENT => text.substring(2).trim
case ScalaTokenTypes.tBLOCK_COMMENT | ScalaTokenTypes.tDOC_COMMENT =>
text.substring(2, text.length - 2).trim
case _ =>
assertTrue("Test result must be in last comment statement.", false)
""
}
assertEquals(output, res)
}
}
| triplequote/intellij-scala | scala/scala-impl/test/org/jetbrains/plugins/scala/refactoring/introduceField/IntroduceFieldTestBase.scala | Scala | apache-2.0 | 4,999 |
package com.github.novamage.svalidator.validation
/**
* Contains information about errors ocurred during the validation process
*
* @param validationFailures List of failures that occurred during validation
*/
case class ValidationSummary(validationFailures: List[ValidationFailure])
extends ValidationResult[Nothing] {
def localize(implicit localizer: Localizer): ValidationSummary = {
ValidationSummary(validationFailures.map(_.localize))
}
override def data: Option[Nothing] = None
}
object ValidationSummary {
final val Empty: ValidationSummary = ValidationSummary(Nil)
}
| NovaMage/SValidator | src/main/scala/com/github/novamage/svalidator/validation/ValidationSummary.scala | Scala | mit | 604 |
// Returns the length of arr by counting one per element with foreach.
def f(arr: List[Int]): Int = {
  var count: Int = 0
  arr.foreach { _ => count += 1 }
  count
}
| anishacharya/SCALA | Scala_Practice/List_length.scala | Scala | mit | 118 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.plan.batch.sql
import org.apache.flink.api.scala._
import org.apache.flink.table.api.ValidationException
import org.apache.flink.table.api.scala._
import org.apache.flink.table.planner.utils.TableTestBase
import org.junit.Test
class RankTest extends TableTestBase {
private val util = batchTestUtil()
util.addTableSource[(Int, String, Long)]("MyTable", 'a, 'b, 'c)
@Test(expected = classOf[RuntimeException])
def testRowNumberWithoutOrderBy(): Unit = {
val sqlQuery =
"""
|SELECT ROW_NUMBER() over (partition by a) FROM MyTable
""".stripMargin
util.verifyPlan(sqlQuery)
}
@Test(expected = classOf[RuntimeException])
def testRowNumberWithMultiGroups(): Unit = {
val sqlQuery =
"""
|SELECT ROW_NUMBER() over (partition by a order by b) as a,
| ROW_NUMBER() over (partition by b) as b
| FROM MyTable
""".stripMargin
util.verifyPlan(sqlQuery)
}
@Test(expected = classOf[ValidationException])
def testRankWithoutOrderBy(): Unit = {
val sqlQuery =
"""
|SELECT RANK() over (partition by a) FROM MyTable
""".stripMargin
util.verifyPlan(sqlQuery)
}
@Test(expected = classOf[ValidationException])
def testRankWithMultiGroups(): Unit = {
val sqlQuery =
"""
|SELECT RANK() over (partition by a order by b) as a,
| RANK() over (partition by b) as b
| FROM MyTable
""".stripMargin
util.verifyPlan(sqlQuery)
}
@Test(expected = classOf[ValidationException])
def testDenseRankWithoutOrderBy(): Unit = {
val sqlQuery =
"""
|SELECT dense_rank() over (partition by a) FROM MyTable
""".stripMargin
util.verifyPlan(sqlQuery)
}
@Test(expected = classOf[ValidationException])
def testDenseRankWithMultiGroups(): Unit = {
val sqlQuery =
"""
|SELECT DENSE_RANK() over (partition by a order by b) as a,
| DENSE_RANK() over (partition by b) as b
| FROM MyTable
""".stripMargin
util.verifyPlan(sqlQuery)
}
@Test
def testRankValueFilterWithUpperValue(): Unit = {
val sqlQuery =
"""
|SELECT * FROM (
| SELECT a, b, RANK() OVER (PARTITION BY b ORDER BY a) rk FROM MyTable) t
|WHERE rk <= 2 AND a > 10
""".stripMargin
util.verifyPlan(sqlQuery)
}
@Test
def testRankValueFilterWithRange(): Unit = {
val sqlQuery =
"""
|SELECT * FROM (
| SELECT a, b, RANK() OVER (PARTITION BY b, c ORDER BY a) rk FROM MyTable) t
|WHERE rk <= 2 AND rk > -2
""".stripMargin
util.verifyPlan(sqlQuery)
}
@Test
def testRankValueFilterWithEquals(): Unit = {
val sqlQuery =
"""
|SELECT * FROM (
| SELECT a, b, RANK() OVER (PARTITION BY b ORDER BY a, c) rk FROM MyTable) t
|WHERE rk = 2
""".stripMargin
util.verifyPlan(sqlQuery)
}
@Test
def testWithoutPartitionBy(): Unit = {
val sqlQuery =
"""
|SELECT * FROM (
| SELECT a, b, RANK() OVER (ORDER BY a) rk FROM MyTable) t
|WHERE rk < 10
""".stripMargin
util.verifyPlan(sqlQuery)
}
@Test
def testMultiSameRankFunctionsWithSameGroup(): Unit = {
val sqlQuery =
"""
|SELECT * FROM (
| SELECT a, b,
| RANK() OVER (PARTITION BY b ORDER BY a) rk1,
| RANK() OVER (PARTITION BY b ORDER BY a) rk2 FROM MyTable) t
|WHERE rk1 < 10
""".stripMargin
util.verifyPlan(sqlQuery)
}
@Test
def testDuplicateRankFunctionColumnName(): Unit = {
util.addTableSource[(Int, Long, String)]("MyTable2", 'a, 'b, 'rk)
val sqlQuery =
"""
|SELECT * FROM (
| SELECT a, b, RANK() OVER (PARTITION BY b ORDER BY a) rk FROM MyTable2) t
|WHERE rk < 10
""".stripMargin
util.verifyPlan(sqlQuery)
}
@Test
def testRankFunctionInMiddle(): Unit = {
val sqlQuery =
"""
|SELECT * FROM (
| SELECT a, RANK() OVER (PARTITION BY a ORDER BY a) rk, b, c FROM MyTable) t
|WHERE rk < 10
""".stripMargin
util.verifyPlan(sqlQuery)
}
}
| hequn8128/flink | flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/planner/plan/batch/sql/RankTest.scala | Scala | apache-2.0 | 5,007 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.rpc
import scala.concurrent.Future
import scala.reflect.ClassTag
import org.apache.spark.SparkConf
import org.apache.spark.internal.Logging
import org.apache.spark.util.RpcUtils
/**
* A reference for a remote [[RpcEndpoint]]. [[RpcEndpointRef]] is thread-safe.
*/
private[spark] abstract class RpcEndpointRef(conf: SparkConf)
extends Serializable with Logging {
private[this] val maxRetries = RpcUtils.numRetries(conf)
private[this] val retryWaitMs = RpcUtils.retryWaitMs(conf)
private[this] val defaultAskTimeout = RpcUtils.askRpcTimeout(conf)
/**
* return the address for the [[RpcEndpointRef]]
*/
def address: RpcAddress
def name: String
/**
* Sends a one-way asynchronous message. Fire-and-forget semantics.
*/
def send(message: Any): Unit
/**
   * Send a message to the corresponding [[RpcEndpoint.receiveAndReply]] and return a [[Future]] to
* receive the reply within the specified timeout.
*
* This method only sends the message once and never retries.
*/
def ask[T: ClassTag](message: Any, timeout: RpcTimeout): Future[T]
/**
   * Send a message to the corresponding [[RpcEndpoint.receiveAndReply]] and return a [[Future]] to
* receive the reply within a default timeout.
*
* This method only sends the message once and never retries.
*/
def ask[T: ClassTag](message: Any): Future[T] = ask(message, defaultAskTimeout)
/**
* Send a message to the corresponding [[RpcEndpoint.receiveAndReply]] and get its result within a
* default timeout, throw an exception if this fails.
*
* Note: this is a blocking action which may cost a lot of time, so don't call it in a message
* loop of [[RpcEndpoint]].
* @param message the message to send
* @tparam T type of the reply message
* @return the reply message from the corresponding [[RpcEndpoint]]
*/
def askSync[T: ClassTag](message: Any): T = askSync(message, defaultAskTimeout)
/**
* Send a message to the corresponding [[RpcEndpoint.receiveAndReply]] and get its result within a
* specified timeout, throw an exception if this fails.
*
* Note: this is a blocking action which may cost a lot of time, so don't call it in a message
* loop of [[RpcEndpoint]].
*
* @param message the message to send
* @param timeout the timeout duration
* @tparam T type of the reply message
* @return the reply message from the corresponding [[RpcEndpoint]]
*/
def askSync[T: ClassTag](message: Any, timeout: RpcTimeout): T = {
val future = ask[T](message, timeout)
timeout.awaitResult(future)
}
}
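// --- Illustrative sketch (not part of the original file) --------------------------------
// The fire-and-forget vs. blocking call patterns described in the scaladoc above. The Ping
// message type is an assumption made up for illustration.
private[spark] object RpcEndpointRefExample {
  case class Ping(id: Int)
  def fireAndForget(ref: RpcEndpointRef): Unit =
    ref.send(Ping(1))            // one-way message, no reply expected
  def blockingAsk(ref: RpcEndpointRef): String =
    ref.askSync[String](Ping(2)) // blocks until the endpoint replies or the default timeout expires
}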
| aosagie/spark | core/src/main/scala/org/apache/spark/rpc/RpcEndpointRef.scala | Scala | apache-2.0 | 3,417 |
package target
import target.jvm._
import t_inline.{InlinePriority,MAYBE_HOT_USE,SINGLE_USE,MAYBE_COLD_USE}
object JVMConfig extends TargetConfig {
def maxInlineInstructions(priority: InlinePriority) = priority match {
// Note that this must not go about 10,000. 10,000 is treated as a
// 'never inline value'
case SINGLE_USE => 1000
// On the JVM, the cost of executing a function call can be quite high.
// It involves:
// new
// dup
// (then probably, for more than one argument)
// new Tuple
// dup
// init
// ... Generate and box each arg ... which may or may not be needed anyway
// init
// invokevirtual
// So at least 7 instructions.
// For a maybe cold use, we take this at face value.
//
// For a maybe hot use, we note that if the function had a lot of curried
    // arguments, then it is going to be expensive to execute by repeated
// currying.
//
// Arbitrarily, we return 4 times this.
case MAYBE_HOT_USE => 28
    // Currently the cold function analysis is very bad. It does not consider
// transitively hot functions. Once that is resolved, this should be
// lowered.
case MAYBE_COLD_USE => 28
}
def peepholeSet =
JVMPeephole.getSet
}
| j-c-w/mlc | src/main/scala/target/JVMConfig.scala | Scala | gpl-3.0 | 1,275 |
package com.arcusys.learn.liferay.util
import com.liferay.portal.service.GroupLocalServiceUtil
object CourseUtilHelper {
def getLink(courseId: Long): String = {
val course = GroupLocalServiceUtil.getGroup(courseId)
if (course.getPrivateLayoutsPageCount > 0)
"/group" + course.getFriendlyURL
else
"/web" + course.getFriendlyURL
}
def getName(courseId: Long): Option[String] = {
Option(GroupLocalServiceUtil.fetchGroup(courseId))
.map(_.getDescriptiveName)
}
def getCompanyId(courseId: Long): Long = {
GroupLocalServiceUtil.getGroup(courseId).getCompanyId
}
}
| arcusys/Valamis | learn-liferay620-services/src/main/scala/com/arcusys/learn/liferay/util/CourseUtilHelper.scala | Scala | gpl-3.0 | 615 |
/* _____ _
* | ___| __ __ _ _ __ ___ (_) __ _ _ __
* | |_ | '__/ _` | '_ ` _ \| |/ _` | '_ \
* | _|| | | (_| | | | | | | | (_| | | | |
* |_| |_| \__,_|_| |_| |_|_|\__,_|_| |_|
*
* Copyright 2014 Pellucid Analytics
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package framian
package column
import spire.macros.{ Checked, ArithmeticOverflowException }
private[framian] case class EvalColumn[A](f: Int => Cell[A]) extends BoxedColumn[A] {
override def apply(row: Int): Cell[A] = f(row)
def cellMap[B](g: Cell[A] => Cell[B]): Column[B] = EvalColumn(f andThen g)
def reindex(index: Array[Int]): Column[A] =
DenseColumn.force(EvalColumn(index andThen f), index.length)
def force(len: Int): Column[A] =
DenseColumn.force(this, len)
def mask(mask: Mask): Column[A] = EvalColumn { row =>
if (mask(row)) NA else f(row)
}
def setNA(naRow: Int): Column[A] = EvalColumn { row =>
if (row == naRow) NA else f(row)
}
def memoize(optimistic: Boolean): Column[A] =
if (optimistic) new OptimisticMemoizingColumn(f)
else new PessimisticMemoizingColumn(f)
def orElse[A0 >: A](that: Column[A0]): Column[A0] =
EvalColumn { row =>
f(row) match {
case NM => that(row) match {
case NA => NM
case cell => cell
}
case NA => that(row)
case cell => cell
}
}
def shift(n: Int): Column[A] = EvalColumn { row =>
try {
f(Checked.minus(row, n))
} catch { case (_: ArithmeticOverflowException) =>
// If we overflow, then it means that `row - n` overflowed and, hence,
// wrapped around. Since `shift` is meant to just shift rows, and not
// wrap them back around, we return an NA. So, if we have a `Column`
// defined for all rows, and shift it forward 1 row, then
// `Column(Int.MinValue)` should return `NA`.
NA
}
}
def zipMap[B, C](that: Column[B])(f: (A, B) => C): Column[C] = that match {
case (that: DenseColumn[_]) =>
DenseColumn.zipMap[A, B, C](this.force(that.values.length).asInstanceOf[DenseColumn[A]], that.asInstanceOf[DenseColumn[B]], f)
case _ =>
EvalColumn { row =>
(this(row), that(row)) match {
case (Value(a), Value(b)) => Value(f(a, b))
case (NA, _) | (_, NA) => NA
case _ => NM
}
}
}
}
| codeaudit/framian | framian/src/main/scala/framian/column/EvalColumn.scala | Scala | apache-2.0 | 2,880 |
package Generator
trait Generator {
type NodeType
type Connections
def neighbors(n: NodeType): Set[NodeType]
def connect(n: NodeType, ns: Set[NodeType], c: Connections): Connections
}
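
// An illustrative implementation of the trait for a graph whose nodes are Ints
// and whose connections are an adjacency map. The concrete types chosen here
// are assumptions for the example, not a prescribed representation.
object IntGraphGeneratorSketch extends Generator {
  type NodeType = Int
  type Connections = Map[Int, Set[Int]]

  // With no stored graph to consult, every node reports no neighbors.
  def neighbors(n: Int): Set[Int] = Set.empty[Int]

  // Record that node n is connected to every node in ns.
  def connect(n: Int, ns: Set[Int], c: Map[Int, Set[Int]]): Map[Int, Set[Int]] =
    c.updated(n, c.getOrElse(n, Set.empty[Int]) ++ ns)
}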
| neilvallon/Beagle | src/Generator/Generator.scala | Scala | mit | 193 |
package org.jetbrains.plugins.scala.lang.parser.parsing.top.params
import org.jetbrains.plugins.scala.lang.parser.ScalaElementTypes
import org.jetbrains.plugins.scala.lang.parser.parsing.builder.ScalaPsiBuilder
/**
* @author Alexander Podkhalyuzin
* Date: 08.02.2008
*/
/*
* ClassParamClauses ::= {ClassParamClause}
* [[nl] '(' 'implicit' ClassParams ')']
*/
object ClassParamClauses extends ClassParamClauses {
override protected def classParamClause = ClassParamClause
override protected def implicitClassParamClause = ImplicitClassParamClause
}
trait ClassParamClauses {
protected def classParamClause: ClassParamClause
protected def implicitClassParamClause: ImplicitClassParamClause
def parse(builder: ScalaPsiBuilder): Boolean = {
val classParamClausesMarker = builder.mark
while (classParamClause parse builder) {}
implicitClassParamClause parse builder
classParamClausesMarker.done(ScalaElementTypes.PARAM_CLAUSES)
true
}
} | gtache/intellij-lsp | intellij-lsp-dotty/src/org/jetbrains/plugins/scala/lang/parser/parsing/top/params/ClassParamClauses.scala | Scala | apache-2.0 | 993 |
import sbt._
import Keys._
object B extends Build
{
lazy val dep = Project("dep", file("dep")) settings( baseSettings : _*) settings(
organization := "org.example",
version := "1.0",
publishTo <<= baseDirectory in ThisBuild apply { base =>
Some(Resolver.file("file", base / "repo")(Resolver.ivyStylePatterns))
}
)
lazy val use = Project("use", file("use")) settings(baseSettings : _*) settings(
libraryDependencies += "org.example" %% "dep" % "1.0",
externalIvySettings(),
publishTo <<= baseDirectory { base =>
Some(Resolver.file("file", base / "repo")(Resolver.ivyStylePatterns))
},
TaskKey[Unit]("check") <<= baseDirectory map {base =>
val inCache = ( (base / "target" / "use-cache") ** "*.jar").get
assert(inCache.isEmpty, "Cache contained jars: " + inCache)
}
)
lazy val baseSettings = Seq(
autoScalaLibrary := false,
unmanagedJars in Compile <++= scalaInstance map (_.jars),
publishArtifact in packageSrc := false,
publishArtifact in packageDoc := false,
publishMavenStyle := false
)
} | jamesward/xsbt | sbt/src/sbt-test/dependency-management/ivy-settings-c/project/P.scala | Scala | bsd-3-clause | 1,039 |
//package io.youi.example.ui.hypertext
//
//import io.youi.Color
//import io.youi.hypertext.border.BorderStyle
//import io.youi.hypertext.style.Overflow
//import io.youi.hypertext.{Button, Component, Container}
//import io.youi.layout.GridLayout
//
//import scala.concurrent.Future
//
//object GridLayoutExample extends HTMLScreen {
// override def name: String = "HTML GridLayout Example"
//
// override protected def load(): Future[Unit] = super.load().map { _ =>
// val box1: Component = createBox("box1", Color.Red)
// val box2: Component = createBox("box2", Color.Green)
// val box3: Component = createBox("box3", Color.Blue)
// val box4: Component = createBox("box4", Color.Magenta)
// val box5: Component = createBox("box5", Color.Cyan)
// val box6: Component = createBox("box6", Color.SandyBrown)
// val box7: Component = createBox("box7", Color.Orange)
// val box8: Component = createBox("box8", Color.DarkSlateGray)
//
// val layoutContainer = new Container {
// id := "main"
//
// layoutManager := new GridLayout {
// columns := 3
// config.default.margin.top := Some(5.0)
// config.default.margin.left := Some(5.0)
// }
// backgroundColor := Color.Black
// size.height := 500.0
// position.left := 100.0
// position.top := 100.0
// overflow.x := Overflow.Hidden
// overflow.y := Overflow.Auto
//
// children += box1
// children += box2
// children += box3
// children += box4
// children += box5
// children += box6
// children += box7
// children += box8
// }
// container.children += layoutContainer
//
// container.children += new Button {
// text := "Jump to Middle"
//
// event.click.attach { _ =>
// layoutContainer.scrollbar.vertical.percentage := 0.5
// }
// }
// }
//
// private def createBox(name: String, c: Color): Component = new Container {
// id := name
// size.width := 350.0
// size.height := 250.0
// backgroundColor := c
// border.color := Some(Color.DeepPink)
// border.size := Some(1.0)
// border.style := Some(BorderStyle.Solid)
// border.radius := 5.0
// }
//
// override def path: String = "/examples/html/gridlayout.html"
//}
| outr/youi | example/js/src/main/scala/io/youi/example/ui/hypertext/GridLayoutExample.scala | Scala | mit | 2,260 |
/*
* Copyright (C) 2016 Christopher Batey and Dogan Narinc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scassandra.server.e2e.prepared
import java.util
import com.datastax.driver.core.Row
import dispatch.Defaults._
import dispatch._
import org.scalatest.BeforeAndAfter
import org.scalatest.concurrent.ScalaFutures
import org.scassandra.codec.datatype.DataType
import org.scassandra.server.priming.prepared.{ThenPreparedSingle, WhenPrepared}
import org.scassandra.server.{AbstractIntegrationTest, PrimingHelper}
class PreparedStatementWithCollectionsTest extends AbstractIntegrationTest with BeforeAndAfter with ScalaFutures {
before {
val svc = url("http://localhost:8043/prime-prepared-single").DELETE
val response = Http(svc OK as.String)
response()
}
test("Text map as a variable and column type") {
// given
val preparedStatementText = "Select * from people where id = ? and map = ?"
val primedRow = Map[String, Any]("map_column" -> Map("the" -> "result"))
    val mapVariable = new util.HashMap[String, String]()
    mapVariable.put("one", "ONE")
    mapVariable.put("two", "TWO")
val variableTypes: List[DataType] = List(DataType.Int, DataType.Map(DataType.Text, DataType.Text))
PrimingHelper.primePreparedStatement(
WhenPrepared(Some(preparedStatementText)),
ThenPreparedSingle(Some(List(primedRow)),
variable_types = Some(variableTypes),
column_types = Some(Map[String, DataType]("map_column" -> DataType.Map(DataType.Text, DataType.Text)))
)
)
// when
val preparedStatement = session.prepare(preparedStatementText)
    val boundStatement = preparedStatement.bind(Int.box(1), mapVariable)
val result = session.execute(boundStatement)
// then
val all: util.List[Row] = result.all()
all.size() should equal(1)
val resultRow = all.get(0)
val resultMap = resultRow.getMap("map_column", classOf[String], classOf[String])
resultMap.get("the") should equal("result")
}
}
| mikefero/cpp-driver | gtests/src/integration/scassandra/server/server/src/test/scala/org/scassandra/server/e2e/prepared/PreparedStatementWithCollectionsTest.scala | Scala | apache-2.0 | 2,518 |
package org.opennetworkinsight.netflow
object FlowColumnIndex extends Enumeration {
val HOUR = 4
val MINUTE = 5
val SECOND = 6
val SOURCEIP = 8
val DESTIP = 9
val SOURCEPORT = 10
val DESTPORT = 11
val IPKT = 16
val IBYT = 17
val NUMTIME = 27
val IBYTBIN = 28
val IPKTYBIN = 29
val TIMEBIN = 30
val PORTWORD = 31
val IPPAIR = 32
val SOURCEWORD = 33
val DESTWORD = 34
}
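
// An illustrative use of the indices above to pull fields out of a parsed
// netflow record; the Array[String] record layout is an assumption made for
// the example only.
object FlowColumnIndexUsageSketch {
  def sourceAndDestIp(record: Array[String]): (String, String) =
    (record(FlowColumnIndex.SOURCEIP), record(FlowColumnIndex.DESTIP))

  def portPair(record: Array[String]): (String, String) =
    (record(FlowColumnIndex.SOURCEPORT), record(FlowColumnIndex.DESTPORT))
}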
| Open-Network-Insight/oni-ml | src/main/scala/org/opennetworkinsight/netflow/FlowColumnIndex.scala | Scala | apache-2.0 | 440 |
package pages
import org.scalatest.selenium.WebBrowser.currentUrl
import pages.vrm_assign.VrmLockedPage.url
import uk.gov.dvla.vehicles.presentation.common.helpers.webbrowser.WebBrowserDriver
final class VrmLockedPageSteps(implicit webDriver: WebBrowserDriver)
extends helpers.AcceptanceTestHelper {
def `is displayed` = {
eventually {
currentUrl should equal(url)
}
this
}
}
| dvla/vrm-assign-online | acceptance-tests/src/test/scala/pages/VrmLockedPageSteps.scala | Scala | mit | 403 |
/*
* La Trobe University - Distributed Deep Learning System
* Copyright 2014 Matthias Langer ([email protected])
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package edu.latrobe.blaze.modules
import edu.latrobe._
import edu.latrobe.blaze._
import edu.latrobe.blaze.modules.jvm._
/**
* SoftPlus function
* (differentiable approximation of rectified linear)
* ( x )
* Activation: softPlus(x) = ln( 1 + e )
* ( )
*
* -1 ( x )
* Inverse: softPlus(x) = ln( e - 1 )
* ( )
*
* 1
* Gradient: softPlus(x)' = ------- = sigmoid(x)
* -x
* 1 + e
*/
abstract class SoftPlus
extends NonTrainableMapLayer[SoftPlusBuilder]
with NonPenalizing {
// ---------------------------------------------------------------------------
// Forward propagation related.
// ---------------------------------------------------------------------------
final override protected def doPredict(mode: Mode,
inPlaceAllowed: Boolean,
input: Tensor,
reference: Tensor)
: (Tensor, PredictContext) = {
val out = doPredict(input)
(out, EmptyContext)
}
protected def doPredict(input: Tensor): Tensor
final override protected def doPredictInv(output: Tensor,
context: PredictContext)
: Tensor = doPredictInv(output)
protected def doPredictInv(output: Tensor): Tensor
// ---------------------------------------------------------------------------
// Back propagation related.
// ---------------------------------------------------------------------------
final override val backpropagationRequirementsForInput
: TensorDependency = TensorDependency.Required
final override val backpropagationRequirementsForOutput
: TensorDependency = TensorDependency.NotRequired
final override protected def doDeriveInputError(input: Tensor,
reference: Tensor,
output: Tensor,
context: PredictContext,
error: Tensor)
: Tensor = doDeriveInputError(input, error)
protected def doDeriveInputError(input: Tensor, error: Tensor): Tensor
}
final class SoftPlusBuilder
extends NonTrainableMapLayerBuilder[SoftPlusBuilder] {
override def repr
: SoftPlusBuilder = this
override def canEqual(that: Any)
: Boolean = that.isInstanceOf[SoftPlusBuilder]
override protected def doCopy()
: SoftPlusBuilder = SoftPlusBuilder()
override def outputPlatformFor(hints: BuildHints)
: Platform = SoftPlusBuilder.outputPlatformFor(this, hints)
override def build(hints: BuildHints,
seed: InstanceSeed,
weightsBuilder: ValueTensorBufferBuilder)
: Module = SoftPlusBuilder.lookupAndBuild(this, hints, seed, weightsBuilder)
}
object SoftPlusBuilder
extends ModuleVariantTable[SoftPlusBuilder] {
register(2, SoftPlus_JVM_Baseline_Description)
final def apply(): SoftPlusBuilder = new SoftPlusBuilder
}
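
// Scalar forms of the formulas documented at the top of this file, written as
// plain functions purely for illustration; they are not used by the module.
object SoftPlusMathSketch {
  def softPlus(x: Double): Double = math.log1p(math.exp(x)) // ln(1 + e^x)
  def softPlusInv(y: Double): Double = math.log(math.exp(y) - 1.0) // ln(e^y - 1)
  def softPlusGrad(x: Double): Double = 1.0 / (1.0 + math.exp(-x)) // sigmoid(x)
}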
| bashimao/ltudl | blaze/src/main/scala/edu/latrobe/blaze/modules/SoftPlus.scala | Scala | apache-2.0 | 3,941 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.query
import org.apache.atlas.repository.BaseTest
import org.testng.annotations.{BeforeMethod,Test}
class ParserTest extends BaseTest {
@BeforeMethod
override def setup {
super.setup
QueryTestsUtils.setupTypes
}
@Test def testFrom: Unit = {
val p = new QueryParser
println(p("from DB").right.get.toString)
}
@Test def testFrom2: Unit = {
val p = new QueryParser
println(p("DB").right.get.toString)
}
@Test def testJoin1: Unit = {
val p = new QueryParser
println(p("DB, Table").right.get.toString)
}
@Test def testWhere1: Unit = {
val p = new QueryParser
println(p("DB as db1 Table where db1.name ").right.get.toString)
}
@Test def testWhere2: Unit = {
val p = new QueryParser
println(p("DB name = \\"Reporting\\"").right.get.toString)
}
@Test def testIsTrait: Unit = {
val p = new QueryParser
println(p("Table isa Dimension").right.get.toString)
println(p("Table is Dimension").right.get.toString)
}
@Test def test4: Unit = {
val p = new QueryParser
println(p("DB where (name = \\"Reporting\\") select name as _col_0, (createTime + 1) as _col_1").right.get.toString)
}
@Test def testJoin2: Unit = {
val p = new QueryParser
println(p("DB as db1 where (createTime + 1) > 0 and (db1.name = \\"Reporting\\") or DB has owner Table as tab " +
" select db1.name as dbName, tab.name as tabName").right.get.toString)
}
@Test def testLoop: Unit = {
val p = new QueryParser
println(p("Table loop (LoadProcess outputTable)").right.get.toString)
}
@Test def testNegInvalidateType: Unit = {
val p = new QueryParser
val x = p("from blah")
println(p("from blah").left)
}
@Test def testPath1: Unit = {
val p = new QueryParser
println(p("Table loop (LoadProcess outputTable) withPath").right.get.toString)
}
@Test def testPath2: Unit = {
val p = new QueryParser
println(p(
"Table as src loop (LoadProcess outputTable) as dest " +
"select src.name as srcTable, dest.name as destTable withPath").right.get.toString
)
}
@Test def testList: Unit = {
val p = new QueryParser
println(p(
"Partition as p where values = ['2015-01-01']," +
" table where name = 'tableoq8ty'," +
" db where name = 'default' and clusterName = 'test'").right.get.toString
)
}
}
| SarahMehddi/HelloWorld | repository/src/test/scala/org/apache/atlas/query/ParserTest.scala | Scala | apache-2.0 | 3,341 |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest
import scala.collection.mutable.ListBuffer
import org.scalatest.events.Event
import org.scalatest.events.Ordinal
import org.scalatest.SharedHelpers.SilentReporter
import org.scalatest.exceptions.NotAllowedException
class BeforeNAfterSuite extends FunSuite {
class TheSuper extends Spec {
var runTestWasCalled = false
var runWasCalled = false
protected override def runTest(testName: String, args: Args): Status = {
runTestWasCalled = true
super.runTest(testName, args)
}
override def run(testName: Option[String], args: Args): Status = {
runWasCalled = true
super.run(testName, args)
}
}
class MySuite extends TheSuper with BeforeAndAfter {
var beforeCalledBeforeRunTest = false
var afterCalledAfterRunTest = false
before {
if (!runTestWasCalled)
beforeCalledBeforeRunTest = true
}
def `test something` = ()
after {
if (runTestWasCalled)
afterCalledAfterRunTest = true
}
}
test("super's runTest must be called") {
val a = new MySuite
a.run(None, Args(SilentReporter, Stopper.default, Filter(), ConfigMap("hi" -> "there"), None, new Tracker, Set.empty))
assert(a.runTestWasCalled)
}
test("super's run must be called") {
val a = new MySuite
a.run(None, Args(SilentReporter, Stopper.default, Filter(), ConfigMap("hi" -> "there"), None, new Tracker, Set.empty))
assert(a.runWasCalled)
}
test("before gets called before runTest") {
val a = new MySuite
a.run(None, Args(SilentReporter, Stopper.default, Filter(), ConfigMap("hi" -> "there"), None, new Tracker, Set.empty))
assert(a.beforeCalledBeforeRunTest)
}
test("after gets called after runTest") {
val a = new MySuite
a.run(None, Args(SilentReporter, Stopper.default, Filter(), ConfigMap("hi" -> "there"), None, new Tracker, Set.empty))
assert(a.afterCalledAfterRunTest)
}
// test exceptions with runTest
test("If any invocation of before completes abruptly with an exception, runTest " +
"will complete abruptly with the same exception.") {
class MySuite extends Suite with BeforeAndAfter {
before { throw new NumberFormatException }
}
intercept[NumberFormatException] {
val a = new MySuite
a.run(Some("july"), Args(StubReporter))
}
}
test("If any call to super.runTest completes abruptly with an exception, runTest " +
"will complete abruptly with the same exception, however, before doing so, it will invoke after") {
trait FunkySuite extends Suite {
protected override def runTest(testName: String, args: Args): Status = {
throw new NumberFormatException
}
}
class MySuite extends FunkySuite with BeforeAndAfter {
var afterCalled = false
after {
afterCalled = true
}
}
val a = new MySuite
intercept[NumberFormatException] {
a.run(Some("july"), Args(StubReporter))
}
assert(a.afterCalled)
}
test("If both super.runTest and after complete abruptly with an exception, runTest " +
"will complete abruptly with the exception thrown by super.runTest.") {
trait FunkySuite extends Suite {
protected override def runTest(testName: String, args: Args): Status = {
throw new NumberFormatException
}
}
class MySuite extends FunkySuite with BeforeAndAfter {
var afterCalled = false
after {
afterCalled = true
throw new IllegalArgumentException
}
}
val a = new MySuite
intercept[NumberFormatException] {
a.run(Some("july"), Args(StubReporter))
}
assert(a.afterCalled)
}
test("If super.runTest returns normally, but after completes abruptly with an " +
"exception, runTest will complete abruptly with the same exception.") {
class MySuite extends Spec with BeforeAndAfter {
after { throw new NumberFormatException }
def `test July` = ()
}
intercept[NumberFormatException] {
val a = new MySuite
a.run(Some("test July"), Args(StubReporter))
}
}
// test exceptions with run
test("If before is called twice, the second invocation should produce NotAllowedException") {
var beforeRegisteredFirstTime = false
var beforeRegisteredSecondTime = false
class MySuite extends Suite with BeforeAndAfter {
var s = "zero"
before {
s = "one"
}
beforeRegisteredFirstTime = true
before {
s = "two"
}
beforeRegisteredSecondTime = true
}
intercept[NotAllowedException] {
new MySuite
}
assert(beforeRegisteredFirstTime)
assert(!beforeRegisteredSecondTime)
}
test("If before is called after run is invoked, the test should fail with NotAllowedException") {
var beforeRegisteredFirstTime = false
var beforeRegisteredSecondTime = false
class MySuite extends FunSuite with BeforeAndAfter {
var s = "zero"
var notAllowedExceptionThrown = false
test("this one should fail") {
try {
before {
s = "one"
}
}
catch {
case _: NotAllowedException => notAllowedExceptionThrown = true
case e: Throwable => throw e
}
}
}
val a = new MySuite
a.run(None, Args(StubReporter))
assert(a.notAllowedExceptionThrown)
}
test("If after is called twice, the second invocation should produce NotAllowedException") {
var afterRegisteredFirstTime = false
var afterRegisteredSecondTime = false
class MySuite extends Suite with BeforeAndAfter {
var s = "zero"
after {
s = "one"
}
afterRegisteredFirstTime = true
after {
s = "two"
}
afterRegisteredSecondTime = true
}
intercept[NotAllowedException] {
new MySuite
}
assert(afterRegisteredFirstTime)
assert(!afterRegisteredSecondTime)
}
test("If after is called after run is invoked, the test should fail with NotAllowedException") {
var afterRegisteredFirstTime = false
var afterRegisteredSecondTime = false
class MySuite extends FunSuite with BeforeAndAfter {
var s = "zero"
var notAllowedExceptionThrown = false
test("this one should fail") {
try {
after {
s = "one"
}
}
catch {
case _: NotAllowedException => notAllowedExceptionThrown = true
case e: Throwable => throw e
}
}
}
val a = new MySuite
a.run(None, Args(StubReporter))
assert(a.notAllowedExceptionThrown)
}
}
class BeforeNAfterExtendingSuite extends Spec with BeforeAndAfter {
var sb: StringBuilder = _
val lb = new ListBuffer[String]
before {
sb = new StringBuilder("ScalaTest is ")
lb.clear()
}
def `test easy` = {
sb.append("easy!")
assert(sb.toString === "ScalaTest is easy!")
assert(lb.isEmpty)
lb += "sweet"
}
def `test fun` = {
sb.append("fun!")
assert(sb.toString === "ScalaTest is fun!")
assert(lb.isEmpty)
}
}
class BeforeNAfterExtendingFunSuite extends FunSuite with BeforeAndAfter {
var sb: StringBuilder = _
val lb = new ListBuffer[String]
before {
sb = new StringBuilder("ScalaTest is ")
lb.clear()
}
test("easy") {
sb.append("easy!")
assert(sb.toString === "ScalaTest is easy!")
assert(lb.isEmpty)
lb += "sweet"
}
test("fun") {
sb.append("fun!")
assert(sb.toString === "ScalaTest is fun!")
assert(lb.isEmpty)
}
}
| SRGOM/scalatest | scalatest-test/src/test/scala/org/scalatest/BeforeNAfterSuite.scala | Scala | apache-2.0 | 8,141 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
//
// package com.intel.analytics.bigdl.dllib.feature.pmem
//
// import org.apache.spark.unsafe.Platform
//
// class VarLenBytesArray(recordNum: Int, totalSizeByBytes: Long,
// memoryType: MemoryType = PMEM) extends NativeVarLenArray[Byte](recordNum,
// totalSizeByBytes, memoryType, 0) {
//
// def putSingle(offset: Long, s: Byte): Unit = {
// Platform.putByte(null, offset, s.asInstanceOf[Byte])
// }
//
// override def getTypeOffSet(): Int = Platform.BYTE_ARRAY_OFFSET
// }
//
// class FixLenBytesArray(val numOfRecord: Long, val sizeOfRecordByByte: Int,
// memoryType: MemoryType = PMEM) extends
// NativeArray[Array[Byte]](numOfRecord * sizeOfRecordByByte, memoryType) {
//
// override def get(i: Int): Array[Byte] = {
// val result = new Array[Byte](sizeOfRecordByByte)
// Platform.copyMemory(null, indexOf(i), result, Platform.BYTE_ARRAY_OFFSET, sizeOfRecordByByte)
// return result
// }
//
// // TODO: would be slow if we put byte one by one.
// def set(i: Int, bytes: Array[Byte]): Unit = {
// assert(!deleted)
// val startOffset = indexOf(i)
// var j = 0
// while(j < bytes.length) {
// Platform.putByte(null, startOffset + j, bytes(j))
// j += 1
// }
// }
//
// def indexOf(i: Int): Long = {
// val index = startAddr + (i * sizeOfRecordByByte)
// assert(index <= lastOffSet)
// index
// }
// }
//
| intel-analytics/BigDL | scala/dllib/src/main/scala/com/intel/analytics/bigdl/dllib/feature/pmem/NativeByteArray.scala | Scala | apache-2.0 | 1,974 |
package models
import anorm._
import play.api.libs.json._
trait PkFormatter {
implicit object PkFormat extends Format[Pk[Long]] {
def reads(json: JsValue):JsResult[Pk[Long]] = JsSuccess(Id(json.as[Long]))
def writes(id: Pk[Long]):JsNumber = JsNumber(id.get)
}
}
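
// An illustrative round trip through the implicit PkFormat above; the object
// and method names below are assumptions for the example, not part of the
// application's API.
object PkFormatterUsageSketch extends PkFormatter {
  def idToJson(id: Pk[Long]): JsValue = Json.toJson(id)(PkFormat)
  def idFromJson(js: JsValue): JsResult[Pk[Long]] = js.validate[Pk[Long]](PkFormat)
}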
| radiantgeek/spending_map | app/models/PkFormatter.scala | Scala | apache-2.0 | 276 |
import sbt._
import Keys._
import sbtassembly.Plugin._
import AssemblyKeys._
import sbtrelease.ReleasePlugin._
import com.typesafe.sbt.SbtScalariform._
import net.virtualvoid.sbt.graph.Plugin.graphSettings
import ohnosequences.sbt.SbtS3Resolver.S3Resolver
import ohnosequences.sbt.SbtS3Resolver.{ s3, s3resolver }
import org.scalastyle.sbt.ScalastylePlugin.{ Settings => styleSettings }
import scalariform.formatter.preferences._
import sbtbuildinfo.Plugin._
import spray.revolver.RevolverPlugin.Revolver.{settings => revolverSettings}
object MarathonBuild extends Build {
lazy val root = Project(
id = "marathon",
base = file("."),
settings = baseSettings ++
asmSettings ++
releaseSettings ++
publishSettings ++
formatSettings ++
styleSettings ++
revolverSettings ++
graphSettings ++
Seq(
libraryDependencies ++= Dependencies.root,
parallelExecution in Test := false,
fork in Test := true
)
)
.configs(IntegrationTest)
.settings(inConfig(IntegrationTest)(Defaults.testTasks): _*)
.settings(testOptions in Test := Seq(Tests.Argument("-l", "integration")))
.settings(testOptions in IntegrationTest := Seq(Tests.Argument("-n", "integration")))
lazy val testScalaStyle = taskKey[Unit]("testScalaStyle")
lazy val IntegrationTest = config("integration") extend Test
lazy val baseSettings = Defaults.defaultSettings ++ buildInfoSettings ++ Seq (
organization := "mesosphere",
scalaVersion := "2.11.5",
scalacOptions in Compile ++= Seq(
"-encoding", "UTF-8",
"-target:jvm-1.6",
"-deprecation",
"-feature",
"-unchecked",
"-Xlog-reflective-calls",
"-Xlint",
"-Ywarn-unused-import",
"-Xfatal-warnings",
"-Yno-adapted-args",
"-Ywarn-numeric-widen"
),
javacOptions in Compile ++= Seq("-encoding", "UTF-8", "-source", "1.6", "-target", "1.6", "-Xlint:unchecked", "-Xlint:deprecation"),
resolvers ++= Seq(
"Mesosphere Public Repo" at "http://downloads.mesosphere.io/maven",
"Twitter Maven2 Repository" at "http://maven.twttr.com/",
"Typesafe Releases" at "http://repo.typesafe.com/typesafe/releases/",
"Spray Maven Repository" at "http://repo.spray.io/"
),
sourceGenerators in Compile <+= buildInfo,
fork in Test := true,
buildInfoKeys := Seq[BuildInfoKey](name, version, scalaVersion),
buildInfoPackage := "mesosphere.marathon",
testScalaStyle := {
org.scalastyle.sbt.PluginKeys.scalastyle.toTask("").value
},
(test in Test) <<= (test in Test) dependsOn testScalaStyle
)
lazy val asmSettings = assemblySettings ++ Seq(
mergeStrategy in assembly <<= (mergeStrategy in assembly) { old =>
{
case "application.conf" => MergeStrategy.concat
case "META-INF/jersey-module-version" => MergeStrategy.first
case "log4j.properties" => MergeStrategy.concat
case "org/apache/hadoop/yarn/util/package-info.class" => MergeStrategy.first
case "org/apache/hadoop/yarn/factories/package-info.class" => MergeStrategy.first
case "org/apache/hadoop/yarn/factory/providers/package-info.class" => MergeStrategy.first
case x => old(x)
}
},
excludedJars in assembly <<= (fullClasspath in assembly) map { cp =>
val exclude = Set(
"commons-beanutils-1.7.0.jar",
"stax-api-1.0.1.jar",
"commons-beanutils-core-1.8.0.jar",
"servlet-api-2.5.jar",
"jsp-api-2.1.jar"
)
cp filter { x => exclude(x.data.getName) }
}
)
lazy val publishSettings = S3Resolver.defaults ++ Seq(
publishTo := Some(s3resolver.value(
"Mesosphere Public Repo (S3)",
s3("downloads.mesosphere.com/maven")
))
)
lazy val formatSettings = scalariformSettings ++ Seq(
ScalariformKeys.preferences := FormattingPreferences()
.setPreference(IndentWithTabs, false)
.setPreference(IndentSpaces, 2)
.setPreference(AlignParameters, true)
.setPreference(DoubleIndentClassDeclaration, true)
.setPreference(MultilineScaladocCommentsStartOnFirstLine, false)
.setPreference(PlaceScaladocAsterisksBeneathSecondAsterisk, true)
.setPreference(PreserveDanglingCloseParenthesis, true)
.setPreference(CompactControlReadability, true) //MV: should be false!
.setPreference(AlignSingleLineCaseStatements, true)
.setPreference(PreserveSpaceBeforeArguments, true)
.setPreference(SpaceBeforeColon, false)
.setPreference(SpaceInsideBrackets, false)
.setPreference(SpaceInsideParentheses, false)
.setPreference(SpacesWithinPatternBinders, true)
.setPreference(FormatXml, true)
)
}
object Dependencies {
import Dependency._
val root = Seq(
// runtime
akkaActor % "compile",
akkaSlf4j % "compile",
sprayClient % "compile",
sprayHttpx % "compile",
chaos % "compile",
mesosUtils % "compile",
jacksonCaseClass % "compile",
twitterCommons % "compile",
twitterZkClient % "compile",
jodaTime % "compile",
jodaConvert % "compile",
jerseyServlet % "compile",
jerseyMultiPart % "compile",
uuidGenerator % "compile",
jGraphT % "compile",
hadoopHdfs % "compile",
hadoopCommon % "compile",
beanUtils % "compile",
scallop % "compile",
playJson % "compile",
// test
Test.scalatest % "test",
Test.mockito % "test",
Test.akkaTestKit % "test"
)
}
object Dependency {
object V {
// runtime deps versions
val Chaos = "0.6.2"
val JacksonCCM = "0.1.2"
val MesosUtils = "0.21.0-1"
val Akka = "2.3.9"
val Spray = "1.3.2"
val TwitterCommons = "0.0.76"
val TwitterZKClient = "0.0.70"
val Jersey = "1.18.1"
val JodaTime = "2.3"
val JodaConvert = "1.6"
val UUIDGenerator = "3.1.3"
val JGraphT = "0.9.1"
val Hadoop = "2.4.1"
val Scallop = "0.9.5"
val PlayJson = "2.3.7"
// test deps versions
val Mockito = "1.9.5"
val ScalaTest = "2.1.7"
}
val excludeMortbayJetty = ExclusionRule(organization = "org.mortbay.jetty")
val excludeJavaxServlet = ExclusionRule(organization = "javax.servlet")
val akkaActor = "com.typesafe.akka" %% "akka-actor" % V.Akka
val akkaSlf4j = "com.typesafe.akka" %% "akka-slf4j" % V.Akka
val sprayClient = "io.spray" %% "spray-client" % V.Spray
val sprayHttpx = "io.spray" %% "spray-httpx" % V.Spray
val playJson = "com.typesafe.play" %% "play-json" % V.PlayJson
val chaos = "mesosphere" %% "chaos" % V.Chaos
val mesosUtils = "mesosphere" %% "mesos-utils" % V.MesosUtils
val jacksonCaseClass = "mesosphere" %% "jackson-case-class-module" % V.JacksonCCM
val jerseyServlet = "com.sun.jersey" % "jersey-servlet" % V.Jersey
val jerseyMultiPart = "com.sun.jersey.contribs" % "jersey-multipart" % V.Jersey
val jodaTime = "joda-time" % "joda-time" % V.JodaTime
val jodaConvert = "org.joda" % "joda-convert" % V.JodaConvert
val twitterCommons = "com.twitter.common.zookeeper" % "candidate" % V.TwitterCommons
val twitterZkClient = "com.twitter.common.zookeeper" % "client" % V.TwitterZKClient
val uuidGenerator = "com.fasterxml.uuid" % "java-uuid-generator" % V.UUIDGenerator
val jGraphT = "org.javabits.jgrapht" % "jgrapht-core" % V.JGraphT
val hadoopHdfs = "org.apache.hadoop" % "hadoop-hdfs" % V.Hadoop excludeAll(excludeMortbayJetty, excludeJavaxServlet)
val hadoopCommon = "org.apache.hadoop" % "hadoop-common" % V.Hadoop excludeAll(excludeMortbayJetty, excludeJavaxServlet)
val beanUtils = "commons-beanutils" % "commons-beanutils" % "1.9.2"
val scallop = "org.rogach" %% "scallop" % V.Scallop
object Test {
val scalatest = "org.scalatest" %% "scalatest" % V.ScalaTest
val mockito = "org.mockito" % "mockito-all" % V.Mockito
val akkaTestKit = "com.typesafe.akka" %% "akka-testkit" % V.Akka
}
}
| sielaq/marathon | project/build.scala | Scala | apache-2.0 | 8,156 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License
*/
package org.apache.toree.plugins.dependencies
import java.net.URL
import java.lang.{ClassLoader => JClassLoader}
import scala.reflect.internal.util.ScalaClassLoader
object ClassLoaderHelper {
def URLClassLoader(urls: Seq[URL], parent: JClassLoader): ScalaClassLoader.URLClassLoader = {
new scala.reflect.internal.util.ScalaClassLoader.URLClassLoader(urls, parent)
}
}
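
// An illustrative wrapper that delegates to the helper above, falling back to
// the current thread's context class loader for anything not found in the
// given jars; the method name is an assumption for the example.
object ClassLoaderHelperUsageSketch {
  def loaderFor(jars: Seq[URL]): ScalaClassLoader.URLClassLoader =
    ClassLoaderHelper.URLClassLoader(jars, Thread.currentThread.getContextClassLoader)
}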
| lresende/incubator-toree | plugins/src/test/scala-2.12/org/apache/toree/plugins/dependencies/ClassLoaderHelper.scala | Scala | apache-2.0 | 1,193 |
/*
* Copyright 2020 Lenses.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.lenses.streamreactor.connect.aws.s3.storage
import java.io.ByteArrayOutputStream
class S3ByteArrayOutputStream extends S3OutputStream {
val wrappedOutputStream = new ByteArrayOutputStream()
var pointer: Long = 0L
override def complete: Boolean = true
override def getPointer: Long = pointer
override def write(b: Int): Unit = {
wrappedOutputStream.write(b)
pointer += 1
}
def toByteArray: Array[Byte] = wrappedOutputStream.toByteArray
override def toString: String = wrappedOutputStream.toString("UTF-8")
}
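
// An illustrative round trip showing the stream's bookkeeping: every written
// byte advances the pointer, and the accumulated bytes read back as UTF-8.
// The helper below exists only for the example.
object S3ByteArrayOutputStreamUsageSketch {
  def roundTrip(payload: String): (Long, String) = {
    val out = new S3ByteArrayOutputStream
    payload.getBytes("UTF-8").foreach(b => out.write(b.toInt))
    (out.getPointer, out.toString)
  }
}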
| datamountaineer/stream-reactor | kafka-connect-aws-s3/src/test/scala/io/lenses/streamreactor/connect/aws/s3/storage/S3ByteArrayOutputStream.scala | Scala | apache-2.0 | 1,146 |
package ly.stealth.phoenix
import java.io.File
import java.util
import java.util.UUID
import com.google.protobuf.ByteString
import ly.stealth.phoenix.Util.Str
import org.apache.log4j.Logger
import org.apache.mesos.Protos._
import org.apache.mesos.{MesosSchedulerDriver, SchedulerDriver}
import scala.collection.JavaConverters._
class Scheduler(val config: Config) extends org.apache.mesos.Scheduler {
private val logger = Logger.getLogger(this.getClass)
private var driver: SchedulerDriver = null
var cluster = new Cluster
def start() {
logger.info(s"Starting ${getClass.getSimpleName}")
val frameworkBuilder = FrameworkInfo.newBuilder()
frameworkBuilder.setUser("")
frameworkBuilder.setName(config.FrameworkName)
frameworkBuilder.setFailoverTimeout(30 * 24 * 3600)
frameworkBuilder.setCheckpoint(true)
val driver = new MesosSchedulerDriver(this, frameworkBuilder.build, config.MesosMaster)
val httpServer = new HttpServer(this)
httpServer.start()
Runtime.getRuntime.addShutdownHook(new Thread() {
override def run() {
if (driver != null) {
logger.info("Stopping driver")
driver.stop()
}
}
})
val status = if (driver.run eq Status.DRIVER_STOPPED) 0 else 1
System.exit(status)
}
override def registered(driver: SchedulerDriver, id: FrameworkID, master: MasterInfo) {
logger.info("[registered] framework:" + Str.id(id.getValue) + " master:" + Str.master(master))
this.driver = driver
}
override def offerRescinded(driver: SchedulerDriver, id: OfferID) {
logger.info("[offerRescinded] " + Str.id(id.getValue))
}
override def disconnected(driver: SchedulerDriver) {
logger.info("[disconnected]")
this.driver = null
}
override def reregistered(driver: SchedulerDriver, master: MasterInfo) {
logger.info("[reregistered] master:" + Str.master(master))
this.driver = driver
}
override def slaveLost(driver: SchedulerDriver, id: SlaveID) {
logger.info("[slaveLost] " + Str.id(id.getValue))
}
override def error(driver: SchedulerDriver, message: String) {
logger.info("[error] " + message)
}
override def statusUpdate(driver: SchedulerDriver, status: TaskStatus) {
logger.info("[statusUpdate] " + Str.taskStatus(status))
onServerStatus(driver, status)
}
override def frameworkMessage(driver: SchedulerDriver, executorId: ExecutorID, slaveId: SlaveID, data: Array[Byte]) {
logger.info("[frameworkMessage] executor:" + Str.id(executorId.getValue) + " slave:" + Str.id(slaveId.getValue) + " data: " + new String(data))
}
override def resourceOffers(driver: SchedulerDriver, offers: util.List[Offer]): Unit = {
logger.debug("[resourceOffers]\\n" + Str.offers(offers.asScala))
onResourceOffers(offers.asScala.toList)
}
private def onServerStatus(driver: SchedulerDriver, status: TaskStatus) {
val server = cluster.getServerByTaskId(status.getTaskId.getValue)
status.getState match {
case TaskState.TASK_RUNNING =>
onServerStarted(server, driver, status)
case TaskState.TASK_LOST | TaskState.TASK_FAILED | TaskState.TASK_ERROR =>
onServerFailed(server, status)
case TaskState.TASK_FINISHED | TaskState.TASK_KILLED => logger.info(s"Task ${status.getTaskId.getValue} has finished")
case _ => logger.warn("Got unexpected task state: " + status.getState)
}
}
private def onServerStarted(serverOpt: Option[Server], driver: SchedulerDriver, status: TaskStatus) {
serverOpt match {
case Some(server) if server.state != State.Running =>
logger.info(s"Changing server's (id=${server.id}) state ${server.state} -> ${State.Running}")
server.state = State.Running
case Some(server) => //status update for a running server
case None =>
logger.info(s"Got ${status.getState} for unknown/stopped server, killing task ${status.getTaskId}")
driver.killTask(status.getTaskId)
}
}
private def onServerFailed(serverOpt: Option[Server], status: TaskStatus) {
serverOpt match {
case Some(server) =>
logger.info(s"Changing server's (id=${server.id}) state ${server.state} -> ${State.Stopped}")
server.state = State.Stopped
case None => logger.info(s"Got ${status.getState} for unknown/stopped server with task ${status.getTaskId}")
}
}
private def onResourceOffers(offers: List[Offer]) {
offers.foreach { offer =>
val resourceOffer = ResourceOffer.fromMesosOffer(offer)
cluster.applicableRequest(resourceOffer) match {
case Some(server) =>
logger.info(s"Accepting offer $offer to satisfy ${server.requestTemplate}")
launchTask(offer, server)
case None =>
logger.debug(s"Declining offer ${offer.getId.getValue}")
driver.declineOffer(offer.getId)
}
}
}
private def launchTask(offer: Offer, server: Server) {
val mesosTask = newTask(offer, server.requestTemplate)
val attributes = offer.getAttributesList.asScala.toList.filter(_.hasText).map(attr => attr.getName -> attr.getText.getValue).toMap
val taskData = TaskData(mesosTask.getTaskId.getValue, mesosTask.getSlaveId.getValue, mesosTask.getExecutor.getExecutorId.getValue, attributes,
server.requestTemplate.cpus.getOrElse(0.5), server.requestTemplate.mem.getOrElse(256))
server.taskData = Some(taskData)
driver.launchTasks(util.Arrays.asList(offer.getId), util.Arrays.asList(mesosTask))
cluster.requestSatisfied(server)
logger.info(s"Launching task $taskData for offer ${offer.getId.getValue}")
}
private def newTask(offer: Offer, request: RequestTemplate): TaskInfo = {
def taskData: ByteString = {
val requiredProperties = Map(
"secor.kafka.topic_filter" -> config.SecorKafkaTopicFilter,
"aws.access.key" -> config.AwsAccessKey,
"aws.secret.key" -> config.AwsSecretKey,
"secor.s3.bucket" -> config.SecorS3Bucket,
"kafka.seed.broker.host" -> config.KafkaSeedBrokerHost,
"zookeeper.quorum" -> config.ZookeeperQuorum,
"secor.local.path" -> "./")
val data = Map("overrides" -> Util.formatMap(requiredProperties ++ request.configOverrides))
ByteString.copyFromUtf8(Util.formatMap(data))
}
val taskBuilder: TaskInfo.Builder = TaskInfo.newBuilder
.setName("task-" + UUID.randomUUID())
.setTaskId(TaskID.newBuilder.setValue(UUID.randomUUID().toString).build)
.setData(taskData)
.setSlaveId(offer.getSlaveId)
.setExecutor(newExecutor(request.id))
taskBuilder
.addResources(Resource.newBuilder.setName("cpus").setType(Value.Type.SCALAR).setScalar(Value.Scalar.newBuilder.setValue(request.cpus.getOrElse(0.2))))
.addResources(Resource.newBuilder.setName("mem").setType(Value.Type.SCALAR).setScalar(Value.Scalar.newBuilder.setValue(request.mem.getOrElse(256))))
taskBuilder.build
}
private def newExecutor(serverId: Int): ExecutorInfo = {
val cmd = "java -cp phoenix-0.1-SNAPSHOT.jar ly.stealth.phoenix.Executor"
val commandBuilder = CommandInfo.newBuilder
commandBuilder
.addUris(CommandInfo.URI.newBuilder().setValue(new File("dist/phoenix-0.1-SNAPSHOT.jar").getAbsolutePath))
.addUris(CommandInfo.URI.newBuilder().setValue(new File("dist/secor-0.2-SNAPSHOT-bin.tar.gz").getAbsolutePath).setExtract(true))
.setValue(cmd)
ExecutorInfo.newBuilder()
.setExecutorId(ExecutorID.newBuilder.setValue(serverId.toString))
.setCommand(commandBuilder)
.setName(s"secor-$serverId")
.build()
}
override def executorLost(driver: SchedulerDriver, executorId: ExecutorID, slaveId: SlaveID, status: Int) {
logger.info("[executorLost] executor:" + Str.id(executorId.getValue) + " slave:" + Str.id(slaveId.getValue) + " status:" + status)
}
def onAddServer(request: AddServerRequest): Unit = {
logger.info(s"Handling $request")
cluster.addRequest(request.asRequestTemplate)
}
def onDeleteServer(request: DeleteServerRequest): Unit = {
logger.info(s"Handling $request")
val taskIdOpt = cluster.getServer(request.id).flatMap(_.taskData.map(_.id))
taskIdOpt.foreach(taskId => driver.killTask(TaskID.newBuilder().setValue(taskId).build()))
cluster.deleteServer(request.id)
}
} | smorin/phoenix | src/main/scala/ly/stealth/phoenix/Scheduler.scala | Scala | apache-2.0 | 8,327 |
package org.automanlang.adapters.mturk.util
object XML {
def surveyAnswerFilter(n: scala.xml.Node, id: String): scala.xml.Node = {
    val toRet = (n \\ "Answer").filter { a => // get rid of question form header
      (a \\ "QuestionIdentifier").foldLeft(false) {
case (acc, q) => acc || q.text == id
}
}
toRet.head
}
}
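
// An illustrative call to surveyAnswerFilter on a tiny in-memory answer
// document; the XML shape below is assumed to mirror an MTurk question form
// answer payload and exists only for the example.
object XmlFilterSketch {
  private val sample =
    <QuestionFormAnswers>
      <Answer>
        <QuestionIdentifier>q1</QuestionIdentifier>
        <FreeText>yes</FreeText>
      </Answer>
      <Answer>
        <QuestionIdentifier>q2</QuestionIdentifier>
        <FreeText>no</FreeText>
      </Answer>
    </QuestionFormAnswers>

  // Returns the <Answer> element whose QuestionIdentifier is "q2".
  val secondAnswer: scala.xml.Node = XML.surveyAnswerFilter(sample, "q2")
}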
| dbarowy/AutoMan | libautoman/src/main/scala/org/automanlang/adapters/mturk/util/XML.scala | Scala | gpl-2.0 | 347 |
/*
* The MIT License (MIT)
*
* Copyright (c) 2013 Association du Paris Java User Group.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of
* this software and associated documentation files (the "Software"), to deal in
* the Software without restriction, including without limitation the rights to
* use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
* the Software, and to permit persons to whom the Software is furnished to do so,
* subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
* FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
* COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
* IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package library
/**
* Created by nicolas on 15/03/2014.
*/
case class VCard(firstName: Option[String],lastName: Option[String], company: Option[String], email: Option[String], website: Option[String],
phonenumber:Option[String], title:Option[String]) {
val NAME = "N:"
val COMPANY = "ORG:"
val TITLE = "TITLE:"
val PHONE = "TEL:"
val WEB = "URL:"
val EMAIL = "EMAIL:"
val ADDRESS = "ADR:"
override def toString: String = {
val sb = new StringBuffer()
sb.append("BEGIN:VCARD\\n")
if (firstName.isDefined || lastName.isDefined) {
sb.append(NAME + lastName.map(s=>s+";").getOrElse("") + firstName.getOrElse(""))
}
if (company.isDefined) {
sb.append("\\n" + COMPANY + company.get)
}
if (title.isDefined) {
sb.append("\\n" + TITLE + title.get)
}
if (phonenumber.isDefined) {
sb.append("\\n" + PHONE + phonenumber.get)
}
if (website.isDefined) {
sb.append("\\n" + WEB + website.get)
}
if (email.isDefined) {
sb.append("\\n" + EMAIL + email.get)
}
sb.append("\\nEND:VCARD")
sb.toString
  }
}
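
// An illustrative rendering of a minimal card; the field values below are
// invented for the example.
object VCardUsageSketch {
  val card = VCard(
    firstName = Some("Ada"),
    lastName = Some("Lovelace"),
    company = Some("Analytical Engines"),
    email = Some("[email protected]"),
    website = None,
    phonenumber = None,
    title = Some("Engineer"))

  // Produces a BEGIN:VCARD ... END:VCARD block with N, ORG, TITLE and EMAIL lines.
  val rendered: String = card.toString
}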
} | CodeursEnSeine/cfp-2015 | app/library/VCard.scala | Scala | mit | 2,283 |
package servlet
import org.eclipse.jgit.http.server.GitServlet
import org.eclipse.jgit.lib._
import org.eclipse.jgit.transport._
import org.eclipse.jgit.transport.resolver._
import org.slf4j.LoggerFactory
import javax.servlet.ServletConfig
import javax.servlet.ServletContext
import javax.servlet.http.HttpServletRequest
import util.{StringUtil, Keys, JGitUtil, Directory}
import util.ControlUtil._
import util.Implicits._
import service._
import WebHookService._
import org.eclipse.jgit.api.Git
import util.JGitUtil.CommitInfo
/**
* Provides Git repository via HTTP.
*
* This servlet provides only Git repository functionality.
* Authentication is provided by [[servlet.BasicAuthenticationFilter]].
*/
class GitRepositoryServlet extends GitServlet {
private val logger = LoggerFactory.getLogger(classOf[GitRepositoryServlet])
override def init(config: ServletConfig): Unit = {
setReceivePackFactory(new GitBucketReceivePackFactory())
// TODO are there any other ways...?
super.init(new ServletConfig(){
def getInitParameter(name: String): String = name match {
case "base-path" => Directory.RepositoryHome
case "export-all" => "true"
case name => config.getInitParameter(name)
}
def getInitParameterNames(): java.util.Enumeration[String] = {
config.getInitParameterNames
}
def getServletContext(): ServletContext = config.getServletContext
def getServletName(): String = config.getServletName
})
super.init(config)
}
}
class GitBucketReceivePackFactory extends ReceivePackFactory[HttpServletRequest] {
private val logger = LoggerFactory.getLogger(classOf[GitBucketReceivePackFactory])
override def create(request: HttpServletRequest, db: Repository): ReceivePack = {
val receivePack = new ReceivePack(db)
val pusher = request.getAttribute(Keys.Request.UserName).asInstanceOf[String]
logger.debug("requestURI: " + request.getRequestURI)
logger.debug("pusher:" + pusher)
defining(request.paths){ paths =>
val owner = paths(1)
      val repository = paths(2).replaceFirst("\\.git$", "")
val baseURL = request.getRequestURL.toString.replaceFirst("/git/.*", "")
logger.debug("repository:" + owner + "/" + repository)
logger.debug("baseURL:" + baseURL)
if(!repository.endsWith(".wiki")){
receivePack.setPostReceiveHook(new CommitLogHook(owner, repository, pusher, baseURL))
}
receivePack
}
}
}
import scala.collection.JavaConverters._
class CommitLogHook(owner: String, repository: String, pusher: String, baseURL: String) extends PostReceiveHook
with RepositoryService with AccountService with IssuesService with ActivityService with PullRequestService with WebHookService {
private val logger = LoggerFactory.getLogger(classOf[CommitLogHook])
def onPostReceive(receivePack: ReceivePack, commands: java.util.Collection[ReceiveCommand]): Unit = {
try {
using(Git.open(Directory.getRepositoryDir(owner, repository))) { git =>
commands.asScala.foreach { command =>
logger.debug(s"commandType: ${command.getType}, refName: ${command.getRefName}")
val commits = command.getType match {
case ReceiveCommand.Type.DELETE => Nil
case _ => JGitUtil.getCommitLog(git, command.getOldId.name, command.getNewId.name)
}
val refName = command.getRefName.split("/")
val branchName = refName.drop(2).mkString("/")
          // Extract new commits and apply issue comments
val newCommits = if(commits.size > 1000){
val existIds = getAllCommitIds(owner, repository)
commits.flatMap { commit =>
if(!existIds.contains(commit.id)){
createIssueComment(commit)
Some(commit)
} else None
}
} else {
commits.flatMap { commit =>
if(!existsCommitId(owner, repository, commit.id)){
createIssueComment(commit)
Some(commit)
} else None
}
}
          // batch insert all new commit ids
insertAllCommitIds(owner, repository, newCommits.map(_.id))
// record activity
if(refName(1) == "heads"){
command.getType match {
case ReceiveCommand.Type.CREATE => recordCreateBranchActivity(owner, repository, pusher, branchName)
case ReceiveCommand.Type.UPDATE => recordPushActivity(owner, repository, pusher, branchName, newCommits)
case ReceiveCommand.Type.DELETE => recordDeleteBranchActivity(owner, repository, pusher, branchName)
case _ =>
}
} else if(refName(1) == "tags"){
command.getType match {
case ReceiveCommand.Type.CREATE => recordCreateTagActivity(owner, repository, pusher, branchName, newCommits)
case ReceiveCommand.Type.DELETE => recordDeleteTagActivity(owner, repository, pusher, branchName, newCommits)
case _ =>
}
}
if(refName(1) == "heads"){
command.getType match {
case ReceiveCommand.Type.CREATE |
ReceiveCommand.Type.UPDATE |
ReceiveCommand.Type.UPDATE_NONFASTFORWARD =>
updatePullRequests(branchName)
case _ =>
}
}
// call web hook
getWebHookURLs(owner, repository) match {
case webHookURLs if(webHookURLs.nonEmpty) =>
for(pusherAccount <- getAccountByUserName(pusher);
ownerAccount <- getAccountByUserName(owner);
repositoryInfo <- getRepository(owner, repository, baseURL)){
callWebHook(owner, repository, webHookURLs,
WebHookPayload(git, pusherAccount, command.getRefName, repositoryInfo, newCommits, ownerAccount))
}
case _ =>
}
}
}
// update repository last modified time.
updateLastActivityDate(owner, repository)
} catch {
case ex: Exception => {
logger.error(ex.toString, ex)
throw ex
}
}
}
private def createIssueComment(commit: CommitInfo) = {
StringUtil.extractIssueId(commit.fullMessage).foreach { issueId =>
if(getIssue(owner, repository, issueId).isDefined){
getAccountByMailAddress(commit.mailAddress).foreach { account =>
createComment(owner, repository, account.userName, issueId.toInt, commit.fullMessage + " " + commit.id, "commit")
}
}
}
}
/**
* Fetch pull request contents into refs/pull/${issueId}/head and update pull request table.
*/
private def updatePullRequests(branch: String) =
getPullRequestsByRequest(owner, repository, branch, false).foreach { pullreq =>
if(getRepository(pullreq.userName, pullreq.repositoryName, baseURL).isDefined){
using(Git.open(Directory.getRepositoryDir(pullreq.userName, pullreq.repositoryName))){ git =>
git.fetch
.setRemote(Directory.getRepositoryDir(owner, repository).toURI.toString)
.setRefSpecs(new RefSpec(s"refs/heads/${branch}:refs/pull/${pullreq.issueId}/head").setForceUpdate(true))
.call
val commitIdTo = git.getRepository.resolve(s"refs/pull/${pullreq.issueId}/head").getName
updateCommitIdTo(pullreq.userName, pullreq.repositoryName, pullreq.issueId, commitIdTo)
}
}
}
}
| odz/gitbucket | src/main/scala/servlet/GitRepositoryServlet.scala | Scala | apache-2.0 | 7,556 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.dllib.nn.mkldnn
import breeze.linalg.Axis._1
import breeze.linalg.dim
import com.intel.analytics.bigdl.Module
import com.intel.analytics.bigdl.dllib.feature.dataset.MiniBatch
import com.intel.analytics.bigdl.mkl.{MKL, Memory}
import com.intel.analytics.bigdl.dllib.nn.{DetectionOutputSSD, PriorBox}
import com.intel.analytics.bigdl.dllib.nn.abstractnn.{AbstractModule, Activity, DataFormat, TensorModule}
import com.intel.analytics.bigdl.dllib.nn.mkldnn.Phase.{InferencePhase, TrainingPhase}
import com.intel.analytics.bigdl.dllib.tensor.Tensor
import com.intel.analytics.bigdl.dllib.tensor.TensorNumericMath.TensorNumeric
import com.intel.analytics.bigdl.dllib.utils.Engine
import com.intel.analytics.bigdl.dllib.utils.{Util => NNUtils, _}
import com.intel.analytics.bigdl.dllib.utils._
import org.apache.logging.log4j.LogManager
/**
* wrap blas module to dnn module,
* and the module should have implemented "computeOutputShape" func.
* @param module
*/
private[bigdl] class BlasWrapper(val module: AbstractModule[Activity, Activity, Float])
extends MklDnnLayer {
require(!module.isInstanceOf[MklDnnModule], "Only support wrapper blas layer to dnn layer")
output = module.output
gradInput = module.gradInput
  // reminder: for dim 3 the layout may be ntc or tnc; for now we only support ntc
private def getFormats(dims: Int): Int = {
dims match {
case 4 => Memory.Format.nchw
case 3 => Memory.Format.ntc
case 2 => Memory.Format.nc
case 1 => Memory.Format.x
case _ => throw new UnsupportedOperationException(s"UnSupported dims ${dims}")
}
}
private def getHeapFormats(in: MemoryData): Int = {
if (in.heapFormat == -1 || in.shape.length != 4) {
getFormats(in.shape.length)
} else in.heapFormat
}
private[mkldnn] var needOutputFormats: Boolean = true
@transient private lazy val logger = LogManager.getLogger(getClass)
@transient private var subModels: Array[Module[Float]] = _
@transient private var subModelNumber : Int = 1
@transient private var withMultiThread: Boolean = false
@transient private var inputBuffer : Array[Activity] = _
@transient private var tensorBuffer : Array[Tensor[Float]] = _
@transient private var batchSize : Int = _
@transient private var initEnv: Boolean = false
private def inferInputFormats(inputs: Array[MemoryData]): Array[MemoryData] = {
inputs.map(in => {
val heap = if (in.layout == Memory.Format.tnc) {
val size = in.shape
HeapData(Array(size(1), size(0), size(2)), Memory.Format.ntc)
} else {
HeapData(in.shape, getHeapFormats(in))
}
heap.setHeapFormat(in.heapFormat)
})
}
private def inferOutputFormats(inputs: Array[MemoryData]): Array[MemoryData] = {
val inputShape = inputs.map(in => Shape(in.getHeapShape()))
val outputShape = if (inputShape.length == 1) {
List(module.computeOutputShape(inputShape(0)))
} else {
// multi shape
val out = module.computeOutputShape(MultiShape(inputShape.toList))
if (out.isInstanceOf[MultiShape]) out.toMulti() else List(out)
}
outputShape.map(in => {
val size = in.toSingle().toArray
val f = if (size.length == 4 && inputs(0).heapFormat == Memory.Format.nhwc) {
Memory.Format.nhwc
} else getFormats(size.length)
val outSize = if (f == Memory.Format.nhwc) {
Array(size(0), size(3), size(1), size(2))
} else size
HeapData(outSize, f).setHeapFormat(f)
}).toArray
}
/**
   * Blas layers normally do not have competitive performance when running under mkldnn,
   * so we leverage multi-threading to resolve the bottleneck introduced by running only
   * one model on the mkl-dnn backend. The parallelism is determined by both batch size
   * and core number, with the restriction that both input and output must be batched.
*/
private def setMultiThreadEnv(input: Activity): Unit = {
initEnv = true
val multiThread = System.getProperty("multiThread", "false").toBoolean
if (this.train && multiThread) {
throw new IllegalArgumentException("Please not set multiThread to true for model training")
}
if (this.train
|| !multiThread
|| (_outputFormats != null && _outputFormats.length != 1)
|| (_outputFormats != null && _inputFormats != null
&& _inputFormats(0).shape(0) != _outputFormats(0).shape(0))
|| !flattenInput(input)
) {
return
}
batchSize = tensorBuffer(0).size(1)
val residue = batchSize % Engine.coreNumber()
if (residue != 0 || batchSize < 2 || Engine.coreNumber() < 2) {
logger.warn("If you want to use multiThread property to speed up, " +
"please attention core number should be greater than 1, " +
s"batch size should be greater than 1 and divided by core number, " +
s"but now get core number ${Engine.coreNumber()} batch size ${batchSize}")
return
}
subModelNumber = Engine.coreNumber()
initModules()
withMultiThread = true
}
private def flattenInput(input: Activity): Boolean = {
val inputDepth = if (input.isTensor) 1 else input.toTable.length()
if (tensorBuffer == null) tensorBuffer = new Array[Tensor[Float]](inputDepth)
var batch : Int = 0
if (inputDepth == 1) {
tensorBuffer(0) = input.toTensor[Float]
} else {
val in = input.toTable
for (i <- 1 to in.length()) {
if (in.get(i).get.isInstanceOf[Table]) return false
tensorBuffer(i - 1) = in.get[Tensor[Float]](i).get
if (i == 1) batch = tensorBuffer(i - 1).size(1)
        // Note: not all inputs of DetectionOutputSSD are batched, but the non-batched
        // input can be shared across the whole batch, so this layer can still be parallelized.
if (batch != tensorBuffer(i - 1).size(1)
&& !module.isInstanceOf[DetectionOutputSSD[Float]]) {
return false
}
}
}
true
}
private def initModules(): Unit = {
subModels = if (module.parameters() != null) {
val wb = NNUtils.getAndClearWeightBias(module.parameters())
val models = (1 to subModelNumber).map(i => {
val m = module.cloneModule()
NNUtils.putWeightBias(wb, m)
m.asInstanceOf[Module[Float]]
}).toArray
NNUtils.putWeightBias(wb, module)
models
} else {
val models = (1 to subModelNumber).map(i => {
val m = module.cloneModule()
m.asInstanceOf[Module[Float]]
}).toArray
models
}
}
override private[mkldnn] def initFwdPrimitives(inputs: Array[MemoryData], phase: Phase) = {
_inputFormats = inferInputFormats(inputs)
_outputFormats = if (needOutputFormats) inferOutputFormats(_inputFormats) else null
if (_outputFormats != null) {
_outputFormats.map(_.getPrimitive(runtime))
}
(_inputFormats, _outputFormats)
}
override private[mkldnn] def initBwdPrimitives(grad: Array[MemoryData], phase: Phase) = {
_gradOutputFormats = _outputFormats
_gradInputFormats = _inputFormats
(_gradOutputFormats, _gradInputFormats)
}
override private[mkldnn] def initGradWPrimitives(grad: Array[MemoryData], phase: Phase) = {
_gradOutputFormatsForWeight = _outputFormats
_gradOutputFormatsForWeight
}
private def getInput(dim: Int, index: Int, size: Int): Activity = {
if (tensorBuffer.length == 1) {
tensorBuffer(0).narrow(dim, index, size)
} else {
      // The third input tensor of DetectionOutputSSD is not batched,
      // but it can be shared across the whole batch.
if (module.isInstanceOf[DetectionOutputSSD[Float]]) {
T(tensorBuffer(0).narrow(dim, index, size),
tensorBuffer(1).narrow(dim, index, size), tensorBuffer(2))
} else {
T.array(tensorBuffer.map(_.narrow(dim, index, size)))
}
}
}
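  // Illustrative sketch (not part of the original source): the 1-based (offset, length) pairs
  // used when forwardInParallel below slices the batch dimension into equal per-core stacks.
  private def stackOffsets(batchSize: Int, subModelNumber: Int): Seq[(Int, Int)] = {
    val stackSize = batchSize / subModelNumber
    (0 until subModelNumber).map(i => (i * stackSize + 1, stackSize))
  }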
private def forwardInParallel(input: Activity): Activity = {
if (inputBuffer == null) inputBuffer = new Array[Activity](subModelNumber)
val stackSize = batchSize / subModelNumber
val tasks = Engine.wrapperComputing.invoke((0 until subModelNumber).map(i =>
() => inputBuffer(i) = getInput(1, i * stackSize + 1, stackSize)))
Engine.wrapperComputing.sync(tasks)
val forwardThreads = Engine.wrapperComputing.invoke((0 until subModelNumber).map(i =>
() => subModels(i).forward(inputBuffer(i)).toTensor[Float]))
Engine.wrapperComputing.sync(forwardThreads)
if (subModels(0).output.isTable) {
withMultiThread = false
module.forward(input)
} else {
val subOutSize = subModels(0).output.toTensor[Float].size()
if (subOutSize(0) != stackSize) {
withMultiThread = false
module.forward(input)
} else {
subOutSize(0) = batchSize
if (output == null || output.toTensor[Float].isEmpty) {
output = Tensor[Float]().resize(subOutSize)
}
val copyThreads = Engine.wrapperComputing.invoke((0 until subModelNumber).map(i =>
() => {
output.toTensor[Float].narrow(1, i * stackSize + 1, stackSize)
.copy(subModels(i).output.toTensor[Float])
}))
Engine.wrapperComputing.sync(copyThreads)
output
}
}
}
override def updateOutput(input: Activity): Activity = {
if (!initEnv) setMultiThreadEnv(input)
output = if (withMultiThread) {
forwardInParallel(input)
} else {
module.forward(input)
}
output
}
override def updateGradInput(input: Activity, gradOutput: Activity): Activity = {
gradInput = module.updateGradInput(input, gradOutput)
gradInput
}
override def accGradParameters(input: Activity, gradOutput: Activity): Unit = {
module.accGradParameters(input, gradOutput)
}
override def clearState() : this.type = {
super.clearState()
module.clearState()
this
}
override def parameters(): (Array[Tensor[Float]], Array[Tensor[Float]]) = {
module.parameters()
}
override def equals(obj: Any): Boolean = {
if (!super.equals(obj) || !obj.isInstanceOf[BlasWrapper]) {
return false
}
val other = obj.asInstanceOf[BlasWrapper]
if (this.eq(other)) {
return true
}
if (module != other) return false
true
}
override def hashCode() : Int = {
val seed = 37
var hash = super.hashCode()
hash = hash * seed + module.hashCode()
hash
}
override def training(): this.type = {
train = true
module.training()
this
}
/**
* Set the module to evaluate mode
* @return
*/
override def evaluate(): this.type = {
train = false
module.evaluate()
this
}
override def release(): Unit = module.release()
}
private[bigdl] object BlasWrapper {
def apply(module: AbstractModule[Activity, Activity, Float]): BlasWrapper =
new BlasWrapper(module)
}
| intel-analytics/BigDL | scala/dllib/src/main/scala/com/intel/analytics/bigdl/dllib/nn/mkldnn/BlasWrapper.scala | Scala | apache-2.0 | 11,469 |
/**
* Copyright (C) 2010-2011 LShift Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.lshift.diffa.kernel.differencing
import org.slf4j.{Logger, LoggerFactory}
import net.lshift.diffa.kernel.events.VersionID
import net.lshift.diffa.kernel.util.MissingObjectException
import net.lshift.diffa.kernel.lifecycle.{NotificationCentre, AgentLifecycleAware}
import net.lshift.diffa.kernel.config.system.SystemConfigStore
import net.lshift.diffa.kernel.config.{PairRef, DiffaPairRef, Endpoint, DomainConfigStore}
import org.joda.time.{DateTime, Interval}
import net.lshift.diffa.kernel.frontend.DomainPairDef
import net.lshift.diffa.kernel.escalation.EscalationHandler
/**
* Standard implementation of the DifferencesManager.
*
* Terminology:
 * - Pending events are events that have resulted in differences, but for which the matching manager is still
 *   waiting on a timeout;
 * - Reportable events are events that have resulted in differences, and for which the matching manager's
 *   window has expired.
*
* Events sent to clients all have sequence identifiers, allowing clients to incrementally update. Internally, the
* differences manager will not allocate a sequence number for an event until an event goes reportable, since many events
* are likely to be generated internally in normal flows that will never be shown externally (eg, a message sent from
* A -> B will likely be marked as mismatched by the differencing engine for a short period of time, but be suppressed
* whilst the matching manager waits for it to expire).
*/
class DefaultDifferencesManager(
val systemConfig:SystemConfigStore,
val domainConfig:DomainConfigStore,
val domainDifferenceStore:DomainDifferenceStore,
val differenceListener:DifferencingListener,
val escalationHandler:EscalationHandler)
extends DifferencesManager
with DifferencingListener with AgentLifecycleAware {
private val log:Logger = LoggerFactory.getLogger(getClass)
//private val participants = new HashMap[Endpoint, Participant]
// Subscribe to events from the matching manager
//matching.addListener(this)
//
// DifferencesManager Implementation
//
def createDifferenceWriter(space:Long, pair:String, overwrite: Boolean) = new DifferenceWriter {
// Record when we started the write so all differences can be tagged
val writerStart = new DateTime
val pairRef = PairRef(pair,space)
var latestStoreVersion:Long = domainDifferenceStore.lastRecordedVersion(pairRef) match {
case Some(version) => version
case None => 0L
}
def writeMismatch(id: VersionID, lastUpdate: DateTime, upstreamVsn: String, downstreamVsn: String, origin: MatchOrigin, storeVersion:Long) {
onMismatch(id, lastUpdate, upstreamVsn, downstreamVsn, origin, Unfiltered)
if (storeVersion > latestStoreVersion) {
latestStoreVersion = storeVersion
}
}
/*
def evictTombstones(tombstones:Iterable[Correlation]) {
tombstones.foreach(t => onMatch(t.asVersionID, t.upstreamVsn, TriggeredByScan))
}
*/
def abort() {
// Nothing to do
}
def close() {
domainDifferenceStore.recordLatestVersion(pairRef, latestStoreVersion)
}
}
def retrieveDomainSequenceNum(space:Long) = domainDifferenceStore.currentSequenceId(space)
def retrieveAggregates(pair:PairRef, start:DateTime, end:DateTime, aggregation:Option[Int]) =
domainDifferenceStore.retrieveAggregates(pair, start, end, aggregation)
def ignoreDifference(space:Long, seqId:String) = {
domainDifferenceStore.ignoreEvent(space, seqId)
}
def unignoreDifference(space:Long, seqId:String) = {
domainDifferenceStore.unignoreEvent(space, seqId)
}
def lastRecordedVersion(pair:PairRef) = domainDifferenceStore.lastRecordedVersion(pair)
def retrieveAllEventsInInterval(space:Long, interval:Interval) =
domainDifferenceStore.retrieveUnmatchedEvents(space, interval)
def retrievePagedEvents(space:Long, pairKey:String, interval:Interval, offset:Int, length:Int, options:EventOptions) =
domainDifferenceStore.retrievePagedEvents(PairRef(name = pairKey, space = space), interval, offset, length, options)
def countEvents(space:Long, pairKey: String, interval: Interval) =
domainDifferenceStore.countUnmatchedEvents(PairRef(name = pairKey, space = space), interval.getStart, interval.getEnd)
/*
def retrieveEventDetail(space:Long, evtSeqId:String, t: ParticipantType.ParticipantType) = {
log.trace("Requested a detail query for domain (" + space + ") and seq (" + evtSeqId + ") and type (" + t + ")")
// TODO This really needs refactoring :-(
t match {
case ParticipantType.UPSTREAM => {
withValidEvent(space, evtSeqId,
{e:DifferenceEvent => e.upstreamVsn != null},
{p:DomainPairDef => p.upstreamName},
participantFactory.createUpstreamParticipant)
}
case ParticipantType.DOWNSTREAM => {
withValidEvent(space, evtSeqId,
{e:DifferenceEvent => e.downstreamVsn != null},
{p:DomainPairDef => p.downstreamName},
participantFactory.createDownstreamParticipant)
}
}
}
*/
// TODO The fact that 3 lambdas are passed in probably indicates bad factoring
// -> the adapter factory call is probably low hanging fruit for refactoring
/*
private def withValidEvent(space:Long, evtSeqId:String,
check:Function1[DifferenceEvent,Boolean],
resolve:(DomainPairDef) => String,
p:(Endpoint, PairRef) => Participant): String = {
val event = domainDifferenceStore.getEvent(space, evtSeqId)
check(event) match {
case false => "Expanded detail not available"
case true => {
val id = event.objId
val pair = domainConfig.getPairDef(id.pair)
val endpointName = resolve(pair)
val endpoint = domainConfig.getEndpoint(space, endpointName)
if (endpoint.contentRetrievalUrl != null) {
if (!participants.contains(endpoint)) {
participants(endpoint) = p(endpoint, pair.asRef)
}
val participant = participants(endpoint)
participant.retrieveContent(id.id)
} else {
"Content retrieval not supported"
}
}
}
}
*/
//
// Lifecycle Management
//
override def onAgentInstantiationCompleted(nc: NotificationCentre) {
nc.registerForDifferenceEvents(this, Unfiltered)
}
//
// Differencing Input
//
/**
* This is the callback that channels mismatch events from the version policy into the domain cache.
*
* Queries the matching manager to see if it is actively monitoring this VersionID (ie, it has unexpired events around it).
* If yes -> just record it as a pending event. Don't tell clients anything yet.
* If no -> this is a reportable event. Record it in the active list, and emit an event to our clients.
*/
def onMismatch(id: VersionID, lastUpdate:DateTime, upstreamVsn: String, downstreamVsn: String, origin:MatchOrigin, level:DifferenceFilterLevel) = {
log.trace("Processing mismatch for " + id + " with upstreamVsn '" + upstreamVsn + "' and downstreamVsn '" + downstreamVsn + "'")
/*
matching.getMatcher(id.pair) match {
case Some(matcher) => {
matcher.isVersionIDActive(id) match {
case true => reportPending(id, lastUpdate, upstreamVsn, downstreamVsn, origin)
case false => reportUnmatched(id, lastUpdate, upstreamVsn, downstreamVsn, origin)
}
}
case None => {
// If no matcher is configured, then report mis-matches immediately
reportUnmatched(id, lastUpdate, upstreamVsn, downstreamVsn, origin)
}
}
*/
}
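  // Sketch of the dispatch described in the comment above (the matcher-based branch is currently
  // commented out). Reinstating it would look roughly like the following, where
  // matcherIsActiveFor is a hypothetical stand-in for the matching manager lookup:
  //
  //   if (matcherIsActiveFor(id)) reportPending(id, lastUpdate, upstreamVsn, downstreamVsn, origin)
  //   else reportUnmatched(id, lastUpdate, upstreamVsn, downstreamVsn, origin)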
/**
* This is the callback that channels match events from the version policy into the domain cache.
   * If the ID is currently in our list of reportable events, generate a match event to reverse it,
   * and close the reportable unmatched event.
   * If the ID is currently in our list of pending events, simply remove the ID from that list.
   * If we don't know about this ID (no mismatches reported for it), just ignore it.
*/
def onMatch(id: VersionID, vsn: String, origin:MatchOrigin) {
if (log.isTraceEnabled) {
log.trace("Processing match for " + id + " with version '" + vsn + "'")
}
addMatched(id, vsn)
}
//
// Matching Status Input
//
def onDownstreamExpired(id: VersionID, vsn: String) = upgradePending(id)
def onUpstreamExpired(id: VersionID, vsn: String) = upgradePending(id)
/**
* This event is unimportant from the perspective of maintaining the domain, hence just drop it
*/
def onPaired(id: VersionID, vsn: String) = cancelPending(id, vsn)
//
// Configuration Change Notifications
//
// Internal plumbing
/**
   * When pairs are updated, perform a differencing run to refresh their status.
*/
def onUpdatePair(pairRef: PairRef) {
}
def onDeletePair(pair: PairRef) {
domainDifferenceStore.removePair(pair)
}
def onUpdateDomain(space:Long) {
}
def onDeleteDomain(space:Long) {
domainDifferenceStore.removeDomain(space)
}
//
// Visible Difference Reporting
//
def reportPending(id:VersionID, lastUpdate:DateTime, upstreamVsn: String, downstreamVsn: String, origin: MatchOrigin) {
log.trace("Report pending for %s at %s, upstream %s, downstream %s, origin %s".format(id,lastUpdate, upstreamVsn, downstreamVsn, origin))
// TODO: Record origin as well
domainDifferenceStore.addPendingUnmatchedEvent(id, lastUpdate, upstreamVsn, downstreamVsn, new DateTime)
// TODO: Generate external event for pending difference?
}
def reportUnmatched(id:VersionID, lastUpdate:DateTime, upstreamVsn: String, downstreamVsn: String, origin: MatchOrigin) {
log.trace("Report unmatched for %s at %s, upstream %s, downstream %s, origin %s".format(id,lastUpdate, upstreamVsn, downstreamVsn, origin))
val (status, event) = domainDifferenceStore.addReportableUnmatchedEvent(id, lastUpdate, upstreamVsn, downstreamVsn, new DateTime)
differenceListener.onMismatch(id, lastUpdate, upstreamVsn, downstreamVsn, origin, MatcherFiltered)
status match {
case NewUnmatchedEvent | ReturnedUnmatchedEvent => escalationHandler.initiateEscalation(event)
case _ => // The event is either unchanged or just updated. Don't start escalation.
}
}
def addMatched(id:VersionID, vsn:String) {
log.trace("Add matched, id = %s, vsn = %s".format(id,vsn))
domainDifferenceStore.addMatchedEvent(id, vsn)
// TODO: Generate external event for matched? (Interested parties will already have seen the raw event)
}
def upgradePending(id:VersionID) {
val evt = domainDifferenceStore.upgradePendingUnmatchedEvent(id)
if (evt != null) {
log.trace("Processing upgrade from pending to unmatched for " + id)
differenceListener.onMismatch(id, evt.detectedAt, evt.upstreamVsn, evt.downstreamVsn, LiveWindow, MatcherFiltered)
} else {
log.trace("Skipped upgrade from pending to unmatched for " + id + " as the event was not pending")
}
}
def cancelPending(id:VersionID, vsn:String) {
val wasDeleted = domainDifferenceStore.cancelPendingUnmatchedEvent(id, vsn)
if (wasDeleted) {
log.trace("Cancelling pending event for " + id)
}
}
}
| 0x6e6562/diffa | kernel/src/main/scala/net/lshift/diffa/kernel/differencing/DefaultDifferencesManager.scala | Scala | apache-2.0 | 11,996 |
/**
* Digi-Lib-Test - various test helpers for Digi components
*
* Copyright (c) 2012-2013 Alexey Aksenov [email protected]
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.digimead.lib.test
import java.io.{ BufferedInputStream, ByteArrayInputStream, File, FileInputStream, FileOutputStream, FileWriter, InputStream, OutputStream }
import java.math.BigInteger
import java.nio.channels.FileChannel
import java.security.{ DigestInputStream, MessageDigest }
import scala.annotation.tailrec
/**
 * Adds file routines to the testing infrastructure.
*/
trait StorageHelper {
/** Recursively copy a folder or a file */
def copy(from: File, to: File): Unit =
if (from.isDirectory())
Option(from.listFiles()) match {
      case Some(files) ⇒
        to.mkdirs()
        files.foreach(file ⇒ copy(file, new File(to, file.getName())))
      case None ⇒
}
else
copyFile(from, to)
/** Copy a file */
def copyFile(sourceFile: File, destFile: File): Boolean = {
if (!destFile.exists())
destFile.createNewFile()
var source: FileChannel = null
var destination: FileChannel = null
try {
source = new FileInputStream(sourceFile).getChannel()
destination = new FileOutputStream(destFile).getChannel()
destination.transferFrom(source, 0, source.size())
} finally {
if (source != null) {
source.close()
}
if (destination != null) {
destination.close()
}
}
sourceFile.length == destFile.length
}
/** Recursively delete a folder or delete a file */
def deleteFolder(folder: File): Unit = {
assert(folder != null, "folder must be non-null")
    for (f ← Option(folder.listFiles) getOrElse { Helper.logwarn(getClass, "Folder %s does not exist or is not a directory".format(folder)); Array[File]() }) {
if (f.isDirectory) {
deleteFolder(f)
} else {
f.delete
}
}
folder.delete
}
/** Calculate digest for a byte array. */
def digest(data: Array[Byte]): Option[String] = digest(data, "SHA-1")
/** Calculate digest for a byte array. */
def digest(data: Array[Byte], algorithm: String): Option[String] =
digest(new ByteArrayInputStream(data), algorithm)
/** Calculate digest for a file. */
def digest(file: File): Option[String] = digest(file, "SHA-1")
/** Calculate digest for a file. */
def digest(file: File, algorithm: String): Option[String] =
digest(new BufferedInputStream(new FileInputStream(file)), algorithm)
/** Calculate digest for a stream and close it. */
def digest(stream: InputStream, algorithm: String = "SHA-1"): Option[String] = {
val md = MessageDigest.getInstance(algorithm)
var is: InputStream = stream
try {
is = new DigestInputStream(is, md)
val buffer = new Array[Byte](1024)
var read = is.read(buffer)
while (read != -1)
read = is.read(buffer)
} catch {
      case e: Throwable ⇒
Helper.logwarn(getClass, "Unable to calculate digest: " + e.getMessage())
return None
} finally {
is.close()
}
val bigInt = new BigInteger(1, md.digest())
Some(String.format("%32s", bigInt.toString(16)).replace(' ', '0'))
}
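  /** Illustrative helper (not part of the original trait): digest of an in-memory UTF-8 string. */
  def digestOfString(text: String, algorithm: String = "SHA-1"): Option[String] =
    digest(text.getBytes("UTF-8"), algorithm)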
/** Write to a file */
def writeToFile(file: File, text: String) {
val fw = new FileWriter(file)
try { fw.write(text) }
finally { fw.close }
}
/** Write to a stream */
def writeToStream(in: InputStream, out: OutputStream, bufferSize: Int = 8192) {
val buffer = new Array[Byte](bufferSize)
@tailrec
def next(exit: Boolean = false) {
if (exit) {
in.close()
out.close()
return
}
val read = in.read(buffer)
if (read > 0)
out.write(buffer, 0, read)
next(read == -1)
}
next()
}
/** Allocate temporary folder for code block */
  def withTempFolder[T](f: (File) ⇒ T): Unit = {
val tempFolder = System.getProperty("java.io.tmpdir")
var folder: File = null
do {
folder = new File(tempFolder, "scala-test-" + System.currentTimeMillis)
} while (!folder.mkdir)
try {
f(folder)
} finally {
deleteFolder(folder)
}
}
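  /** Illustrative usage (not part of the original trait): a scratch file that lives only for the block. */
  def withScratchFile[T](name: String)(f: File ⇒ T): Unit =
    withTempFolder { folder ⇒
      val file = new File(folder, name)
      writeToFile(file, "")
      f(file)
    }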
/** Iterate over directory recursively */
  def visitPath[T](path: File, visitor: File ⇒ T) {
val list = path.listFiles()
if (list == null) return
    for (f ← list) {
if (f.isDirectory())
visitPath(f, visitor)
visitor(f)
}
}
}
| ezh/digi-lib-test | src/main/scala/org/digimead/lib/test/StorageHelper.scala | Scala | apache-2.0 | 4,957 |
package com.ctask.utils
import org.scalatest.concurrent.Conductors
import org.scalatest.{BeforeAndAfterEach, FlatSpec, Matchers}
/**
* Spec file for UniqueId
*/
class UniqueIdSpec extends FlatSpec with Matchers with Conductors with BeforeAndAfterEach {
override def afterEach(): Unit = {
super.afterEach()
UniqueId.reset()
}
behavior of "UniqueId"
it should "generate monotonically increasing ids" in {
UniqueId.getUniqueId shouldBe 1
UniqueId.getUniqueId shouldBe 2
UniqueId.getUniqueId shouldBe 3
}
it should "be able to handle multiple threads asking for a unique id" in {
val conductor = new Conductor
import conductor._
var id1 = 0L
var id2 = 0L
var id3 = 0L
var id4 = 0L
var id5 = 0L
var id6 = 0L
threadNamed("t1") {
id1 = UniqueId.getUniqueId
id1 should not be 0
}
threadNamed("t2") {
id2 = UniqueId.getUniqueId
id2 should not be 0
}
threadNamed("t3") {
id3 = UniqueId.getUniqueId
id3 should not be 0
}
threadNamed("t4") {
id4 = UniqueId.getUniqueId
id4 should not be 0
}
threadNamed("t5") {
id5 = UniqueId.getUniqueId
id5 should not be 0
}
threadNamed("t6") {
id6 = UniqueId.getUniqueId
id6 should not be 0
}
whenFinished {
val ids = Set(id1, id2, id3, id4, id5, id6)
ids.size shouldBe 6
ids.max shouldBe 6
}
}
it should "be able to reset the initial value" in {
UniqueId.getUniqueId shouldBe 1
val customId = 5
UniqueId.reset(customId)
UniqueId.getUniqueId shouldBe customId
}
}
| modsrm/ctask | common/src/test/scala/com/ctask/utils/UniqueIdSpec.scala | Scala | gpl-3.0 | 1,635 |
package com.twitter.finagle.context
/**
* [[com.twitter.finagle.context.Context]]s that are managed by Finagle.
*/
object Contexts {
/**
* Local contexts have lifetimes bound by Finagle server requests.
* They are local to the process.
*/
val local: LocalContext = new LocalContext
/**
* Broadcast contexts may be marshalled and transmitted across
* process boundaries. Finagle clients typically marshal the
* current context state for outbound requests; Finagle servers
* receive marshalled contexts and restore them before dispatching
* a new request.
*
* Thus broadcast contexts are transmitted throughout an entire
* request tree, so long as the protocols involved support
* marshalled context values.
*/
val broadcast: MarshalledContext = new MarshalledContext
/**
* Executes `fn` with all Finagle [[Context contexts]] cleared.
*
* This should be used when Finagle needs to execute work
* that is not request scoped but may be triggered by something
* that is part of the request. For example, processing a user
* request may create a "background Future-loop" which should
* not capture these request scoped values.
*/
def letClear[R](fn: => R): R =
local.letClear() {
broadcast.letClear() {
fn
}
}
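  // Usage sketch (illustrative, not part of the original object): detach background work from
  // the current request's context state so it does not capture request-scoped values.
  //
  //   val detached = Contexts.letClear {
  //     scheduleBackgroundCleanup() // hypothetical request-triggered background task
  //   }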
}
| adriancole/finagle | finagle-core/src/main/scala/com/twitter/finagle/context/Contexts.scala | Scala | apache-2.0 | 1,315 |
package uk.ac.ncl.openlab.intake24.sql.tools.food.migrations
import java.io.FileReader
import anorm.{BatchSql, NamedParameter, SqlParser, ~, _}
import au.com.bytecode.opencsv.CSVReader
import org.rogach.scallop.ScallopConf
import uk.ac.ncl.openlab.intake24.sql.tools.{DatabaseConfigurationOptions, MigrationRunner}
import scala.collection.JavaConverters._
import scala.language.reflectiveCalls
object FoodV14_CreateMasterNutrientsList extends App with MigrationRunner {
val options = new ScallopConf(args) with DatabaseConfigurationOptions {
val nutrientsList = opt[String](required = true)
}
options.verify()
private case class NutrientRow(id: Long, description: String, unit: Long)
runMigration(14l, 15l, options) {
implicit connection =>
val unitIds = SQL("SELECT id, symbol FROM nutrient_units").executeQuery().as((SqlParser.long("id") ~ SqlParser.str("symbol")).*).map {
case id ~ symbol => symbol -> id
}.toMap
println("Parsing nutrient list CSV...")
val reader = new CSVReader(new FileReader(options.nutrientsList()))
val lines = reader.readAll().asScala.toIndexedSeq
reader.close()
val nutrientNames = lines(0).tail
val units = lines(1).tail.map(s => unitIds(s))
println("Deleting existing nutrient types...")
SQL("DELETE FROM nutrient_types").execute()
val unitParams = nutrientNames.zip(units).zipWithIndex.map {
case ((description, unit), index) =>
Seq[NamedParameter]('id -> (index + 1), 'description -> description, 'unit_id -> unit)
}
println("Creating new nutrient types...")
BatchSql("INSERT INTO nutrient_types VALUES({id},{description},{unit_id})", unitParams.head, unitParams.tail: _*).execute()
}
}
| digitalinteraction/intake24 | DatabaseTools/src/main/scala/uk/ac/ncl/openlab/intake24/sql/tools/food/migrations/FoodV14_CreateMasterNutrientsList.scala | Scala | apache-2.0 | 1,767 |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.common
/**
* Indicates the brokerId stored in logDirs is not consistent across logDirs.
*/
class InconsistentBrokerIdException(message: String, cause: Throwable) extends RuntimeException(message, cause) {
def this(message: String) = this(message, null)
def this(cause: Throwable) = this(null, cause)
def this() = this(null, null)
}
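// Usage sketch (illustrative, not from the original file): this is typically raised at broker
// startup when the broker ids recorded in the configured log directories disagree, e.g.
//   throw new InconsistentBrokerIdException(
//     s"Configured broker id $idA doesn't match stored broker id $idB in ${dirB}") // names hypothetical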
| flange/drift-dev | kafka/00-kafka_2.11-0.10.1.0/libs/tmp/kafka/common/InconsistentBrokerIdException.scala | Scala | apache-2.0 | 1,175 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.api.scala.batch
import org.apache.flink.api.scala._
import org.apache.flink.table.api.TableEnvironment
import org.apache.flink.table.api.scala._
import org.apache.flink.test.util.MultipleProgramsTestBase
import org.junit.Assert.assertEquals
import org.junit._
class ExplainTest
extends MultipleProgramsTestBase(MultipleProgramsTestBase.TestExecutionMode.CLUSTER) {
val testFilePath = ExplainTest.this.getClass.getResource("/").getFile
@Test
def testFilterWithoutExtended() : Unit = {
val env = ExecutionEnvironment.getExecutionEnvironment
val tEnv = TableEnvironment.getTableEnvironment(env)
val table = env.fromElements((1, "hello"))
.toTable(tEnv, 'a, 'b)
.filter("a % 2 = 0")
val result = tEnv.explain(table).replaceAll("\\r\\n", "\n")
val source = scala.io.Source.fromFile(testFilePath +
"../../src/test/scala/resources/testFilter0.out").mkString.replaceAll("\\r\\n", "\n")
assertEquals(result, source)
}
@Test
def testFilterWithExtended() : Unit = {
val env = ExecutionEnvironment.getExecutionEnvironment
val tEnv = TableEnvironment.getTableEnvironment(env)
val table = env.fromElements((1, "hello"))
.toTable(tEnv, 'a, 'b)
.filter("a % 2 = 0")
val result = tEnv.explain(table, true).replaceAll("\\r\\n", "\n")
val source = scala.io.Source.fromFile(testFilePath +
"../../src/test/scala/resources/testFilter1.out").mkString.replaceAll("\\r\\n", "\n")
assertEquals(result, source)
}
@Test
def testJoinWithoutExtended() : Unit = {
val env = ExecutionEnvironment.getExecutionEnvironment
val tEnv = TableEnvironment.getTableEnvironment(env)
val table1 = env.fromElements((1, "hello")).toTable(tEnv, 'a, 'b)
val table2 = env.fromElements((1, "hello")).toTable(tEnv, 'c, 'd)
val table = table1.join(table2).where("b = d").select("a, c")
val result = tEnv.explain(table).replaceAll("\\r\\n", "\n")
val source = scala.io.Source.fromFile(testFilePath +
"../../src/test/scala/resources/testJoin0.out").mkString.replaceAll("\\r\\n", "\n")
assertEquals(source, result)
}
@Test
def testJoinWithExtended() : Unit = {
val env = ExecutionEnvironment.getExecutionEnvironment
val tEnv = TableEnvironment.getTableEnvironment(env)
val table1 = env.fromElements((1, "hello")).toTable(tEnv, 'a, 'b)
val table2 = env.fromElements((1, "hello")).toTable(tEnv, 'c, 'd)
val table = table1.join(table2).where("b = d").select("a, c")
val result = tEnv.explain(table, true).replaceAll("\\r\\n", "\n")
val source = scala.io.Source.fromFile(testFilePath +
"../../src/test/scala/resources/testJoin1.out").mkString.replaceAll("\\r\\n", "\n")
assertEquals(source, result)
}
@Test
def testUnionWithoutExtended() : Unit = {
val env = ExecutionEnvironment.getExecutionEnvironment
val tEnv = TableEnvironment.getTableEnvironment(env)
val table1 = env.fromElements((1, "hello")).toTable(tEnv, 'count, 'word)
val table2 = env.fromElements((1, "hello")).toTable(tEnv, 'count, 'word)
val table = table1.unionAll(table2)
val result = tEnv.explain(table).replaceAll("\\r\\n", "\n")
val source = scala.io.Source.fromFile(testFilePath +
"../../src/test/scala/resources/testUnion0.out").mkString.replaceAll("\\r\\n", "\n")
assertEquals(result, source)
}
@Test
def testUnionWithExtended() : Unit = {
val env = ExecutionEnvironment.getExecutionEnvironment
val tEnv = TableEnvironment.getTableEnvironment(env)
val table1 = env.fromElements((1, "hello")).toTable(tEnv, 'count, 'word)
val table2 = env.fromElements((1, "hello")).toTable(tEnv, 'count, 'word)
val table = table1.unionAll(table2)
val result = tEnv.explain(table, true).replaceAll("\\r\\n", "\n")
val source = scala.io.Source.fromFile(testFilePath +
"../../src/test/scala/resources/testUnion1.out").mkString.replaceAll("\\r\\n", "\n")
assertEquals(result, source)
}
}
| DieBauer/flink | flink-libraries/flink-table/src/test/scala/org/apache/flink/table/api/scala/batch/ExplainTest.scala | Scala | apache-2.0 | 4,817 |
package com.twitter.finagle.util
import com.twitter.concurrent.NamedPoolThreadFactory
import com.twitter.conversions.time._
import com.twitter.finagle.stats.{ReadableStat, InMemoryStatsReceiver}
import java.util.concurrent.TimeUnit
import org.jboss.netty.util.{TimerTask, HashedWheelTimer}
import org.junit.runner.RunWith
import org.scalatest.FunSuite
import org.scalatest.concurrent.{Eventually, IntegrationPatience}
import org.scalatest.junit.JUnitRunner
import org.scalatest.mock.MockitoSugar
@RunWith(classOf[JUnitRunner])
class TimerStatsTest extends FunSuite
with MockitoSugar
with Eventually
with IntegrationPatience
{
test("deviation") {
val tickDuration = 10.milliseconds
val hwt = new HashedWheelTimer(
new NamedPoolThreadFactory(getClass.getSimpleName),
tickDuration.inMillis, TimeUnit.MILLISECONDS)
val sr = new InMemoryStatsReceiver()
val deviation: ReadableStat = sr.stat("deviation_ms")
assert(deviation().isEmpty)
TimerStats.deviation(hwt, tickDuration, sr)
// assert that we capture at least 3 samples
eventually {
assert(deviation().size >= 3)
}
hwt.stop()
}
test("hashedWheelTimerInternals") {
val tickDuration = 10.milliseconds
val hwt = new HashedWheelTimer(
new NamedPoolThreadFactory(getClass.getSimpleName),
tickDuration.inMillis, TimeUnit.MILLISECONDS)
val sr = new InMemoryStatsReceiver()
val pendingTimeouts: ReadableStat = sr.stat("pending_tasks")
// nothing should be scheduled at first
assert(pendingTimeouts().isEmpty)
val nTasks = 5
// schedule some tasks, but they won't run for 10 minutes
// to ensure they are queued up when the monitoring task runs
for (_ <- 0.until(nTasks))
hwt.newTimeout(mock[TimerTask], 10, TimeUnit.MINUTES)
    // Kick off the monitoring task.
    // Have it run quickly the first time, and then effectively never again within the test.
var count = 0
val nextRunAt = () => {
count += 1
if (count == 1) 1.millisecond else 5.minutes
}
TimerStats.hashedWheelTimerInternals(hwt, nextRunAt, sr)
eventually {
// we should have the nTasks tasks pending that we've scheduled
// plus it should see the monitoring task itself which runs it.
assert(pendingTimeouts() == Seq(nTasks + 1))
}
hwt.stop()
}
}
| lucaslanger/finagle | finagle-core/src/test/scala/com/twitter/finagle/util/TimerStatsTest.scala | Scala | apache-2.0 | 2,346 |
package org.bitcoins.rpc.marshallers.wallet
import org.bitcoins.rpc.bitcoincore.wallet.WalletInfo
import org.scalatest.{FlatSpec, MustMatchers}
import spray.json._
/**
* Created by Tom on 1/6/2016.
*/
class WalletMarshallerTest extends FlatSpec with MustMatchers {
var str =
"""
|{
| "walletversion" : 60000,
| "balance" : 0.39842624,
| "unconfirmed_balance" : 0.00000000,
| "immature_balance" : 0.00000000,
| "txcount" : 38,
| "keypoololdest" : 1430522134,
| "keypoolsize" : 101
|}
""".stripMargin
val json = str.parseJson
"WalletMarshaller" must "parse wallet information" in {
val wallet : WalletInfo = WalletMarshaller.WalletFormatter.read(json)
wallet.walletVersion must be (60000)
wallet.balance must be (0.39842624)
wallet.unconfirmedBalance must be (0)
wallet.immatureBalance must be (0)
wallet.txCount must be (38)
wallet.keyPoolOldest must be (1430522134)
wallet.keyPoolSize must be (101)
}
it must "write wallet info" in {
val json = str.parseJson
val wallet : WalletInfo = WalletMarshaller.WalletFormatter.read(json)
val writtenWallet = WalletMarshaller.WalletFormatter.write(wallet)
writtenWallet.asJsObject.fields("walletversion") must be (JsNumber(60000))
writtenWallet.asJsObject.fields("balance") must be (JsNumber(0.39842624))
writtenWallet.asJsObject.fields("unconfirmed_balance") must be (JsNumber(0.00000000))
writtenWallet.asJsObject.fields("immature_balance") must be (JsNumber(0.00000000))
writtenWallet.asJsObject.fields("txcount") must be (JsNumber(38))
writtenWallet.asJsObject.fields("keypoololdest") must be (JsNumber(1430522134))
writtenWallet.asJsObject.fields("keypoolsize") must be (JsNumber(101))
}
}
| bitcoin-s/bitcoin-s-rpc-client | src/test/scala/org/bitcoins/rpc/marshallers/wallet/WalletMarshallerTest.scala | Scala | mit | 1,798 |
/*
Copyright 2012 Twitter, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.twitter.scalding.commons.source
import cascading.flow.FlowDef
import cascading.pipe.Pipe
import cascading.scheme.Scheme
import cascading.tap.Tap
import cascading.tuple.Fields
import com.twitter.algebird.Monoid
import com.twitter.bijection.Injection
import com.twitter.chill.Externalizer
import com.twitter.scalding.TDsl._
import com.twitter.scalding._
import com.twitter.scalding.commons.scheme.KeyValueByteScheme
import com.twitter.scalding.commons.tap.VersionedTap
import com.twitter.scalding.commons.tap.VersionedTap.TapMode
import com.twitter.scalding.source.{CheckedInversion, MaxFailuresCheck}
import com.twitter.scalding.typed.KeyedListLike
import com.twitter.scalding.typed.TypedSink
import org.apache.hadoop.mapred.JobConf
import scala.collection.JavaConverters._
/**
* Source used to write key-value pairs as byte arrays into a versioned store. Supports incremental updates
* via the monoid on V.
*/
object VersionedKeyValSource {
val defaultVersionsToKeep = 3
  // TODO: two apply methods are kept here for binary compatibility purposes; clean this up in the next release.
def apply[K, V](
path: String,
sourceVersion: Option[Long] = None,
sinkVersion: Option[Long] = None,
maxFailures: Int = 0
)(implicit codec: Injection[(K, V), (Array[Byte], Array[Byte])]) =
new VersionedKeyValSource[K, V](path, sourceVersion, sinkVersion, maxFailures, defaultVersionsToKeep)
def apply[K, V](
path: String,
sourceVersion: Option[Long],
sinkVersion: Option[Long],
maxFailures: Int,
versionsToKeep: Int
)(implicit codec: Injection[(K, V), (Array[Byte], Array[Byte])]) =
new VersionedKeyValSource[K, V](path, sourceVersion, sinkVersion, maxFailures, versionsToKeep)
}
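// Usage sketch (illustrative; the path is hypothetical): read the latest complete version,
// or pin an explicit source version for reproducible reads.
//   val latest = VersionedKeyValSource[String, Long]("/data/word-counts")
//   val pinned = VersionedKeyValSource[String, Long]("/data/word-counts", sourceVersion = Some(20160101L))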
class VersionedKeyValSource[K, V](
val path: String,
val sourceVersion: Option[Long],
val sinkVersion: Option[Long],
val maxFailures: Int,
val versionsToKeep: Int
)(implicit @transient codec: Injection[(K, V), (Array[Byte], Array[Byte])])
extends Source
with Mappable[(K, V)]
with TypedSink[(K, V)] {
import Dsl._
val keyField = "key"
val valField = "value"
val fields = new Fields(keyField, valField)
val codecBox = Externalizer(codec)
override def converter[U >: (K, V)] = TupleConverter.asSuperConverter[(K, V), U](TupleConverter.of[(K, V)])
override def setter[U <: (K, V)] = TupleSetter.asSubSetter[(K, V), U](TupleSetter.of[(K, V)])
def hdfsScheme =
HadoopSchemeInstance(new KeyValueByteScheme(fields).asInstanceOf[Scheme[_, _, _, _, _]])
@deprecated("This method is deprecated", "0.1.6")
def this(path: String, sourceVersion: Option[Long], sinkVersion: Option[Long], maxFailures: Int)(implicit
@transient codec: Injection[(K, V), (Array[Byte], Array[Byte])]
) =
this(path, sourceVersion, sinkVersion, maxFailures, VersionedKeyValSource.defaultVersionsToKeep)(codec)
def getTap(mode: TapMode) = {
val tap = new VersionedTap(path, hdfsScheme, mode).setVersionsToKeep(versionsToKeep)
(sourceVersion, sinkVersion) match {
case (Some(v), _) if mode == TapMode.SOURCE =>
tap.setVersion(v)
case (_, Some(v)) if mode == TapMode.SINK =>
tap.setVersion(v)
case _ =>
tap
}
}
val source = getTap(TapMode.SOURCE)
val sink = getTap(TapMode.SINK)
override def validateTaps(mode: Mode): Unit =
// if a version is explicitly supplied, ensure that it exists
sourceVersion.foreach { version =>
mode match {
case hadoopMode: HadoopMode => {
val store = source.getStore(new JobConf(hadoopMode.jobConf))
if (!store.hasVersion(version)) {
throw new InvalidSourceException(
"Version %s does not exist. Currently available versions are: %s"
.format(version, store.getAllVersions)
)
}
}
case _ =>
throw new IllegalArgumentException(
"VersionedKeyValSource does not support mode %s. Only HadoopMode is supported"
.format(mode)
)
}
}
def resourceExists(mode: Mode): Boolean =
mode match {
case Test(buffers) => {
buffers(this).map(!_.isEmpty).getOrElse(false)
}
case HadoopTest(conf, buffers) => {
buffers(this).map(!_.isEmpty).getOrElse(false)
}
case _ => {
val conf = new JobConf(mode.asInstanceOf[HadoopMode].jobConf)
source.resourceExists(conf)
}
}
def sinkExists(mode: Mode): Boolean =
sinkVersion.exists { version =>
mode match {
case Test(buffers) =>
buffers(this).map(!_.isEmpty).getOrElse(false)
case HadoopTest(conf, buffers) =>
buffers(this).map(!_.isEmpty).getOrElse(false)
case m: HadoopMode =>
val conf = new JobConf(m.jobConf)
val store = sink.getStore(conf)
store.hasVersion(version)
case _ => sys.error(s"Unknown mode $mode")
}
}
override def createTap(readOrWrite: AccessMode)(implicit mode: Mode): Tap[_, _, _] = {
import com.twitter.scalding.CastHfsTap
mode match {
case Hdfs(_strict, _config) =>
readOrWrite match {
case Read => CastHfsTap(source)
case Write => CastHfsTap(sink)
}
case _ =>
TestTapFactory(this, hdfsScheme).createTap(readOrWrite)
}
}
  // Override this for more control over decode failures
protected lazy val checkedInversion: CheckedInversion[(K, V), (Array[Byte], Array[Byte])] =
new MaxFailuresCheck(maxFailures)(codecBox.get)
override def sinkFields: Fields = fields
override def transformForRead(pipe: Pipe): Pipe =
pipe.flatMap((keyField, valField) -> (keyField, valField)) { pair: (Array[Byte], Array[Byte]) =>
checkedInversion(pair)
}
override def transformForWrite(pipe: Pipe): Pipe =
pipe.mapTo((0, 1) -> (keyField, valField)) { pair: (K, V) =>
codecBox.get.apply(pair)
}
override def toIterator(implicit config: Config, mode: Mode): Iterator[(K, V)] = {
val tap = createTap(Read)(mode)
CascadingMode
.cast(mode)
.openForRead(config, tap)
.asScala
.flatMap { te =>
val item = te.selectTuple(fields)
mode match {
case _: TestMode =>
val key = item.getObject(0).asInstanceOf[K]
val value = item.getObject(1).asInstanceOf[V]
Some((key, value))
case _ =>
val key = item.getObject(0).asInstanceOf[Array[Byte]]
val value = item.getObject(1).asInstanceOf[Array[Byte]]
checkedInversion((key, value))
}
}
}
override def toString =
"%s path:%s,sourceVersion:%s,sinkVersion:%s".format(getClass(), path, sourceVersion, sinkVersion)
override def equals(other: Any) =
if (other.isInstanceOf[VersionedKeyValSource[_, _]]) {
val otherSrc = other.asInstanceOf[VersionedKeyValSource[K, V]]
otherSrc.path == path && otherSrc.sourceVersion == sourceVersion && otherSrc.sinkVersion == sinkVersion
} else {
false
}
override def hashCode = toString.hashCode
}
object RichPipeEx extends java.io.Serializable {
implicit def pipeToRichPipeEx(pipe: Pipe): RichPipeEx = new RichPipeEx(pipe)
implicit def typedPipeToRichPipeEx[K: Ordering, V: Monoid](pipe: TypedPipe[(K, V)]): TypedRichPipeEx[K, V] =
new TypedRichPipeEx(pipe)
implicit def keyedListLikeToRichPipeEx[K: Ordering, V: Monoid, T[K, +V] <: KeyedListLike[K, V, T]](
kll: KeyedListLike[K, V, T]
): TypedRichPipeEx[K, V] = typedPipeToRichPipeEx(kll.toTypedPipe)
}
class TypedRichPipeEx[K: Ordering, V: Monoid](pipe: TypedPipe[(K, V)]) extends java.io.Serializable {
import Dsl._
// Tap reads existing data from the `sourceVersion` (or latest
  // version) of data specified in `src`, merges in the K,V pairs from
  // the pipe using an implicit `Monoid[V]`, and sinks all results
// into the `sinkVersion` of data (or a new version) specified by
// `src`.
def writeIncremental(src: VersionedKeyValSource[K, V], reducers: Int = 1)(implicit
flowDef: FlowDef,
mode: Mode
): TypedPipe[(K, V)] = {
val outPipe =
if (!src.resourceExists(mode))
pipe
else {
val oldPairs = TypedPipe
.from[(K, V)](src.read, (0, 1))
.map { case (k, v) => (k, v, 0) }
val newPairs = pipe.sumByLocalKeys.map { case (k, v) => (k, v, 1) }
(oldPairs ++ newPairs)
.groupBy(_._1)
.withReducers(reducers)
.sortBy(_._3)
.mapValues(_._2)
.sum
.toTypedPipe
}
outPipe.write(src)
}
}
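// Usage sketch (illustrative; the pipe and path are hypothetical):
//   import RichPipeEx._
//   val dailyCounts: TypedPipe[(String, Long)] = ???
//   dailyCounts.writeIncremental(VersionedKeyValSource[String, Long]("/data/word-counts"), reducers = 4)
// Existing pairs from the latest complete version are merged with the new pairs via Monoid[Long],
// and the combined result is written out as a new version of the store.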
class RichPipeEx(pipe: Pipe) extends java.io.Serializable {
import Dsl._
// VersionedKeyValSource always merges with the most recent complete
// version
def writeIncremental[K, V](src: VersionedKeyValSource[K, V], fields: Fields, reducers: Int = 1)(implicit
monoid: Monoid[V],
flowDef: FlowDef,
mode: Mode
) = {
def appendToken(pipe: Pipe, token: Int) =
pipe.mapTo((0, 1) -> ('key, 'value, 'isNew)) { pair: (K, V) => pair :+ token }
val outPipe =
if (!src.resourceExists(mode))
pipe
else {
val oldPairs = appendToken(src.read, 0)
val newPairs = appendToken(pipe, 1)
(oldPairs ++ newPairs)
.groupBy('key)(_.reducers(reducers).sortBy('isNew).sum[V]('value))
.project(('key, 'value))
.rename(('key, 'value) -> fields)
}
outPipe.write(src)
}
}
| twitter/scalding | scalding-commons/src/main/scala/com/twitter/scalding/commons/source/VersionedKeyValSource.scala | Scala | apache-2.0 | 10,102 |
/**
* Copyright (C) 2015 Orbeon, Inc.
*
* This program is free software; you can redistribute it and/or modify it under the terms of the
* GNU Lesser General Public License as published by the Free Software Foundation; either version
* 2.1 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Lesser General Public License for more details.
*
* The full text of the license is available at http://www.gnu.org/copyleft/lesser.html
*/
package org.orbeon.oxf.fr
import org.orbeon.oxf.fr.FormRunner._
import org.orbeon.oxf.fr.XMLNames._
import org.orbeon.oxf.test.{DocumentTestBase, ResourceManagerSupport}
import org.orbeon.oxf.util.StringUtils._
import org.orbeon.oxf.xml.TransformerUtils
import org.orbeon.scaxon.SimplePath._
import org.scalatest.FunSpecLike
class EmailTest
extends DocumentTestBase
with ResourceManagerSupport
with FunSpecLike {
val FormWithEmailControls = "oxf:/org/orbeon/oxf/fr/form-with-email-controls.xhtml"
describe("Email address extraction from form definition") {
val formDoc = readURLAsImmutableXMLDocument(FormWithEmailControls)
val head = formDoc.rootElement / XHHeadTest head
val model = head / XFModelTest head
val instance = model descendant XFInstanceTest filter (_.id == "fr-form-instance") head
val body = formDoc.rootElement / XHBodyTest head
val data =
TransformerUtils.extractAsMutableDocument(instance child * head)
    def valuesForSearch(search: ⇒ Seq[ControlBindPathHoldersResources]) = {
val searchResult = search
val distinctPaths =
        searchResult map { case ControlBindPathHoldersResources(_, _, path, _, _) ⇒ path map (_.value) mkString "/" } distinct
val values =
searchResult.flatMap(_.holders).flatten.map(_.getStringValue).to[List]
(values, distinctPaths)
}
describe("without section templates") {
val expectedForClassName = List(
("fr-email-recipient" , List("[email protected]") , List("section-1/control-1")),
("fr-email-subject" , Nil , Nil),
("fr-attachment" , List("attachment-13.bin", "attachment-14.bin"), List("section-1/control-13", "section-1/control-14")),
("fr-attachment fr-email-attachment", List("attachment-14.bin") , List("section-1/control-14"))
)
      for ((classNames, expectedValues, expectedPath) ← expectedForClassName)
it(s"must pass with $classNames") {
val (actualValues, actualPaths) = valuesForSearch {
searchControlsTopLevelOnly(
body,
Some(data),
FormRunner.hasAllClassesPredicate(classNames.splitTo[List]())
)
}
assert(expectedValues === actualValues)
assert(expectedPath === actualPaths)
}
}
describe("within section templates") {
val expectedForClassName = List(
("fr-email-recipient" , List("[email protected]", "[email protected]", "[email protected]"), List("section-3/control-1", "section-3/section-3/grid-4/grid-4-iteration/control-6")),
("fr-email-subject" , List("Abc", "Def", "Ghi") , List("section-3/control-8", "section-3/section-3/grid-4/grid-4-iteration/control-7")),
("fr-attachment" , List("attachment-10.bin", "attachment-11.bin") , List("section-3/control-10","section-3/control-11")),
("fr-attachment fr-email-attachment", List("attachment-11.bin") , List("section-3/control-11"))
)
      for ((classNames, expectedValues, expectedPath) ← expectedForClassName)
it(s"must pass with $classNames") {
val (actualValues, actualPaths) = valuesForSearch {
searchControlsUnderSectionTemplates(
head,
body,
Some(data),
FormRunner.hasAllClassesPredicate(classNames.splitTo[List]())
)
}
assert(expectedValues === actualValues)
assert(expectedPath === actualPaths)
}
}
}
}
| brunobuzzi/orbeon-forms | form-runner/jvm/src/test/scala/org/orbeon/oxf/fr/EmailTest.scala | Scala | lgpl-2.1 | 4,456 |
package com.github.mdr.mash.inference
import java.{ util β ju }
import com.github.mdr.mash.classes.{ MashClass, UserDefinedMethod }
import com.github.mdr.mash.evaluator.{ SystemCommandFunction, _ }
import com.github.mdr.mash.functions._
import com.github.mdr.mash.inference.Type.UserClassInstance
import com.github.mdr.mash.ns.collections.{ GroupClass, ListClass }
import com.github.mdr.mash.ns.core._
import com.github.mdr.mash.ns.core.help.MethodHelpClass
import com.github.mdr.mash.ns.os.{ PathClass, ProcessResultClass }
import com.github.mdr.mash.parser.AbstractSyntax.{ FunctionDeclaration, _ }
import com.github.mdr.mash.parser.QuotationType
import com.github.mdr.mash.runtime.{ MashString, MashValue }
import com.github.mdr.mash.utils.Utils._
import scala.PartialFunction._
import scala.collection.immutable.ListMap
import scala.collection.mutable.ArrayBuffer
case class ValueInfo(valueOpt: Option[MashValue], typeOpt: Option[Type])
object TypeInferencer {
val ThisName = "this"
def getAliases(decl: FunctionDeclaration): Seq[String] =
for {
attribute β decl.attributes.filter(_.name == Attributes.Alias)
arguments β attribute.argumentsOpt
boundTypes = AliasParameterModel.params.bindTypes(TypedArguments.from(arguments))
nameArg β boundTypes.getArgument(AliasParameterModel.Params.Name)
alias β nameArg.valueOpt.collect { case s: MashString β s.s }
} yield alias
}
class TypeInferencer extends InvocationTypeInferencer with BinaryOperatorTypeInferencer {
import TypeInferencer._
/**
* We maintain a visited map to avoid loops in the type inference (this can happen, for example, when
* typing fixed-point combinators)
*/
private val visitedMap: ju.IdentityHashMap[Expr, Boolean] = new ju.IdentityHashMap
/**
* Attempt to infer the type of a given expression (and subexpression).
*
* @param bindings -- known type information about the variable bindings available to this expression.
* @param immediateExec -- if true, immediately "execute" (at the type level) nullary functions and methods
*/
def inferType(expr: Expr, bindings: Map[String, Type], immediateExec: Boolean = true): Option[Type] =
if (visitedMap.containsKey(expr))
expr.typeOpt
else {
visitedMap.put(expr, true)
try {
val typeOpt = inferType_(expr, bindings, immediateExec)
expr.typeOpt = typeOpt orElse Some(AnyClass)
expr.typeBindings = bindings
typeOpt
} finally
visitedMap.remove(expr)
}
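  // Illustrative note (not part of the original source): with immediateExec = true, an identifier
  // bound to a nullary function is typed as that function's *result* (e.g. a nullary function
  // returning a String infers as String); with immediateExec = false the function type itself is kept.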
private def inferType_(expr: Expr, bindings: Map[String, Type], immediateExec: Boolean = true): Option[Type] =
expr match {
case Hole(_, _) | PipeExpr(_, _, _) β None // should not be present in AST at this point
case memberExpr: HeadlessMemberExpr β None // should not be present in AST at this point
case MishFunction(command, _) β Some(Type.BuiltinFunction(SystemCommandFunction(command)))
case ParenExpr(body, _) β inferType(body, bindings)
case BlockExpr(body, _) β inferType(body, bindings)
case Literal(x, _) β Some(ValueTypeDetector.getType(x))
case StringLiteral(s, QuotationType.Double, _, _) β Some(StringClass taggedWith PathClass)
case StringLiteral(s, QuotationType.Single, _, _) β Some(StringClass)
case is: InterpolatedString β inferType(is, bindings)
case MinusExpr(_, _) β Some(NumberClass)
case binOpExpr: BinOpExpr β inferTypeBinOpExpr(binOpExpr, bindings)
case chainedOpExpr: ChainedOpExpr β inferTypeChainedOpExpr(chainedOpExpr, bindings)
case memberExpr: MemberExpr β inferType(memberExpr, bindings, immediateExec)
case lookupExpr: LookupExpr β inferType(lookupExpr, bindings)
case ifExpr: IfExpr β inferType(ifExpr, bindings)
case objectExpr: ObjectExpr β inferType(objectExpr, bindings)
case identifier: Identifier β inferType(identifier, bindings, immediateExec)
case mishExpr: MishExpr β inferType(mishExpr, bindings)
case invocationExpr: InvocationExpr β inferTypeInvocationExpr(invocationExpr, bindings)
case interpolation: MishInterpolation β inferType(interpolation, bindings)
case listExpr: ListExpr β inferType(listExpr, bindings)
case assignmentExpr: AssignmentExpr β inferType(assignmentExpr, bindings)
case assignmentExpr: PatternAssignmentExpr β inferType(assignmentExpr, bindings)
case statementSeq: StatementSeq β inferType(statementSeq, bindings)
case helpExpr: HelpExpr β inferType(helpExpr, bindings)
case functionDecl: FunctionDeclaration β inferType(functionDecl, bindings)
case classDecl: ClassDeclaration β inferType(classDecl, bindings)
case lambda: LambdaExpr β inferType(lambda, bindings)
case thisExpr: ThisExpr β bindings.get(ThisName)
case importStatement: ImportStatement β inferType(importStatement, bindings)
}
private def inferType(importStatement: ImportStatement, bindings: Map[String, Type]): Option[Type] = {
inferType(importStatement.expr, bindings)
None
}
private def inferType(lambdaExpr: LambdaExpr, bindings: Map[String, Type]): Option[Type] = {
val preliminaryBindings = inferType(lambdaExpr.params, bindings)
inferType(lambdaExpr.body, bindings ++ preliminaryBindings)
val params = Evaluator.parameterModel(lambdaExpr.params)
Some(Type.UserDefinedFunction(docCommentOpt = None, isPrivate = false, nameOpt = None, params, lambdaExpr.body, bindings))
}
private def inferType(params: ParamList, bindings: Map[String, Type]): Map[String, Type] =
(for {
param β params.params
paramTypeOpt = param.defaultExprOpt.flatMap(inferType(_, bindings))
(name, type_) β TypeParamBindingContext.bindParam(Evaluator.makeParameter(param), paramTypeOpt)
} yield name -> type_).toMap
private def inferType(functionDecl: FunctionDeclaration, bindings: Map[String, Type]): Option[Type] = {
val preliminaryBodyBindings = inferType(functionDecl.params, bindings)
inferType(functionDecl.body, bindings ++ preliminaryBodyBindings)
Some(getFunctionType(functionDecl, bindings))
}
private def inferType(classDeclaration: ClassDeclaration, bindings: Map[String, Type]): Option[Type] = {
val fieldBindings = inferType(classDeclaration.params, bindings)
val methods = classDeclaration.bodyOpt.toSeq.flatMap(_.methods)
val classType = getUserClassType(classDeclaration, bindings)
val thisType = UserClassInstance(classType)
val initialClassBindings =
bindings ++
fieldBindings ++
Seq(ThisName -> thisType)
val methodBindings =
for (method β methods)
yield method.name -> inferType(method, initialClassBindings).getOrElse(Type.Any)
val updatedClassBindings = initialClassBindings ++ methodBindings
methods.foreach(inferType(_, updatedClassBindings))
Some(classType)
}
private def inferType(helpExpr: HelpExpr, bindings: Map[String, Type]): Option[Type] =
inferType(helpExpr.expr, bindings, immediateExec = false) collect {
case Type.BuiltinFunction(_) β FunctionClass
case Type.BoundBuiltinMethod(_, _) β MethodHelpClass
case _: Type.UserDefinedFunction β FunctionClass
case _: Type.BoundUserDefinedMethod β MethodHelpClass
}
private def inferType(statementSeq: StatementSeq, bindings: Map[String, Type]): Option[Type] = {
var latestBindings = bindings
for (statement β statementSeq.statements) {
inferType(statement, latestBindings)
statement match {
case AssignmentExpr(Identifier(name, _), _, _, _) β
statement.typeOpt.foreach(latestBindings += name -> _)
case PatternAssignmentExpr(pattern, right, _) β
latestBindings ++= TypeParamBindingContext.bindPatternParam(Evaluator.makeParamPattern(pattern), right.typeOpt)
case functionDeclaration: FunctionDeclaration β
latestBindings += functionDeclaration.name -> getFunctionType(functionDeclaration, latestBindings)
case classDeclaration: ClassDeclaration β
val userClassType = getUserClassType(classDeclaration, latestBindings)
latestBindings += userClassType.name -> userClassType
case _ β
}
}
statementSeq.statements.lastOption.map(_.typeOpt).getOrElse(Some(Unit))
}
private def getFunctionType(functionDeclaration: FunctionDeclaration,
bindings: Map[String, Type]): Type.UserDefinedFunction = {
val FunctionDeclaration(docCommentOpt, attributes, name, paramList, body, _) = functionDeclaration
val isPrivate = attributes.exists(_.name == Attributes.Private)
Type.UserDefinedFunction(docCommentOpt, isPrivate, Some(name), Evaluator.parameterModel(paramList), body, bindings)
}
private def getUserClassType(classDeclaration: ClassDeclaration, bindings: Map[String, Type]): Type.UserClass = {
val ClassDeclaration(_, _, className, paramList, bodyOpt, _) = classDeclaration
var methodBindings = bindings // TODO: Should also include parent methods
val methods = ArrayBuffer[(String, Type.UserDefinedFunction)]()
for (decl β bodyOpt.toSeq.flatMap(_.methods)) {
val FunctionDeclaration(docCommentOpt, attributes, functionName, functionParamList, body, _) = decl
val functionParams = Evaluator.parameterModel(functionParamList)
val isPrivate = attributes.exists(_.name == Attributes.Private)
val methodType = Type.UserDefinedFunction(docCommentOpt, isPrivate, Some(functionName), functionParams, body, methodBindings)
for (name β functionName +: getAliases(decl)) {
methods += name -> methodType
methodBindings += name -> methodType
}
}
val classParams = Evaluator.parameterModel(paramList)
Type.UserClass(className, classParams, ListMap(methods: _*))
}
private def inferType(assignmentExpr: AssignmentExpr, bindings: Map[String, Type]): Option[Type] = {
val AssignmentExpr(left, operatorOpt, right, _) = assignmentExpr
val leftTypeOpt = inferType(left, bindings)
val rightTypeOpt = inferType(right, bindings)
operatorOpt.flatMap(op β inferTypeBinOpExpr(leftTypeOpt, op, rightTypeOpt, right)) orElse rightTypeOpt
}
private def inferType(assignmentExpr: PatternAssignmentExpr, bindings: Map[String, Type]): Option[Type] = {
val PatternAssignmentExpr(_, right, _) = assignmentExpr
val rightTypeOpt = inferType(right, bindings)
rightTypeOpt
}
private def inferType(listExpr: ListExpr, bindings: Map[String, Type]): Option[Type] = {
val elementTypes = listExpr.elements.flatMap(inferType(_, bindings))
val elementType = elementTypes.headOption.getOrElse(Type.Any)
Some(elementType.seq)
}
private def inferType(interpolation: MishInterpolation, bindings: Map[String, Type]): Option[Type] =
interpolation.part match {
      case StringPart(s) => Some(StringClass)
      case ExprPart(expr) => inferType(expr, bindings)
}
private def inferType(mishExpr: MishExpr, bindings: Map[String, Type]): Option[Type] = {
val MishExpr(command, args, redirects, captureProcessOutput, _) = mishExpr
inferType(command, bindings)
    redirects.foreach(redirect => inferType(redirect.arg, bindings))
args.foreach(inferType(_, bindings))
Some(if (captureProcessOutput) ProcessResultClass else UnitClass)
}
private def inferType(interpolatedString: InterpolatedString, bindings: Map[String, Type]): Option[Type] = {
val InterpolatedString(_, parts, _, _) = interpolatedString
parts.foreach {
      case ExprPart(expr) => inferType(expr, bindings)
      case StringPart(s) =>
}
Some(StringClass taggedWith PathClass)
}
private def inferType(identifier: Identifier, bindings: Map[String, Type], immediateExec: Boolean): Option[Type] =
bindings.get(identifier.name).when(immediateExec, inferImmediateExec(_, Some(identifier)))
private def inferType(name: String, bindings: Map[String, Type], immediateExec: Boolean): Option[Type] =
bindings.get(name).when(immediateExec, inferImmediateExec(_))
private def inferType(objectExpr: ObjectExpr, bindings: Map[String, Type]): Option[Type] = {
val ObjectExpr(entries, _) = objectExpr
val fieldTypes =
entries.flatMap {
        case FullObjectEntry(fieldExpr, valueExpr, _) =>
inferType(fieldExpr, bindings)
for {
            label <- getFieldName(fieldExpr)
type_ = inferType(valueExpr, bindings).getOrElse(Type.Any)
} yield label -> type_
        case ShorthandObjectEntry(field, _) =>
Seq(field -> inferType(field, bindings, immediateExec = true).getOrElse(Type.Any))
}
Some(Type.Object(fieldTypes.toMap))
}
private def getFieldName(label: Expr): Option[String] = condOpt(label) {
    case Identifier(name, _) => name
    case s: StringLiteral => s.s
}
private def inferType(memberExpr: MemberExpr, bindings: Map[String, Type], immediateExec: Boolean): Option[Type] = {
for {
      targetType <- inferType(memberExpr.target, bindings)
      memberType <- memberLookup(targetType, memberExpr.name, immediateExec = immediateExec, Some(memberExpr), Some(memberExpr.target))
} yield memberType
}
private def inferTypeChainedOpExpr(chainedOpExpr: Expr, bindings: Map[String, Type]): Option[Type] = {
val ChainedOpExpr(left, opRights, _) = chainedOpExpr
inferType(left, bindings)
    for ((op, right) <- opRights) yield op -> inferType(right, bindings)
Some(Type.Instance(BooleanClass))
}
private def inferType(ifExpr: IfExpr, bindings: Map[String, Type]): Option[Type] = {
val IfExpr(cond, body, elseOpt, _) = ifExpr
inferType(cond, bindings)
val bodyTypeOpt = inferType(body, bindings)
val elseTypeOpt = elseOpt.flatMap(inferType(_, bindings))
bodyTypeOpt orElse elseTypeOpt
}
private def memberLookup(targetType: Type, klass: MashClass, name: String): Option[Type] =
klass.fieldsMap.get(name).map(_.fieldType) orElse
klass.getMethod(name).map(getMethodType(targetType, _)) orElse
klass.parentOpt.flatMap(superClass β memberLookup(targetType, superClass, name))
private def getMethodType(targetType: Type, method: MashMethod) = method match {
    case UserDefinedMethod(docCommentOpt, name, params, _, body, context, isPrivate, _, _) =>
val bindings = new ValueTypeDetector().buildBindings(context.scopeStack.bindings)
val functionType = Type.UserDefinedFunction(docCommentOpt, isPrivate, Some(name), params, body, bindings)
Type.BoundUserDefinedMethod(targetType, functionType)
    case _ =>
Type.BoundBuiltinMethod(targetType, method)
}
protected def getStaticMethodType(targetExpr: Expr, name: String): Option[Type.BuiltinFunction] =
targetExpr
.constantValueOpt
.flatMap(getStaticMethod(_, name))
.map(Type.BuiltinFunction)
private def getStaticMethod(value: MashValue, name: String): Option[MashFunction] =
    condOpt(value) { case klass: MashClass => klass }.flatMap(_ getStaticMethod name)
private def memberLookup(genericType: Type.Generic,
name: String): Option[Type] = genericType match {
    case Type.Generic(GroupClass, keyType, elementType) =>
if (name == GroupClass.Fields.Key.name)
Some(keyType)
else if (name == GroupClass.Fields.Values.name)
Some(elementType.seq)
else
memberLookup(genericType, GroupClass, name)
    case Type.Generic(TimedResultClass, resultType) =>
if (name == TimedResultClass.Fields.Result.name)
Some(resultType)
else
memberLookup(genericType, TimedResultClass, name)
    case _ =>
None
}
protected def getConstructor(userClass: Type.UserClass): Type.BuiltinFunction = {
object FakeFunction extends MashFunction(MashClass.ConstructorMethodName) {
override def call(boundParams: BoundParams): MashValue =
throw new AssertionError("Fake function cannot be executed")
override def summaryOpt = Some(s"Construct a new ${userClass.name} object")
override def params: ParameterModel = userClass.params
override def typeInferenceStrategy =
(inferencer: Inferencer, arguments: TypedArguments) => Some(UserClassInstance(userClass))
}
Type.BuiltinFunction(FakeFunction)
}
def memberLookup(targetType: Type,
name: String,
immediateExec: Boolean,
memberExprOpt: Option[MemberExpr] = None,
targetExprOpt: Option[Expr] = None): Option[Type] = {
val rawType = targetType match {
      case Type.Instance(ClassClass) => targetExprOpt.flatMap(getStaticMethodType(_, name)) orElse memberLookup(targetType, ClassClass, name)
      case userClass: Type.UserClass => if (name == MashClass.ConstructorMethodName) Some(getConstructor(userClass)) else memberLookup(targetType, ClassClass, name)
      case Type.Instance(klass) => memberLookup(targetType, klass, name)
      case userClassInstance: Type.UserClassInstance => memberLookup(userClassInstance, name)
      case Type.Tagged(baseClass, tagClass) => memberLookup(targetType, baseClass, name) orElse memberLookup(targetType, tagClass, name)
      case Type.Seq(elementType) => memberLookup(targetType, ListClass, name) orElse memberLookup(elementType, name, immediateExec, memberExprOpt, targetExprOpt).map(Type.Seq)
      case Type.Object(knownFields) => knownFields.get(name) orElse memberLookup(targetType, ObjectClass, name)
      case Type.BuiltinFunction(_) => memberLookup(targetType, FunctionClass, name)
      case Type.UserDefinedFunction(_, _, _, _, _, _) => memberLookup(targetType, FunctionClass, name)
      case Type.BoundUserDefinedMethod(_, _) => memberLookup(targetType, BoundMethodClass, name)
      case Type.BoundBuiltinMethod(_, _) => memberLookup(targetType, BoundMethodClass, name)
      case genericType: Type.Generic => memberLookup(genericType, name)
      case _ => None
}
rawType.when(immediateExec, inferImmediateExec(_, memberExprOpt))
}
private def memberLookup(userClassInstance: Type.UserClassInstance, name: String): Option[Type] = {
val Type.UserClassInstance(Type.UserClass(_, params, methods)) = userClassInstance
    val fieldLookup = params.params.find(_.nameOpt contains name).map(_ => Type.Instance(AnyClass))
val methodLookup = methods.get(name).map(Type.BoundUserDefinedMethod(userClassInstance, _))
fieldLookup orElse
methodLookup orElse
memberLookup(userClassInstance, ObjectClass, name)
}
/**
* Infer the type of an immediately-invoked nullary expression
*/
private def inferImmediateExec(intermediateTypeOpt: Option[Type], exprOpt: Option[Expr] = None): Option[Type] =
intermediateTypeOpt match {
      case Some(Type.BuiltinFunction(f)) if f.allowsNullary =>
exprOpt.foreach(_.preInvocationTypeOpt = intermediateTypeOpt)
f.typeInferenceStrategy.inferTypes(new Inferencer(this, Map()), TypedArguments())
      case Some(Type.BoundBuiltinMethod(targetType, method)) if method.allowsNullary =>
exprOpt.foreach(_.preInvocationTypeOpt = intermediateTypeOpt)
method.typeInferenceStrategy.inferTypes(new Inferencer(this, Map()), Some(targetType), TypedArguments())
      case Some(Type.UserDefinedFunction(_, _, _, params, body, functionBindings)) if params.allowsNullary =>
exprOpt.foreach(_.preInvocationTypeOpt = intermediateTypeOpt)
val argBindings = params.bindTypes(TypedArguments()).boundNames
inferType(body, functionBindings ++ argBindings)
      case Some(Type.BoundUserDefinedMethod(targetType, method)) if method.params.allowsNullary =>
val Type.UserDefinedFunction(_, _, _, params, body, methodBindings) = method
exprOpt.foreach(_.preInvocationTypeOpt = intermediateTypeOpt)
val argBindings = params.bindTypes(TypedArguments()).boundNames
inferType(body, methodBindings ++ argBindings ++ Seq(ThisName -> targetType))
      case x =>
x
}
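  // Illustrative note on the rule above (a sketch, not behaviour asserted beyond this code):
  // given a binding `f -> Type.BuiltinFunction(g)` where `g.allowsNullary` is true, the bare
  // expression `f` is typed via g's typeInferenceStrategy with no arguments, i.e. with g's
  // result type rather than with the function type itself. `f` and `g` are hypothetical names
  // used only for illustration.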
private def inferType(lookupExpr: LookupExpr, bindings: Map[String, Type]): Option[Type] = {
val LookupExpr(targetExpr, indexExpr, _) = lookupExpr
val targetTypeOpt = inferType(targetExpr, bindings)
val indexTypeOpt = inferType(indexExpr, bindings)
indexExpr match {
      case StringLiteral(s, _, _, _) => return targetTypeOpt.flatMap(memberLookup(_, s, immediateExec = false))
      case _ =>
}
condOpt((targetTypeOpt, indexTypeOpt)) {
      case (Some(Type.Seq(elementType)), Some(Type.Instance(NumberClass))) => elementType
      case (Some(Type.Instance(StringClass)), Some(Type.Instance(NumberClass))) => StringClass taggedWith CharacterClass
      case (Some(taggedType @ Type.Tagged(StringClass, _)), Some(Type.Instance(NumberClass))) => taggedType
}
}
} | mdr/mash | src/main/scala/com/github/mdr/mash/inference/TypeInferencer.scala | Scala | mit | 22,161 |
package safe.actor
import akka.actor.{ ActorRef, Props, Status }
import scala.collection.mutable
import com.codahale.metrics.MetricRegistry
class ResequenceActor[A](f: A => SeqMetadata, next: Seq[ActorRef], metrics: Option[MetricRegistry]) extends FeatureActor {
// Used to keep track of the last number processed in the sequence
private[this] val sequenceCounts = new mutable.HashMap[String, Int]()
// Queue of messages waiting for an earlier number in the sequence to be released
private[this] val queuedMessages = new mutable.HashMap[String, mutable.Map[Int, A]]()
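  // Illustrative behaviour of the resequencing logic below: for id "job-1" with total = 3,
  // receiving message #2 first only queues it; when #1 arrives it is forwarded and #2 is
  // released right after; once #3 has been forwarded both maps drop the "job-1" entry, so
  // completed sequences do not accumulate state.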
next foreach { l => addListener(l) }
val metricsName = "Actor (" + self.path + ")"
def receive = {
case a: A =>
val timeCtx = startTimer(metricsName, metrics)
try {
f(a) match {
case SeqMetadata(id, num, total) => {
var nextNum = sequenceCounts.getOrElseUpdate(id, 1)
val queue = queuedMessages.getOrElseUpdate(id, new mutable.HashMap[Int, A]())
if (nextNum == num) {
// This is the next item in the sequence, forward it on
gossip(a)
nextNum += 1
// Release any queued messages that are next in the sequence
while(queue.contains(nextNum)) {
gossip(queue(nextNum))
queue.remove(nextNum)
nextNum += 1
}
sequenceCounts(id) = nextNum
// If we released the last message in the sequence, clear the queues
if (nextNum > total) {
sequenceCounts.remove(id)
queuedMessages.remove(id)
}
}
else {
// This message is out of sequence, we need to queue it
queue.put(num, a)
}
}
}
}
catch {
case e: Throwable => sender ! Status.Failure(
new RuntimeException(self.path.toString + " failed to handle message " + a, e))
}
finally {
stopTimer(metricsName, timeCtx, metrics)
}
}
}
case class SeqMetadata(id: String, num: Int, total: Int)
object ResequenceActor {
def props[A](f: A => SeqMetadata, next: Seq[ActorRef] = Nil, metrics: Option[MetricRegistry] = None) =
Props(classOf[ResequenceActor[A]], f, next, metrics)
} | devonbryant/safe | safe-core/src/main/scala/safe/actor/ResequenceActor.scala | Scala | epl-1.0 | 2,400 |
package org.jetbrains.sbt
package annotator
import com.intellij.lang.annotation.{AnnotationHolder, Annotator}
import com.intellij.openapi.util.text.StringUtil
import com.intellij.psi.{PsiComment, PsiElement, PsiWhiteSpace}
import org.jetbrains.plugins.scala.lang.psi.api.expr.ScExpression
import org.jetbrains.plugins.scala.lang.psi.api.statements.{ScFunctionDefinition, ScPatternDefinition}
import org.jetbrains.plugins.scala.lang.psi.api.toplevel.imports.ScImportStmt
import org.jetbrains.plugins.scala.lang.psi.impl.ScalaPsiElementFactory
import org.jetbrains.plugins.scala.lang.psi.types
import org.jetbrains.plugins.scala.lang.psi.types.ScType
import org.jetbrains.plugins.scala.lang.psi.types.result.TypingContext
import org.jetbrains.sbt.language.SbtFileImpl
import org.jetbrains.sbt.settings.SbtSystemSettings
/**
* @author Pavel Fatin
*/
class SbtAnnotator extends Annotator {
def annotate(element: PsiElement, holder: AnnotationHolder): Unit = element match {
case file: SbtFileImpl =>
val sbtVersion =
SbtSystemSettings.getInstance(file.getProject)
.getLinkedProjectSettings(file)
.safeMap(_.sbtVersion)
.getOrElse(Sbt.LatestVersion)
new Worker(file.children.toVector, sbtVersion, holder).annotate()
case _ =>
}
private class Worker(sbtFileElements: Seq[PsiElement], sbtVersion: String, holder: AnnotationHolder) {
def annotate(): Unit = {
sbtFileElements.collect {
case exp: ScExpression => annotateTypeMismatch(exp)
case element => annotateNonExpression(element)
}
if (sbtVersionLessThan("0.13.7"))
annotateMissingBlankLines()
}
private def annotateNonExpression(element: PsiElement): Unit = element match {
case _: SbtFileImpl | _: ScImportStmt | _: PsiComment | _: PsiWhiteSpace =>
case _: ScFunctionDefinition | _: ScPatternDefinition if !sbtVersionLessThan("0.13.0") =>
case other => holder.createErrorAnnotation(other, SbtBundle("sbt.annotation.sbtFileMustContainOnlyExpressions"))
}
private def annotateTypeMismatch(expression: ScExpression): Unit =
expression.getType(TypingContext.empty).foreach { expressionType =>
if (expressionType.equiv(types.Nothing) || expressionType.equiv(types.Null)) {
holder.createErrorAnnotation(expression, SbtBundle("sbt.annotation.expectedExpressionType"))
} else {
if (!isTypeAllowed(expression, expressionType))
holder.createErrorAnnotation(expression, SbtBundle("sbt.annotation.expressionMustConform", expressionType))
}
}
private def findTypeByText(exp: ScExpression, text: String): Option[ScType] =
Option(ScalaPsiElementFactory.createTypeFromText(text, exp.getContext, exp))
private def isTypeAllowed(expression: ScExpression, expressionType: ScType): Boolean =
SbtAnnotator.AllowedTypes.exists(typeStr => findTypeByText(expression, typeStr) exists (t => expressionType conforms t))
private def annotateMissingBlankLines(): Unit =
sbtFileElements.sliding(3).foreach {
        case Seq(_: ScExpression, space: PsiWhiteSpace, e: ScExpression) if space.getText.count(_ == '\n') == 1 =>
holder.createErrorAnnotation(e, SbtBundle("sbt.annotation.blankLineRequired", sbtVersion))
case _ =>
}
private def sbtVersionLessThan(version: String): Boolean =
StringUtil.compareVersionNumbers(sbtVersion, version) < 0
}
}
object SbtAnnotator {
val AllowedTypes = List("Seq[Def.SettingsDefinition]", "Def.SettingsDefinition")
}
| double-y/translation-idea-plugin | src/org/jetbrains/sbt/annotator/SbtAnnotator.scala | Scala | apache-2.0 | 3,562 |
package pureconfig
/**
* A trait that can be implemented to customize how case classes are read from and written to a config.
*
* @tparam T the type of case class for which this hint applies
*/
trait ProductHint[T] {
/**
* Returns the key in the config object associated with a given case class field.
*
* @param fieldName the case class field
* @return the key in the config object associated with the given case class field.
*/
def configKey(fieldName: String): String
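  // For example, with the companion's default ConfigFieldMapping(CamelCase, KebabCase), a
  // case class field named `maxRetries` is read from the config key `max-retries`.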
/**
* A boolean indicating if the default arguments of the case class should be used when fields are missing
*/
def useDefaultArgs: Boolean
/**
* A boolean indicating if config keys that do not map to a case class field are allowed in config objects
*/
def allowUnknownKeys: Boolean
}
private[pureconfig] case class ProductHintImpl[T](
fieldMapping: ConfigFieldMapping,
useDefaultArgs: Boolean,
allowUnknownKeys: Boolean) extends ProductHint[T] {
def configKey(fieldName: String) = fieldMapping(fieldName)
}
object ProductHint {
def apply[T](
fieldMapping: ConfigFieldMapping = ConfigFieldMapping(CamelCase, KebabCase),
useDefaultArgs: Boolean = true,
allowUnknownKeys: Boolean = true): ProductHint[T] =
ProductHintImpl[T](fieldMapping, useDefaultArgs, allowUnknownKeys)
implicit def default[T]: ProductHint[T] = apply()
}
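
// Usage sketch (illustrative only; `AppSettings` is a hypothetical case class, not part of this
// library): placing an implicit hint in scope changes how fields map to config keys and whether
// unknown keys are tolerated.
//
//   case class AppSettings(maxRetries: Int, logLevel: String = "info")
//
//   implicit val appSettingsHint: ProductHint[AppSettings] =
//     ProductHint[AppSettings](ConfigFieldMapping(CamelCase, SnakeCase), allowUnknownKeys = false)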
| derekmorr/pureconfig | core/src/main/scala/pureconfig/ProductHint.scala | Scala | mpl-2.0 | 1,380 |
package com.sksamuel.elastic4s.tasks
import com.sksamuel.elastic4s.ElasticsearchClientUri
import com.sksamuel.elastic4s.http.{ElasticDsl, HttpClient}
import com.sksamuel.elastic4s.testkit.SharedElasticSugar
import org.scalatest.{FlatSpec, Matchers}
class TasksTest extends FlatSpec with SharedElasticSugar with Matchers with ElasticDsl {
import com.sksamuel.elastic4s.jackson.ElasticJackson.Implicits._
val http = HttpClient(ElasticsearchClientUri("elasticsearch://" + node.ipAndPort))
"list tasks" should "include all fields" in {
val resp = http.execute {
listTasks()
}.await
resp.nodes.head._2.host shouldBe "local"
resp.nodes.head._2.roles shouldBe Seq("master", "data", "ingest")
resp.nodes.head._2.tasks.values.forall(_.startTime.toMillis > 0) shouldBe true
resp.nodes.head._2.tasks.values.forall(_.runningTime.toMillis > 0) shouldBe true
}
}
| FabienPennequin/elastic4s | elastic4s-tests/src/test/scala/com/sksamuel/elastic4s/tasks/TasksTest.scala | Scala | apache-2.0 | 893 |
package com.twitter.server.view
private[server] class CompositeView(views: Seq[View], separator: String = "") {
def render: String = views.map(_.render).mkString(separator)
}
| twitter/twitter-server | server/src/main/scala/com/twitter/server/view/CompositeView.scala | Scala | apache-2.0 | 180 |
package japgolly.scalajs.react.extra
import org.scalajs.dom.raw.EventTarget
import org.scalajs.dom.Event
import scala.scalajs.js
import scalaz.effect.IO
import japgolly.scalajs.react._
object EventListener {
def apply[E <: Event] = new OfEventType[E](true)
def defaultTarget[P, S, B, N <: TopNode]: ComponentScopeM[P,S,B,N] => EventTarget =
_.getDOMNode()
final class OfEventType[E <: Event](val _unused: Boolean) extends AnyVal {
/**
* Install an event listener when a component is mounted.
* Automatically uninstalls the event listener when the component is unmounted.
*
* @param eventType A string representing the
* <a href="https://developer.mozilla.org/en-US/docs/DOM/event.type">event type</a> to listen for.
* @param useCapture If true, useCapture indicates that the user wishes to initiate capture.
* After initiating capture, all events of the specified type will be dispatched to the registered
* listener before being dispatched to any EventTarget beneath it in the DOM tree.
* Events which are bubbling upward through the tree will not trigger a listener designated to use
* capture.
*/
def install[P, S, B <: OnUnmount, N <: TopNode](eventType : String,
listener : ComponentScopeM[P,S,B,N] => E => Unit,
target : ComponentScopeM[P,S,B,N] => EventTarget = defaultTarget[P,S,B,N],
useCapture: Boolean = false) =
OnUnmount.install[P,S,B,N] andThen (_.componentDidMount { $ =>
val et = target($)
val fe = listener($)
val f1: js.Function1[E, Unit] = (e: E) => fe(e)
val f2 = f1.asInstanceOf[js.Function1[Event, Unit]] // TODO Workaround for scala-js-dom 0.8.0
et.addEventListener(eventType, f1, useCapture)
$.backend.onUnmount(et.removeEventListener(eventType, f2, useCapture))
})
/** See [[install()]]. */
def installIO[P, S, B <: OnUnmount, N <: TopNode](eventType : String,
listener : ComponentScopeM[P,S,B,N] => E => IO[Unit],
target : ComponentScopeM[P,S,B,N] => EventTarget = defaultTarget[P,S,B,N],
useCapture: Boolean = false) =
install[P,S,B,N](
eventType,
$ => { val f = listener($); e => f(e).unsafePerformIO() },
target, useCapture)
}
/** See [[OfEventType.install()]]. */
def install[P, S, B <: OnUnmount, N <: TopNode](eventType : String,
listener : ComponentScopeM[P,S,B,N] => () => Unit,
target : ComponentScopeM[P,S,B,N] => EventTarget = defaultTarget[P,S,B,N],
useCapture: Boolean = false) =
EventListener[Event].install[P,S,B,N](
eventType,
$ => { val f = listener($); _ => f() },
target, useCapture)
/** See [[OfEventType.install()]]. */
def installIO[P, S, B <: OnUnmount, N <: TopNode](eventType : String,
listener : ComponentScopeM[P,S,B,N] => IO[Unit],
target : ComponentScopeM[P,S,B,N] => EventTarget = defaultTarget[P,S,B,N],
useCapture: Boolean = false) =
EventListener[Event].installIO[P,S,B,N](
eventType,
Function const listener(_),
target, useCapture)
}
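
// Usage sketch (hedged: `Backend` and the component builder's `configure` step are typical-usage
// assumptions, not definitions from this file):
//
//   class Backend extends OnUnmount
//   ...
//     .backend(_ => new Backend)
//     .configure(EventListener.install("resize", $ => () => println("resized")))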
| elacin/scalajs-react | extra/src/main/scala/japgolly/scalajs/react/extra/EventListener.scala | Scala | apache-2.0 | 3,758 |
package fr.laas.fape.anml.pending
import fr.laas.fape.anml.model.concrete.{VarRef, VariableUser}
import fr.laas.fape.anml.model.{AbstractParameterizedStateVariable, IntFunction, LVarRef, ParameterizedStateVariable}
import scala.collection.mutable
abstract class IntExpression extends Comparable[IntExpression] with VariableUser {
def trans(transformation: IntExpression => IntExpression) : IntExpression
def jTrans(t: java.util.function.Function[IntExpression,IntExpression]) : IntExpression = trans((expr:IntExpression) => t.apply(expr))
def bind(f: LVarRef => VarRef) : IntExpression
def isParameterized : Boolean
def lb: Int
def ub: Int
def isKnown = lb == ub
def get = {
assert(isKnown, "This Value is not known yet.")
lb
}
def asFunction : (VarRef => (Int,Int)) => (Int,Int)
def allParts : Iterable[IntExpression]
def allVariables : Set[VarRef] = allParts.collect { case Variable(v, _, _) => v }.toSet
def plus(o: IntExpression) : IntExpression = IntExpression.sum(this, o)
def plus(o: Int) : IntExpression = IntExpression.sum(this, IntExpression.lit(o))
def isInf() = lb >= Int.MaxValue/2-1
override def compareTo(t: IntExpression): Int =
if(ub < t.lb)
ub.compareTo(t.lb)
else if(lb > t.ub)
lb.compareTo(t.ub)
else
0
override def usedVariables = allVariables.map(_.asInstanceOf[fr.laas.fape.anml.model.concrete.Variable])
}
abstract class UnaryIntExpression extends IntExpression
abstract class BinaryIntExpression extends IntExpression
abstract class Constraint
case class GreaterEqualConstraint(e: IntExpression, value: Int) extends Constraint
case class LesserEqualConstraint(e: IntExpression, value: Int) extends Constraint
object IntExpression {
val maxes: mutable.Map[(IntExpression,IntExpression),IntExpression] = mutable.Map()
val minus: mutable.Map[(IntExpression,IntExpression),IntExpression] = mutable.Map()
val sums: mutable.Map[(IntExpression,IntExpression),IntExpression] = mutable.Map()
val inversions: mutable.Map[IntExpression,IntExpression] = mutable.Map()
val locStateVars: mutable.Map[(AbstractParameterizedStateVariable,Int,Int), LStateVariable] = mutable.Map()
val literals: mutable.Map[Int, IntExpression] = mutable.Map()
val stateVariables: mutable.Map[ParameterizedStateVariable, IntExpression] = mutable.Map()
val variables: mutable.Map[(VarRef,Int,Int), IntExpression] = mutable.Map()
def safesum(x:Int, y:Int) : Int = {
val z = x.toLong + y.toLong
if(z < Int.MinValue)
Int.MinValue
else if(z > Int.MaxValue)
Int.MaxValue
else z.toInt
}
def safeinvert(x: Int) =
if(x == Int.MaxValue)
Int.MinValue
else if(x == Int.MinValue)
Int.MaxValue
else
-x
def geConstraint(e: IntExpression, value:Int) : Seq[Constraint] = e match {
case _ if e.lb >= value => Nil
case Min(x, y) => geConstraint(x, value) ++ geConstraint(y, value)
case Sum(x, y) if x.isKnown => geConstraint(y, value - x.get)
case Sum(x, y) if y.isKnown => geConstraint(x, value - y.get)
case Sum(x, y) if x.ub + y.ub == value => geConstraint(x, x.ub) ++ geConstraint(y, y.ub)
case Invert(x) => leConstraint(x, - value)
case x => List(new GreaterEqualConstraint(e, value))
}
def leConstraint(e: IntExpression, value:Int) : Seq[Constraint] = e match {
case _ if e.ub <= value => Nil
case Max(x, y) => leConstraint(x, value) ++ leConstraint(y, value)
case Sum(x, y) if x.isKnown => leConstraint(y, value - x.get)
case Sum(x, y) if y.isKnown => leConstraint(x, value - y.get)
case Sum(x, y) if x.lb + y.lb == value => leConstraint(x, x.lb) ++ leConstraint(y, y.lb)
case Invert(x) => geConstraint(x, -value)
case x => List(new LesserEqualConstraint(e, value))
}
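  // Worked example of the decomposition above (assuming the static bounds do not already satisfy
  // the constraint): geConstraint(min(a, b), 3) yields GreaterEqualConstraint(a, 3) and
  // GreaterEqualConstraint(b, 3), since a minimum is >= 3 exactly when both operands are;
  // leConstraint(Invert(a), -5) reduces to geConstraint(a, 5).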
def lit(i: Int) = literals.getOrElseUpdate(i, { IntLiteral(i) })
def locSV(lsv: AbstractParameterizedStateVariable) : LStateVariable = locSV(lsv, lsv.func.asInstanceOf[IntFunction].minValue, lsv.func.asInstanceOf[IntFunction].maxValue)
def locSV(lsv: AbstractParameterizedStateVariable, lb:Int, ub:Int) = locStateVars.getOrElseUpdate((lsv,lb,ub), { LStateVariable(lsv,lb,ub) })
def stateVariable(sv: ParameterizedStateVariable) = stateVariables.getOrElseUpdate(sv, { StateVariable(sv) })
def variable(v: VarRef, lb:Int, ub:Int) = variables.getOrElseUpdate((v,lb,ub), { Variable(v,lb,ub) })
def sum(v1: IntExpression, v2:IntExpression) : IntExpression =
if(v1.isInf())
v1
else if(v2.isInf())
v2
else
(v1, v2) match {
case (IntLiteral(d1), IntLiteral(d2)) => lit(d1 + d2)
case (x:BinaryIntExpression, y:UnaryIntExpression) => sum(y,x) //always put literals first
case (x:Sum, y:BinaryIntExpression) if !y.isInstanceOf[Sum] => sum(y,x)
case (x, y:IntLiteral) => sum(y, x)
case (IntLiteral(0), x) => x
case (x:IntLiteral, Max(y,z)) => max(sum(x,y), sum(x,z))
case (x:IntLiteral, Min(y,z)) => min(sum(x,y), sum(x,z))
case (x:UnaryIntExpression, Min(y,z)) => min(sum(x,y), sum(x,z))
case (IntLiteral(x), Sum(IntLiteral(y),z)) => sum(lit(x+y), z)
case (x:UnaryIntExpression, Sum(y:IntLiteral, z)) => sum(y, sum(x,z))
case (Sum(IntLiteral(x), y), Sum(IntLiteral(z), w)) => sum(lit(x+z), sum(y,w))
case (Min(x,y), z:Sum) => min(sum(x,z), sum(y,z))
case (Min(x,y), Min(z,w)) => min(min(sum(x,z), sum(x,w)), min(sum(y,z), sum(y,w)))
      case (Max(x,y), z:Sum) => max(sum(x,z), sum(y,z))
case (x, Invert(y)) if x == y => lit(0)
case (Invert(y), x) if x == y => lit(0)
case _ => sums.getOrElseUpdate((v1,v2), {Sum(v1,v2)})
}
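  // Example rewrites performed by `sum`: sum(lit(1), lit(2)) folds to lit(3);
  // sum(lit(2), max(a, b)) distributes to max(sum(lit(2), a), sum(lit(2), b));
  // and sum(a, minus(a)) cancels to lit(0).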
def max(v1: IntExpression, v2: IntExpression) : IntExpression =
if(lesserEqual(v1,v2))
v2
else if(lesserEqual(v2,v1))
v1
else
(v1,v2) match {
case (IntLiteral(d1), IntLiteral(d2)) => if(d1 > d2) v1 else v2
case (x:BinaryIntExpression, y:UnaryIntExpression) => max(y, x)
case (x, y:IntLiteral) => max(y,x)
case (x, Max(y,z)) if (x eq y) || (x eq z) => max(y,z)
      // absorption: max(e, min(e, z)) == e, whichever operand e matches
      case (x, Min(y,z)) if x eq y => x
      case (x, Min(y,z)) if x eq z => x
      case (Max(y,z), x) if (x eq y) || (x eq z) => max(y,z)
      case (Min(y,z), x) if x eq y => x
      case (Min(y,z), x) if x eq z => x
case _ if v1 eq v2 => v1
case _ if lesserEqual(v1,v2) => v2
case _ => maxes.getOrElseUpdate((v1,v2), {Max(v1,v2)})
}
def min(v1: IntExpression, v2: IntExpression) : IntExpression =
if(v1.isInf())
v2
else if(v2.isInf())
v1
else
(v1,v2) match {
case (IntLiteral(d1), IntLiteral(d2)) => if(d1 < d2) v1 else v2
case (x:BinaryIntExpression, y:UnaryIntExpression) => min(y, x)
case (x, y:IntLiteral) => min(y,x)
case (x:IntLiteral, Min(y:IntLiteral, z)) => min(min(x,y), z)
case (Min(x,y), Min(w,z)) if y == z => min(min(x,w), z)
case _ if v1 eq v2 => v1
case _ if lesserEqual(v1, v2) => v1
case _ => minus.getOrElseUpdate((v1,v2), Min(v1,v2))
}
def minus(v1: IntExpression) : IntExpression = v1 match {
case IntLiteral(d) => lit(-d)
case Invert(v) => v
case Max(x,y) => min(minus(x), minus(y))
case Min(x,y) => max(minus(x), minus(y))
case Sum(x,y) => sum(minus(x), minus(y))
case _ => inversions.getOrElseUpdate(v1, {Invert(v1)})
}
def equals(v1: IntExpression, v2: IntExpression) = (v1,v2) match {
case (IntLiteral(d1), IntLiteral(d2)) => d1 == d2
case _ if v1 eq v2 => true
case _ if lesserEqual(v1,v2) && lesserEqual(v2,v1) => true
case _ => v1.isKnown && v2.isKnown && v1.get == v2.get
}
def lesserThan(v1: IntExpression, v2: IntExpression) = v1.ub < v2.lb
def lesserEqual(v1: IntExpression, v2: IntExpression) =
if(v1.isInf())
false
else if(v2.isInf())
false
else if(v1 eq v2)
true
else if(v1.ub <= v2.lb)
true
else if(v1.lb > v2.ub)
false
else (v1,v2) match {
case _ => false
}
}
case class IntLiteral private[pending] (val value: Int) extends UnaryIntExpression {
override def bind(f: (LVarRef) => VarRef): IntExpression = this
override def isParameterized: Boolean = false
override def lb: Int = value
override def ub: Int = value
override def asFunction: ((VarRef) => (Int, Int)) => (Int, Int) = _ => (value,value)
override def allParts: Iterable[IntExpression] = List(this)
override def trans(transformation: (IntExpression) => IntExpression): IntExpression = transformation(this)
override def toString = value.toString
}
case class LStateVariable private[pending] (val lsv: AbstractParameterizedStateVariable, lb: Int, ub: Int) extends UnaryIntExpression {
override def bind(f: (LVarRef) => VarRef): IntExpression = new StateVariable(new ParameterizedStateVariable(lsv.func, lsv.args.map(f).toArray))
override def isParameterized: Boolean = true
override def asFunction: ((VarRef) => (Int, Int)) => (Int, Int) = throw new RuntimeException("This should have been binded and replaced by a variable first.")
override def allParts: Iterable[IntExpression] = List(this)
override def trans(transformation: (IntExpression) => IntExpression): IntExpression = transformation(this)
}
case class StateVariable private[pending] (val sv: ParameterizedStateVariable) extends UnaryIntExpression {
override def bind(f: (LVarRef) => VarRef): IntExpression = this
override def isParameterized: Boolean = true
override def lb: Int = sv.func.asInstanceOf[IntFunction].minValue
override def ub: Int = sv.func.asInstanceOf[IntFunction].maxValue
override def asFunction: ((VarRef) => (Int, Int)) => (Int, Int) = throw new RuntimeException("This should have been replaced by a variable first.")
override def allParts: Iterable[IntExpression] = List(this)
override def trans(transformation: (IntExpression) => IntExpression): IntExpression = transformation(this)
}
case class Variable private[pending] (variable: VarRef, lb: Int, ub: Int) extends UnaryIntExpression {
require(variable.typ.isNumeric)
override def bind(f: (LVarRef) => VarRef): IntExpression = this
override def isParameterized: Boolean = false
override def asFunction: ((VarRef) => (Int, Int)) => (Int, Int) = f => f(variable)
override def allParts: Iterable[IntExpression] = List(this)
override def trans(transformation: (IntExpression) => IntExpression): IntExpression = transformation(this)
}
case class Max private[pending] (val left: IntExpression, val right: IntExpression) extends BinaryIntExpression {
override def bind(f: (LVarRef) => VarRef): IntExpression = IntExpression.max(left.bind(f), right.bind(f))
override def isParameterized: Boolean = left.isParameterized || right.isParameterized
override val lb = Math.max(left.lb, right.lb)
override val ub = Math.max(left.ub, right.ub)
override def asFunction: ((VarRef) => (Int, Int)) => (Int, Int) =
f => {
val (min1,max1) = left.asFunction.apply(f)
val (min2,max2) = right.asFunction.apply(f)
(Math.max(min1,min2), Math.max(max1,max2))
}
override def allParts: Iterable[IntExpression] = this :: List(left, right).flatMap(_.allParts)
override def trans(t: (IntExpression) => IntExpression): IntExpression = IntExpression.max(left.trans(t), right.trans(t))
}
case class Min private[pending] (val left: IntExpression, val right: IntExpression) extends BinaryIntExpression {
override def bind(f: (LVarRef) => VarRef): IntExpression = IntExpression.min(left.bind(f), right.bind(f))
override def isParameterized: Boolean = left.isParameterized || right.isParameterized
override val lb = Math.min(left.lb, right.lb)
override val ub = Math.min(left.ub, right.ub)
override def asFunction: ((VarRef) => (Int, Int)) => (Int, Int) =
f => {
val (min1,max1) = left.asFunction.apply(f)
val (min2,max2) = right.asFunction.apply(f)
(Math.min(min1,min2), Math.min(max1,max2))
}
override def allParts: Iterable[IntExpression] = this :: List(left, right).flatMap(_.allParts)
override def trans(t: (IntExpression) => IntExpression): IntExpression = IntExpression.min(left.trans(t), right.trans(t))
}
case class Invert private[pending] (val value: IntExpression) extends UnaryIntExpression {
override def bind(f: (LVarRef) => VarRef): IntExpression = IntExpression.minus(value.bind(f))
override def isParameterized: Boolean = value.isParameterized
override val lb = IntExpression.safeinvert(value.ub)
override val ub = IntExpression.safeinvert(value.lb)
override def asFunction: ((VarRef) => (Int, Int)) => (Int, Int) =
f => {
val (min, max) = value.asFunction.apply(f)
      (-max, -min) // negation swaps the lower and upper bounds
}
override def allParts: Iterable[IntExpression] = this :: List(value).flatMap(_.allParts)
override def trans(t: (IntExpression) => IntExpression): IntExpression = IntExpression.minus(value.trans(t))
}
case class Sum private[pending] (val left: IntExpression, val right: IntExpression) extends BinaryIntExpression {
override def bind(f: (LVarRef) => VarRef): IntExpression = IntExpression.sum(left.bind(f), right.bind(f))
override def isParameterized: Boolean = left.isParameterized && right.isParameterized
override val lb = IntExpression.safesum(left.lb, right.lb)
override val ub = IntExpression.safesum(left.ub, right.ub)
override def asFunction: ((VarRef) => (Int, Int)) => (Int, Int) =
f => {
val (min1,max1) = left.asFunction.apply(f)
val (min2,max2) = right.asFunction.apply(f)
(min1 + min2, max1 + max2)
}
override def allParts: Iterable[IntExpression] = this :: List(left, right).flatMap(_.allParts)
override def trans(t: (IntExpression) => IntExpression): IntExpression = IntExpression.sum(left.trans(t), right.trans(t))
}
| athy/fape | anml-parser/src/main/scala/fr/laas/fape/anml/pending/IntExpression.scala | Scala | bsd-2-clause | 13,799 |
package com.imaginea.activegrid.core.models
/**
* Created by shareefn on 8/12/16.
*/
case class CommandResult(result: List[Line], currentContext: Option[CommandExecutionContext])
| eklavya/activeGrid | src/main/scala/com/imaginea/activegrid/core/models/CommandResult.scala | Scala | apache-2.0 | 184 |
package eu.stratosphere.benchmarks.systemml.cli.command.visualizationUtil
import java.io.File
import scalax.chart.api._
import scalax.chart.XYChart
/**
* Created by felix on 11.10.16.
*/
class Chart(metricId: Int, chart: XYChart, folder: String, file: String) {
val metricID = metricId
var maxRuntime: Int = -1
var maxValue: Double = -1
def setMax(maxRuntimeP: Int, maxValueP: Double) = {
maxRuntime = maxRuntimeP
maxValue = maxValueP
}
def plotAndSave(isComparable: Boolean) {
/*
var legend = new LegendItemCollection
val legenditem1 = new LegendItem("data-item")
legend.add(legenditem1)
chartSum.plot.setFixedLegendItems(legend)
*/
val yAxis = chart.plot.getRangeAxis()
if (isComparable) {
if (maxValue > 0) {
yAxis.setRange(0, maxValue)
}
}
//yAxis.setLabel("test")
val xAxis = chart.plot.getDomainAxis()
if (isComparable) {
xAxis.setRange(0, maxRuntime)
}
xAxis.setLabel("run time in seconds")
new File(folder).mkdirs
chart.saveAsPNG(folder + "/" + file)
}
}
| fschueler/sysml-benchmark | sysml-benchmark-peelextensions/src/main/scala/eu/stratosphere/benchmarks/systemml/cli/command/visualizationUtil/Chart.scala | Scala | apache-2.0 | 1,098 |
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package models.declaration
import jto.validation.forms._
import jto.validation.{From, Rule, To, Write}
import models.declaration.release7.{RoleType, RoleWithinBusinessRelease7}
case class AddPerson(firstName: String,
middleName: Option[String],
lastName: String,
roleWithinBusiness: RoleWithinBusinessRelease7
)
object AddPerson {
import play.api.libs.json._
val key = "add-person"
implicit def convert(role: RoleWithinBusiness): RoleType = {
role match {
case BeneficialShareholder => models.declaration.release7.BeneficialShareholder
case Director => models.declaration.release7.Director
case Partner => models.declaration.release7.Partner
case InternalAccountant => models.declaration.release7.InternalAccountant
case ExternalAccountant => models.declaration.release7.ExternalAccountant
case SoleProprietor => models.declaration.release7.SoleProprietor
case NominatedOfficer => models.declaration.release7.NominatedOfficer
case Other(x) => models.declaration.release7.Other(x)
}
}
implicit def formRule: Rule[UrlFormEncoded, AddPerson] = From[UrlFormEncoded] { __ =>
import models.FormTypes._
import jto.validation.forms.Rules._
val roleReader: Rule[UrlFormEncoded, RoleWithinBusinessRelease7] = {
__.read[RoleWithinBusinessRelease7]
}
(
      (__ \ "firstName").read(genericNameRule("error.required.declaration.first_name", maxLengthMsg = "error.invalid.firstname.length", regExMessage="error.invalid.firstname.validation")) ~
        (__ \ "middleName").read(optionR(genericNameRule(maxLengthMsg="error.invalid.middlename.length", regExMessage="error.invalid.middlename.validation"))) ~
        (__ \ "lastName").read(genericNameRule("error.required.declaration.last_name", maxLengthMsg="error.invalid.lastname.length", regExMessage="error.invalid.lastname.validation")) ~
roleReader
) (AddPerson.apply)
}
implicit val formWrites: Write[AddPerson, UrlFormEncoded] = To[UrlFormEncoded] { __ =>
import jto.validation.forms.Writes._
import play.api.libs.functional.syntax.unlift
(
      (__ \ "firstName").write[String] ~
        (__ \ "middleName").write[Option[String]] ~
        (__ \ "lastName").write[String] ~
__.write[RoleWithinBusinessRelease7]
) (unlift(AddPerson.unapply))
}
implicit val jsonReads: Reads[AddPerson] = {
import play.api.libs.functional.syntax._
import play.api.libs.json.Reads._
import play.api.libs.json._
(
      (__ \ "firstName").read[String] and
        (__ \ "middleName").readNullable[String] and
        (__ \ "lastName").read[String] and
__.read[RoleWithinBusinessRelease7]
) (AddPerson.apply _)
}
implicit val jsonWrites: Writes[AddPerson] = {
import play.api.libs.functional.syntax._
import play.api.libs.json.Writes._
import play.api.libs.json._
(
      (__ \ "firstName").write[String] and
        (__ \ "middleName").write[Option[String]] and
        (__ \ "lastName").write[String] and
__.write[RoleWithinBusinessRelease7]
) (unlift(AddPerson.unapply))
}
}
| hmrc/amls-frontend | app/models/declaration/AddPerson.scala | Scala | apache-2.0 | 3,810 |