code (stringlengths 5–1M) | repo_name (stringlengths 5–109) | path (stringlengths 6–208) | language (stringclasses 1 value) | license (stringclasses 15 values) | size (int64 5–1M) |
---|---|---|---|---|---
/***********************************************************************
* Copyright (c) 2013-2020 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.accumulo.audit
import java.time.ZonedDateTime
import org.apache.accumulo.core.client.Connector
import org.apache.accumulo.core.data.Range
import org.apache.accumulo.core.security.Authorizations
import org.locationtech.geomesa.utils.audit.AuditedEvent
import org.locationtech.geomesa.utils.collection.{IsSynchronized, MaybeSynchronized, NotSynchronized}
/**
* Manages reading of usage stats
*/
class AccumuloEventReader(connector: Connector, table: String) {
private val tableExists: MaybeSynchronized[Boolean] =
if (connector.tableOperations().exists(table)) { new NotSynchronized(true) } else { new IsSynchronized(false) }
def query[T <: AuditedEvent](typeName: String,
dates: (ZonedDateTime, ZonedDateTime),
auths: Authorizations)
(implicit transform: AccumuloEventTransform[T]): Iterator[T] = {
if (!checkTable) { Iterator.empty } else {
val scanner = connector.createScanner(table, auths)
val rangeStart = s"$typeName~${dates._1.format(AccumuloEventTransform.dateFormat)}"
val rangeEnd = s"$typeName~${dates._2.format(AccumuloEventTransform.dateFormat)}"
scanner.setRange(new Range(rangeStart, rangeEnd))
transform.iterator(scanner)
}
}
private def checkTable: Boolean = {
if (tableExists.get) {
true
} else if (connector.tableOperations().exists(table)) {
tableExists.set(true, false)
true
} else {
false
}
}
}
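// Hedged usage sketch, not part of the original file: reads the last day of audited
// events for a feature type. It assumes an implicit AccumuloEventTransform[T] for the
// chosen event type is in scope, as `query` requires; all names here are illustrative.
object AccumuloEventReaderExample {
  def lastDay[T <: AuditedEvent](connector: Connector, table: String, typeName: String)
                                (implicit transform: AccumuloEventTransform[T]): Iterator[T] = {
    val reader = new AccumuloEventReader(connector, table)
    val now = ZonedDateTime.now()
    // query scans the rows between "typeName~start" and "typeName~end"
    reader.query[T](typeName, (now.minusDays(1), now), new Authorizations())
  }
}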
| aheyne/geomesa | geomesa-accumulo/geomesa-accumulo-datastore/src/main/scala/org/locationtech/geomesa/accumulo/audit/AccumuloEventReader.scala | Scala | apache-2.0 | 2,009 |
package tubesocks
import com.ning.http.client.{ AsyncHttpClient, AsyncHttpClientConfig }
import com.ning.http.client.websocket.{
WebSocket, DefaultWebSocketListener, WebSocketUpgradeHandler }
import java.net.URI
/** A builder of sorts for (Web)Sockets */
object Sock {
/** A partial function signature for handling Socket events */
type Handler = PartialFunction[Event, Any]
object Listen {
lazy val discard: Handler = {
case e: Event => ()
}
case class ReconnectingListen(times: Int, pausing: Int) {
def apply(pf: Handler) = {
def complete(e: Event) = (pf orElse discard)(e)
// note: we would just use the TextListener below BUT
// it's very convenient to make #onMessage(m) respondable
new DefaultWebSocketListener {
override def onMessage(m: String) = complete(Message(m, new DefaultSocket(this.webSocket)))
override def onOpen(ws: WebSocket) = complete(Open(new DefaultSocket(ws)))
override def onClose(ws: WebSocket) = complete(Close(new DefaultSocket(ws)))
override def onError(t: Throwable) = complete(Error(t))
override def onFragment(fragment: String, last: Boolean) =
complete(if (last) EOF(fragment) else Fragment(fragment))
}
}
}
def reconnecting(times: Int, pausing: Int) = ReconnectingListen(times, pausing)
def apply(pf: Handler) = {
def complete(e: Event) = (pf orElse discard)(e)
// note: we would just use the TextListener below BUT
// it's very convenient to make #onMessage(m) respondable
new DefaultWebSocketListener {
override def onMessage(m: String) = complete(Message(m, new DefaultSocket(this.webSocket)))
override def onOpen(ws: WebSocket) = complete(Open(new DefaultSocket(ws)))
override def onClose(ws: WebSocket) = complete(Close(new DefaultSocket(ws)))
override def onError(t: Throwable) = complete(Error(t))
override def onFragment(fragment: String, last: Boolean) =
complete(if (last) EOF(fragment) else Fragment(fragment))
}
}
}
def reconnecting(times: Int = -1, pausing: Int = 0)(uri: URI)(f: Handler): Socket =
configure(identity)(times, pausing)(uri)(f)
/** URI factory for returning a websocket
* @param str string uri
* @return a function that takes a Handler and returns a Socket */
def uri(str: String) =
apply(new URI(if (str.startsWith("ws")) str else "ws://%s" format str))_
/** Default client-configured Socket
* @param uri websocket endpoint
* @param f Handler function */
def apply(uri: URI)(f: Handler): Socket =
configure(identity)()(uri)(f)
/** Provides a means of customizing client configuration
* @param conf configuration building function
* @param uri websocket endpoint
* @param f Handler function */
def configure(conf: AsyncHttpClientConfig.Builder => AsyncHttpClientConfig.Builder)
(reconnectAttempts: Int = 0, pausing: Int = 0)
(uri: URI)(f: Handler): Socket =
new DefaultSocket(mkClient(conf(defaultConfig))
.prepareGet(uri.toString)
.execute(new WebSocketUpgradeHandler.Builder()
.addWebSocketListener(Listen.reconnecting(reconnectAttempts, pausing)(f))
.build())
.get())
private def defaultConfig =
new AsyncHttpClientConfig.Builder()
.setUserAgent("Tubesocks/0.1")
private def mkClient(config: AsyncHttpClientConfig.Builder) =
new AsyncHttpClient(config.build())
}
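// Hedged usage sketch, not part of the original file: opens a socket against a
// hypothetical echo endpoint and handles a few event types. It assumes Socket
// exposes #send, as the "respondable" Message above suggests.
object SockExample {
  def main(args: Array[String]): Unit = {
    Sock.uri("echo.example.org") { // "ws://" is prepended automatically
      case Open(s) => s.send("hello")
      case Message(text, _) => println(s"received: $text")
      case Error(t) => t.printStackTrace()
    }
  }
}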
| jarin/unfiltered | netty-websockets/src/test/scala/tubesocks/tubesocks.scala | Scala | mit | 3,604 |
package powercards.cards
import powercards.{Game, BasicVictoryCard, ActionCard}
class Island extends ActionCard with BasicVictoryCard {
val cost = 4
val vps = 2
def play(game: Game): Unit = ???
}
| whence/powerlife | scala/powercards_oo/src/main/scala/powercards/cards/Island.scala | Scala | mit | 205 |
package com.automatak.render.dnp3.objects.groups
import com.automatak.render.dnp3.objects._
import com.automatak.render.dnp3.objects.VariationNames._
import FixedSizeField._
import com.automatak.render.dnp3.objects.generators.ConversionToCounter
// counters
object Group20 extends ObjectGroup {
def objects = List(Group20Var0, Group20Var1, Group20Var2, Group20Var5, Group20Var6)
def group: Byte = 20
def desc: String = "Counter"
def isEventGroup: Boolean = false
}
object Group20Var0 extends AnyVariation(Group20, 0)
object Group20Var1 extends FixedSize(Group20, 1, bit32WithFlag)(flags, count32) with ConversionToCounter
object Group20Var2 extends FixedSize(Group20, 2, bit16WithFlag)(flags, count16) with ConversionToCounter
object Group20Var5 extends FixedSize(Group20, 5, bit32WithoutFlag)(count32) with ConversionToCounter
object Group20Var6 extends FixedSize(Group20, 6, bit16WithoutFlag)(count16) with ConversionToCounter
| thiagoralves/OpenPLC_v2 | dnp3/generation/dnp3/src/main/scala/com/automatak/render/dnp3/objects/groups/Group20.scala | Scala | gpl-3.0 | 942 |
package tests.rescala
import java.util.concurrent.atomic.AtomicInteger
import rescala.Infiltrator.assertLevel
class SignalTestSuite extends RETests {
allEngines("signal Re Evaluates The Expression"){ engine => import engine._
val v = Var(0)
var i = 1
val s: Signal[Int] = v.map { _ => i }
i = 2
v.set(2)
assert(s.now == 2)
}
allEngines("the Expression Is Note Evaluated Every Time Get Val Is Called"){ engine => import engine._
var a = 10
val s: Signal[Int] = Signals.static()(_ => 1 + 1 + a)
assert(s.now === 12)
a = 11
assert(s.now === 12)
}
allEngines("simple Signal Returns Correct Expressions"){ engine => import engine._
val s: Signal[Int] = Signals.static()(_ => 1 + 1 + 1)
assert(s.now === 3)
}
allEngines("the Expression Is Evaluated Only Once"){ engine => import engine._
var a = 0
val v = Var(10)
val s1: Signal[Int] = v.map { i =>
a += 1
i % 10
}
assert(a == 1)
v.set(11)
assert(a == 2)
v.set(21)
assert(a == 3)
assert(s1.now === 1)
}
allEngines("handlers Are Executed"){ engine => import engine._
val test = new AtomicInteger(0)
val v = Var(1)
val s1 = v.map { 2 * _ }
val s2 = v.map { 3 * _ }
val s3 = Signals.lift(s1, s2) { _ + _ }
s1.changed += { (_) => test.incrementAndGet() }
s2.changed += { (_) => test.incrementAndGet() }
s3.changed += { (_) => test.incrementAndGet() }
assert(test.get == 0)
v.set(3)
assert(test.get == 3)
}
allEngines("level Is Correctly Computed"){ engine => import engine._
val v = Var(1)
val s1 = v.map { 2 * _ }
val s2 = v.map { 3 * _ }
val s3 = Signals.lift(s1, s2) { _ + _ }
assertLevel(v, 0)
assertLevel(s1, 1)
assertLevel(s2, 1)
assertLevel(s3, 2)
}
allEngines("no Change Propagations"){ engine => import engine._
val v = Var(1)
val s = v.map(_ => 1)
val s2 = Signal { s() }
assert(s2.now === 1)
assert(s.now === 1)
v.set(2)
assert(s.now === 1)
assert(s2.now === 1)
v.set(2)
assert(s2.now === 1)
assert(s.now === 1)
v.set(3)
assert(s2.now === 1)
assert(s.now === 1)
}
}
| volkc/REScala | Tests/shared/src/test/scala/tests/rescala/SignalTestSuite.scala | Scala | apache-2.0 | 2,219 |
package monocle.generic
import monocle.MonocleSuite
import monocle.law.discipline.IsoTests
import org.scalacheck.Arbitrary
import scalaz.Equal
class ProductSpec extends MonocleSuite {
case class Person(name: String, age: Int)
implicit val personEq: Equal[Person] = Equal.equalA
implicit val personArb: Arbitrary[Person] = Arbitrary(for{
n <- Arbitrary.arbitrary[String]
a <- Arbitrary.arbitrary[Int]
} yield Person(n, a))
checkAll("toTuple", IsoTests(product.productToTuple[Person]))
}
| NightRa/Monocle | test/src/test/scala/monocle/generic/ProductSpec.scala | Scala | mit | 510 |
package uber.nosurge.actors
import akka.actor.{Actor, Props}
import shared.{NotSubscribed, Subscribed}
import uber.nosurge.Settings
import uber.nosurge.actors.Message._
import uber.nosurge.actors.Models.{ClientState, InitialData}
import uber.nosurge.services.{FCMService, RideEstimatesService}
class Receptionist(ridersService: RideEstimatesService, fcmService: FCMService)(implicit val settings: Settings) extends Actor {
private var clients = Map.empty[Token, ClientState]
override def receive = {
case CheckStatus(token) =>
clients.get(token) match {
case Some(clientState) => sender ! Subscribed(token, clientState.priceEstimate, clientState.fixedDestination)
case None => sender ! NotSubscribed(token)
}
case subscribe@Subscribe(token, uberType, fixedDestination, priceEstimates) =>
if (clients.contains(token)) sender ! Failed("You are already subscribed to a notification!")
else {
priceEstimates.find(_.display_name.toLowerCase == uberType.toLowerCase) match {
case Some(initialPrice) =>
if (initialPrice.surge_multiplier.exists(_ > 1.0)) {
val surgeMultiplier = initialPrice.surge_multiplier.get
val initialState = InitialData(subscribe.token, subscribe.uberType, initialPrice, surgeMultiplier, fixedDestination)
val controller = context.system.actorOf(Controller.props(self, initialState, ridersService, fcmService))
clients += subscribe.token -> ClientState(initialPrice, fixedDestination, controller)
sender ! Acknowledged
} else {
sender ! Failed("Failed to subscribe. Price is not surged!")
}
case None => sender ! Failed("Failed to subscribe.")
}
}
case Unsubscribe(token) =>
clients.get(token).foreach { cs => context.stop(cs.worker) }
clients -= token
sender ! Acknowledged
}
}
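// Hedged usage sketch, not part of the original file: asks the Receptionist for a
// token's subscription status; the reply is either Subscribed or NotSubscribed.
// The ActorSystem, both services and the Settings are assumed to be provided.
object ReceptionistExample {
  import akka.actor.ActorSystem
  import akka.pattern.ask
  import akka.util.Timeout
  import scala.concurrent.duration._

  def checkStatus(system: ActorSystem,
                  rideEstimates: RideEstimatesService,
                  fcm: FCMService,
                  token: Token)(implicit settings: Settings) = {
    implicit val timeout: Timeout = Timeout(5.seconds)
    val receptionist = system.actorOf(Receptionist.props(rideEstimates, fcm))
    receptionist ? CheckStatus(token) // Future[Any] completing with the status reply
  }
}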
object Receptionist {
def props(ridersService: RideEstimatesService, fCMService: FCMService)(implicit settings: Settings) =
Props(new Receptionist(ridersService, fCMService))
}
| allantl/uber-nosurge-notifications | backend/src/main/scala/uber/nosurge/actors/Receptionist.scala | Scala | apache-2.0 | 2,123 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.api.stream.table
import org.apache.flink.api.scala._
import org.apache.flink.table.api.scala._
import org.apache.flink.table.expressions.{Upper, WindowReference}
import org.apache.flink.table.plan.logical.TumblingGroupWindow
import org.apache.flink.table.utils.TableTestUtil._
import org.apache.flink.table.utils.TableTestBase
import org.junit.Test
class CalcTest extends TableTestBase {
// ----------------------------------------------------------------------------------------------
// Tests for all the situations when we can do fields projection. Like selecting few fields
// from a large field count source.
// ----------------------------------------------------------------------------------------------
@Test
def testSelectFromWindow(): Unit = {
val util = streamTestUtil()
val sourceTable =
util.addTable[(Int, Long, String, Double)]("MyTable", 'a, 'b, 'c, 'd, 'rowtime.rowtime)
val resultTable = sourceTable
.window(Tumble over 5.millis on 'rowtime as 'w)
.groupBy('w)
.select(Upper('c).count, 'a.sum)
val expected =
unaryNode(
"DataStreamGroupWindowAggregate",
unaryNode(
"DataStreamCalc",
streamTableNode(0),
term("select", "c", "a", "rowtime", "UPPER(c) AS $f3")
),
term("window",
TumblingGroupWindow(
WindowReference("w"),
'rowtime,
5.millis)),
term("select", "COUNT($f3) AS TMP_0", "SUM(a) AS TMP_1")
)
util.verifyTable(resultTable, expected)
}
@Test
def testSelectFromGroupedWindow(): Unit = {
val util = streamTestUtil()
val sourceTable =
util.addTable[(Int, Long, String, Double)]("MyTable", 'a, 'b, 'c, 'd, 'rowtime.rowtime)
val resultTable = sourceTable
.window(Tumble over 5.millis on 'rowtime as 'w)
.groupBy('w, 'b)
.select(Upper('c).count, 'a.sum, 'b)
val expected = unaryNode(
"DataStreamCalc",
unaryNode(
"DataStreamGroupWindowAggregate",
unaryNode(
"DataStreamCalc",
streamTableNode(0),
term("select", "c", "a", "b", "rowtime", "UPPER(c) AS $f4")
),
term("groupBy", "b"),
term("window",
TumblingGroupWindow(
WindowReference("w"),
'rowtime,
5.millis)),
term("select", "b", "COUNT($f4) AS TMP_0", "SUM(a) AS TMP_1")
),
term("select", "TMP_0", "TMP_1", "b")
)
util.verifyTable(resultTable, expected)
}
@Test
def testMultiFilter(): Unit = {
val util = streamTestUtil()
val sourceTable = util.addTable[(Int, Long, String, Double)]("MyTable", 'a, 'b, 'c, 'd)
val resultTable = sourceTable.select('a, 'b)
.filter('a > 0)
.filter('b < 2)
.filter(('a % 2) === 1)
val expected = unaryNode(
"DataStreamCalc",
streamTableNode(0),
term("select", "a", "b"),
term("where", "AND(AND(>(a, 0), <(b, 2)), =(MOD(a, 2), 1))")
)
util.verifyTable(resultTable, expected)
}
@Test
def testIn(): Unit = {
val util = streamTestUtil()
val sourceTable = util.addTable[(Int, Long, String)]("MyTable", 'a, 'b, 'c)
val resultTable = sourceTable.select('a, 'b, 'c)
.where(s"${(1 to 30).map("b = " + _).mkString(" || ")} && c = 'xx'")
val expected = unaryNode(
"DataStreamCalc",
streamTableNode(0),
term("select", "a", "b", "c"),
term("where", s"AND(IN(b, ${(1 to 30).mkString(", ")}), =(c, 'xx'))")
)
util.verifyTable(resultTable, expected)
}
@Test
def testNotIn(): Unit = {
val util = streamTestUtil()
val sourceTable = util.addTable[(Int, Long, String)]("MyTable", 'a, 'b, 'c)
val resultTable = sourceTable.select('a, 'b, 'c)
.where(s"${(1 to 30).map("b != " + _).mkString(" && ")} || c != 'xx'")
val expected = unaryNode(
"DataStreamCalc",
streamTableNode(0),
term("select", "a", "b", "c"),
term("where", s"OR(NOT IN(b, ${(1 to 30).mkString(", ")}), <>(c, 'xx'))")
)
util.verifyTable(resultTable, expected)
}
}
| mylog00/flink | flink-libraries/flink-table/src/test/scala/org/apache/flink/table/api/stream/table/CalcTest.scala | Scala | apache-2.0 | 4,992 |
package scala.scalajs.js.typedarray
import scala.scalajs.js
/** <span class="badge badge-ecma6" style="float: right;">ECMAScript 6</span>
* An ArrayBufferView allows accessing the data of an [[ArrayBuffer]]
*/
@js.native
trait ArrayBufferView extends js.Object {
/** The underlying buffer of this ArrayBufferView */
val buffer: ArrayBuffer = js.native
/** The number of bytes of this ArrayBufferView */
val byteLength: Int = js.native
/** The offset of this ArrayBufferView in the underlying buffer */
val byteOffset: Int = js.native
}
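// Hedged sketch, not part of the original file: any typed array, e.g. an Int8Array,
// is an ArrayBufferView over an underlying ArrayBuffer; the three fields above
// describe the window of the buffer it covers.
object ArrayBufferViewExample {
  def describe(view: ArrayBufferView): String =
    s"${view.byteLength} bytes at offset ${view.byteOffset} " +
      s"of a ${view.buffer.byteLength}-byte buffer"

  def example(): String = {
    val buf = new ArrayBuffer(16)
    val view = new Int8Array(buf, 4, 8) // view 8 bytes starting at byte 4
    describe(view) // "8 bytes at offset 4 of a 16-byte buffer"
  }
}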
| mdedetrich/scala-js | library/src/main/scala/scala/scalajs/js/typedarray/ArrayBufferView.scala | Scala | bsd-3-clause | 555 |
import collection.AbstractMap
println(/* resolved: false */ AbstractMap.getClass)
println(classOf[/* path: scala.collection.AbstractMap, accessible: false*/ AbstractMap])
| LPTK/intellij-scala | testdata/resolve2/import/element/Trait.scala | Scala | apache-2.0 | 172 |
package com.github.vooolll.domain.friends
import com.github.vooolll.domain.FacebookPaging
import com.github.vooolll.domain.profile.FacebookUser
final case class FacebookFriends(
friends: List[FacebookUser],
paging: Option[FacebookPaging],
summary: Option[FacebookFriendsSummary]
)
final case class FacebookFriendsSummary(totalCount: Int)
| vooolll/facebook4s | src/main/scala/com/github/vooolll/domain/friends/FacebookFriends.scala | Scala | apache-2.0 | 347 |
/*
* Copyright (C) Lightbend Inc. <https://www.lightbend.com>
*/
package docs.home.scaladsl.persistence
import scala.concurrent.Future
import akka.NotUsed
import akka.stream.scaladsl.Source
import com.lightbend.lagom.scaladsl.api.Service
import com.lightbend.lagom.scaladsl.api.ServiceCall
trait BlogService extends Service {
def getPostSummaries(): ServiceCall[NotUsed, Source[PostSummary, _]]
override def descriptor = ???
}
| lagom/lagom | docs/manual/scala/guide/cluster/code/docs/home/scaladsl/persistence/BlogService.scala | Scala | apache-2.0 | 437 |
package hercules.actors.qualitycontrol
import hercules.entities.ProcessingUnit
import akka.actor.Props
object MiSeqQualityControllerActor {
def props(): Props =
Props(new MiSeqQualityControllerActor())
}
/**
*
* TODO: This is not yet implemented.
*
* Concrete implementation for doing quality control on a Illumina
* MiSeq runfolder
*/
class MiSeqQualityControllerActor extends IlluminaQualityControllerActor {
def passesQualityControl(processingUnit: ProcessingUnit) = ???
def receive = ???
}
| johandahlberg/hercules | src/main/scala/hercules/actors/qualitycontrol/MiSeqQualityControllerActor.scala | Scala | mit | 515 |
/*
* Copyright (c) 2014-2016
* nonblocking.at gmbh [http://www.nonblocking.at]
*
* This file is part of Cliwix.
*
* Cliwix is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package at.nonblocking.cliwix.webapp
import java.io.Serializable
import javax.inject.Inject
import javax.servlet.http.HttpServletRequest
import at.nonblocking.cliwix.core.{CliwixLiferayNotReadyException, CliwixLiferayNotSupportedException, CliwixLiferayNotFoundException, Cliwix}
import org.springframework.stereotype.Controller
import org.springframework.web.bind.annotation._
import scala.beans.BeanProperty
@Controller
@RequestMapping(value = Array("/services"))
class InfoController extends ControllerDefaults {
@BeanProperty
@Inject
var cliwixCoreHolder: CliwixCoreHolder = _
@RequestMapping(value = Array("/info"), method = Array(RequestMethod.GET))
def info(request: HttpServletRequest) = {
checkPermission(request)
new Info
}
@RequestMapping(value = Array("/info/status"), method = Array(RequestMethod.GET))
def liferayInfo(request: HttpServletRequest) = {
checkPermission(request)
try {
cliwixCoreHolder.getCliwix
new InfoStatus("READY")
} catch {
case e: CliwixLiferayNotFoundException =>
new InfoStatus("LIFERAY_NOT_FOUND")
case e: CliwixLiferayNotSupportedException =>
new InfoStatus("LIFERAY_VERSION_NOT_SUPPORTED")
case e: CliwixLiferayNotReadyException =>
new InfoStatus("NOT_READY")
}
}
class InfoStatus(s: String) extends Serializable {
@BeanProperty
val status: String = s
}
class Info extends Serializable {
@BeanProperty
val cliwixVersion = Cliwix.getVersion
@BeanProperty
val cliwixWorkspaceDirectory = webappConfig.getWorkspaceDirectory.getAbsolutePath
@BeanProperty
val liferayRelease = cliwixCoreHolder.getCliwix.getLiferayInfo.getReleaseInfo
}
}
| nonblocking/cliwix | cliwix-webapp/src/main/scala/at/nonblocking/cliwix/webapp/InfoController.scala | Scala | agpl-3.0 | 2,510 |
/*
* @author Philip Stutz
*
* Copyright 2010 University of Zurich
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.signalcollect
import com.signalcollect.interfaces.ComplexAggregation
import com.signalcollect.interfaces.WorkerStatistics
import akka.actor.ActorSystem
import com.signalcollect.interfaces.AggregationOperation
/**
* Graph represents the entire Signal/Collect graph with its vertices and edges.
* It offers functions to execute computations and aggregation operations on this graph.
* Additionally it extends GraphEditor, which means that it offers functions to manipulate the graph.
*
* @note This class is usually instantiated using the `GraphBuilder`
*
* @see GraphBuilder, DefaultGraph
*
* @example `val graph = GraphBuilder.build`
*
* @author Philip Stutz
*/
trait Graph[Id, Signal] extends GraphEditor[Id, Signal] {
def numberOfNodes: Int
def numberOfWorkers: Int
/**
* Starts the execution of the computation using the default execution parameters and
* returns information about the execution. The method blocks until the computation has ended.
*
* @return Information about the configuration and statistics about this execution.
*
* @note Blocks during execution.
*
* @note Termination: There are three reasons why a computation may end:
* - All signal/collect scores are below the thresholds
* - Steps limit reached (for synchronous compute graphs)
* - Time limit exceeded (not yet implemented)
*
* @note It may make sense to call this method repeatedly, for example if a compute graph is modified after execution.
*/
def execute(): ExecutionInformation[Id, Signal]
/**
* Starts the execution of the computation using the default execution parameters and
* returns information about the execution. The method blocks until the computation has ended.
*
* @param executionConfiguration Specifies configuration parameters that influence the execution.
*
* @return Information about the configuration and statistics about this execution.
*
* @note Blocks during execution.
*
* @note Termination: There are three reasons why a computation may end:
* - All signal/collect scores are below the thresholds
* - Steps limit reached (for synchronous compute graphs)
* - Time limit exceeded (not yet implemented)
*
* @note It may make sense to call this method repeatedly, for example if a compute graph is modified after execution.
*/
def execute(executionConfiguration: ExecutionConfiguration[Id, Signal]): ExecutionInformation[Id, Signal]
/**
* Recalculates the signal/collect scores of all vertices.
*
* @note If the scores are above the respective thresholds, the signal/collect operations
* will be executed when the computation is executed again.
*
* @note This operation is meant to be used after the foreachVertex operation in case
* the vertex signal/collect scores have changed.
*
* @see `foreachVertex`
*/
def recalculateScores(): Unit
/**
* Waits until all processing has finished.
*
* @note Only used with continuous asynchronous execution.
*/
def awaitIdle(): Unit
/**
* Shuts down the compute graph and frees associated resources.
*
* @note If methods on a ComputeGraph instance get called after having called `shutdown`, then the behavior is not specified.
*/
def shutdown(): Unit
/**
* Executes the function `f` on the vertex with id `vertexId` and returns the result.
*
* @return Returns the result of function `f`.
*
* @note The function `f` may be executed in another thread or on another computer.
*
* @note References to objects that are not reachable from the vertex passed to
* the function as a parameter may not be accessible or may be subject to race conditions.
*
* @param f The function that gets executed on the vertex with id `vertexId`
*
* @example `forVertexWithId(vertexId = 1, f = { v: Vertex[_, _, _, _] => v.state })`
*
* @usecase def forVertexWithId(vertexId: Any, f: Vertex[_, _, _, _] => String): String
*/
def forVertexWithId[VertexType <: Vertex[Id, _, Id, Signal], ResultType](vertexId: Id, f: VertexType => ResultType): ResultType
/**
* Executes the function `f` on all vertices.
*
* @note The function `f` may be executed in multiple other threads, beware of race conditions.
*
* @note This function may be executed on other machines and references
* to objects that are not reachable from the vertex-parameter may not be accessible.
*/
def foreachVertex(f: Vertex[Id, _, Id, Signal] => Unit): Unit
/**
* The worker passes a GraphEditor to function `f`, and then executes the resulting function on all vertices.
*
* @note The resulting function may be executed in multiple other threads, beware of race conditions.
*
* @note The resulting function may be executed on other machines and references
* to objects that are not reachable from the vertex-parameter may not be accessible.
*/
def foreachVertexWithGraphEditor(f: GraphEditor[Id, Signal] => Vertex[Id, _, Id, Signal] => Unit): Unit
/**
* Applies an aggregation operation to the graph and returns the result.
*
* @param aggregationOperation The aggregation operation that will get executed on the graph
*
* @return The result of the aggregation operation.
*
* @note There is no guarantee about the order in which the aggregation operations get executed on the vertices.
*
* @example See concrete implementations of other aggregation operations, i.e. `SumOfStates`.
*/
def aggregate[ResultType](aggregationOperation: ComplexAggregation[_, ResultType]): ResultType
/**
* Simplified alternative API for aggregation operations.
*
* @param map Function that extracts the relevant value from a vertex.
* @param reduce Aggregation operation that is executed on the extracted values.
* @param neutralElement Neutral element of the aggregation operation 'reduce'.
*
* @example Computes sum of ranks: graph.mapReduce[PageRankVertex, Double](v => v.state, _ + _, 0)
*/
def mapReduce[VertexType <: Vertex[_, _, _, _], ResultType](
map: VertexType => ResultType,
reduce: (ResultType, ResultType) => ResultType,
neutralElement: ResultType): ResultType = {
val r = reduce // Rename to avoid collision with the name of the inner function.
val aggregation = new AggregationOperation[ResultType] {
def extract(v: Vertex[_, _, _, _]): ResultType = {
try {
map(v.asInstanceOf[VertexType])
} catch {
case _: Throwable =>
neutralElement
}
}
def reduce(elements: Stream[ResultType]): ResultType = {
elements.foldLeft(neutralElement)(r)
}
}
val result = aggregate(aggregation)
result
}
/**
* Resets operation statistics and removes all the vertices and edges in this graph.
* Leaves the message counters untouched.
*/
def reset(): Unit
/**
* Returns the local internally used actor system of this this Signal/Collect graph.
*
* @return the internal ActorSystem.
*/
private[signalcollect] def system: ActorSystem
/**
* Gathers worker statistics.
*
* @return Various individual statistics from all workers.
*/
private[signalcollect] def getWorkerStatistics(): List[WorkerStatistics]
/**
* Creates a snapshot of all the vertices in all workers.
* Does not store the toSignal/toCollect collections or pending messages.
* Should only be used when the workers are idle.
* Overwrites any previous snapshot that might exist.
*/
private[signalcollect] def snapshot(): Unit
/**
* Restores the last snapshot of all the vertices in all workers.
* Does not store the toSignal/toCollect collections or pending messages.
* Should only be used when the workers are idle.
*/
private[signalcollect] def restore(): Unit
/**
* Deletes the worker snapshots if they exist.
*/
private[signalcollect] def deleteSnapshot(): Unit
}
/**
* In order to unlock advanced methods on Graph, add this import to your program:
* import com.signalcollect.ExtendedGraph._
*/
object ExtendedGraph {
implicit class InternalGraph(g: Graph[_, _]) {
/**
* Returns the local internally used actor system of this this Signal/Collect graph.
*
* @return the internal ActorSystem.
*/
def system(): ActorSystem = g.system
/**
* Gathers worker statistics.
*
* @return Various individual statistics from all workers.
*/
def getWorkerStatistics() = g.getWorkerStatistics
/**
* Creates a snapshot of all the vertices in all workers.
* Does not store the toSignal/toCollect collections or pending messages.
* Should only be used when the workers are idle.
* Overwrites any previous snapshot that might exist.
*/
def snapshot() = g.snapshot
/**
* Restores the last snapshot of all the vertices in all workers.
* Does not store the toSignal/toCollect collections or pending messages.
* Should only be used when the workers are idle.
*/
def restore() = g.restore
/**
* Deletes the worker snapshots if they exist.
*/
def deleteSnapshot() = g.deleteSnapshot
}
}
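// Hedged usage sketch, not part of the original file: mirrors the mapReduce scaladoc
// example above for vertices whose state is a Double (e.g. PageRank vertices).
object MapReduceExample {
  def sumOfStates(graph: Graph[Any, Any]): Double =
    graph.mapReduce[Vertex[Any, Double, _, _], Double](
      map = _.state,        // extract each vertex's Double state
      reduce = _ + _,       // sum the extracted values
      neutralElement = 0.0) // sum of an empty graph
}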
| uzh/signal-collect | src/main/scala/com/signalcollect/Graph.scala | Scala | apache-2.0 | 9,952 |
/*
* Copyright 2017 Datamountaineer.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.datamountaineer.streamreactor.connect.rethink.source
import java.util
import java.util.concurrent.LinkedBlockingQueue
import java.util.concurrent.atomic.AtomicBoolean
import com.datamountaineer.streamreactor.connect.rethink.ReThinkConnection
import com.datamountaineer.streamreactor.connect.rethink.config.{ReThinkSourceConfig, ReThinkSourceSetting, ReThinkSourceSettings}
import com.rethinkdb.RethinkDB
import com.rethinkdb.net.{Connection, Cursor}
import com.typesafe.scalalogging.StrictLogging
import org.apache.kafka.connect.data.SchemaBuilder
import org.apache.kafka.connect.source.SourceRecord
import scala.collection.JavaConverters._
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future
object ReThinkSourceReadersFactory {
def apply(config: ReThinkSourceConfig, r: RethinkDB): Set[ReThinkSourceReader] = {
    val conn = ReThinkConnection(r, config)
    val settings = ReThinkSourceSettings(config)
    settings.map(s => new ReThinkSourceReader(r, conn, s))
}
}
class ReThinkSourceReader(rethink: RethinkDB, conn: Connection, setting: ReThinkSourceSetting)
extends StrictLogging {
logger.info(s"Initialising ReThink Reader for ${setting.source}")
private val keySchema = SchemaBuilder.string().optional().build()
private val valueSchema = ChangeFeedStructBuilder.schema
private val sourcePartition = Map.empty[String, String]
private val offset = Map.empty[String, String]
private val stopFeed = new AtomicBoolean(false)
private val handlingFeed = new AtomicBoolean(false)
  private var feed: Cursor[util.HashMap[String, String]] = _
val queue = new LinkedBlockingQueue[SourceRecord]()
val batchSize = setting.batchSize
def start() = {
feed = getChangeFeed()
startFeed(feed)
}
def stop() = {
logger.info(s"Closing change feed for ${setting.source}")
stopFeed.set(true)
while (handlingFeed.get()) {
logger.debug("Waiting for feed to shutdown...")
Thread.sleep(1000)
}
feed.close()
logger.info(s"Change feed closed for ${setting.source}")
}
/**
* Start the change feed, wrap in future.
**/
private def startFeed(feed: Cursor[util.HashMap[String, String]]) : Future[Unit] = Future(handleFeed(feed))
/**
* Construct a change feed, convert any changes
* to a Struct and add to queue for draining
* by the task
*
**/
private def handleFeed(feed: Cursor[util.HashMap[String, String]]) = {
handlingFeed.set(true)
//feed.next is blocking
while(!stopFeed.get()) {
logger.debug(s"Waiting for next change feed event for ${setting.source}")
val cdc = convert(feed.next().asScala.toMap)
queue.put(cdc)
}
handlingFeed.set(false)
}
private def getChangeFeed(): Cursor[util.HashMap[String, String]] = {
logger.info(s"Initialising change feed for ${setting.source}")
rethink
.db(setting.db)
.table(setting.source)
.changes()
.optArg("include_states", true)
.optArg("include_initial", setting.initialise)
.optArg("include_types", true)
.run(conn)
}
private def convert(feed: Map[String, String]) = {
new SourceRecord(sourcePartition.asJava, offset.asJava, setting.target, keySchema, setting.source, valueSchema,
ChangeFeedStructBuilder(feed))
}
}
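// Hedged usage sketch, not part of the original file: a poll loop draining the
// reader's queue in batches of at most `batchSize`, as a Kafka Connect source
// task's poll() might.
object ReThinkSourceReaderExample {
  def poll(reader: ReThinkSourceReader): java.util.List[SourceRecord] = {
    val records = new java.util.ArrayList[SourceRecord](reader.batchSize)
    reader.queue.drainTo(records, reader.batchSize)
    records
  }
}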
| datamountaineer/stream-reactor | kafka-connect-rethink/src/main/scala/com/datamountaineer/streamreactor/connect/rethink/source/ReThinkSourceReadersFactory.scala | Scala | apache-2.0 | 3,926 |
package net.mtgto.garoon
case class RequestToken(value: String)
| mtgto/garoon | src/main/scala/net/mtgto/garoon/RequestToken.scala | Scala | gpl-3.0 | 65 |
package mesosphere.marathon
package raml
import java.time.OffsetDateTime
import mesosphere.marathon.core.condition
import mesosphere.marathon.core.health.{ MesosCommandHealthCheck, MesosHttpHealthCheck, MesosTcpHealthCheck, PortReference }
import mesosphere.marathon.core.instance
import mesosphere.marathon.core.pod.{ MesosContainer, PodDefinition }
import mesosphere.marathon.core.task
import mesosphere.marathon.raml.LocalVolumeConversion.localVolumeIdWrites
import mesosphere.marathon.stream.Implicits._
trait PodStatusConversion {
import PodStatusConversion._
implicit val taskToContainerStatus: Writes[(PodDefinition, task.Task), ContainerStatus] = Writes { src =>
val (pod, task) = src
val since = task.status.startedAt.getOrElse(task.status.stagedAt).toOffsetDateTime // TODO(jdef) inaccurate
val maybeContainerSpec: Option[MesosContainer] = pod.container(task.taskId)
// possible that a new pod spec might not have a container with a name that was used in an old pod spec?
val endpointStatus = endpointStatuses(pod, maybeContainerSpec, task)
// some other layer should provide termination history
// if, for some very strange reason, we cannot determine the container name from the task ID then default to
// the Mesos task ID itself
val displayName: String = task.taskId.containerName.getOrElse(task.taskId.mesosTaskId.getValue)
val resources: Option[Resources] = {
task.status.condition match {
case condition.Condition.Staging |
condition.Condition.Starting |
condition.Condition.Running |
condition.Condition.Reserved |
condition.Condition.Unreachable |
condition.Condition.Killing =>
maybeContainerSpec.map(_.resources)
case _ =>
None
}
}
// TODO(jdef) message
ContainerStatus(
name = displayName,
status = condition.Condition.toMesosTaskStateOrStaging(task.status.condition).toString,
statusSince = since,
containerId = task.launchedMesosId.map(_.getValue),
endpoints = endpointStatus,
conditions = List(maybeHealthCondition(task.status, maybeContainerSpec, endpointStatus, since)).flatten,
resources = resources,
lastUpdated = since, // TODO(jdef) pods fixme
lastChanged = since // TODO(jdef) pods.fixme
)
}
/**
* generate a pod instance status RAML for some instance.
*/
implicit val podInstanceStatusRamlWriter: Writes[(PodDefinition, instance.Instance), PodInstanceStatus] = Writes { src =>
val (pod, instance) = src
assume(
pod.id == instance.instanceId.runSpecId,
s"pod id ${pod.id} should match spec id of the instance ${instance.instanceId.runSpecId}")
val containerStatus: Seq[ContainerStatus] = instance.tasksMap.values.map(t => Raml.toRaml((pod, t)))(collection.breakOut)
val (derivedStatus: PodInstanceState, message: Option[String]) = podInstanceState(
instance.state.condition, containerStatus)
val networkStatus: Seq[NetworkStatus] = networkStatuses(instance.tasksMap.values.to[Seq])
val resources: Resources = containerStatus.flatMap(_.resources).foldLeft(PodDefinition.DefaultExecutorResources) { (all, res) =>
all.copy(cpus = all.cpus + res.cpus, mem = all.mem + res.mem, disk = all.disk + res.disk, gpus = all.gpus + res.gpus)
}
val localVolumes = instance.reservation.fold(Seq.empty[LocalVolumeId]) { reservation =>
reservation.volumeIds.toRaml
}
// TODO(jdef) message, conditions: for example it would probably be nice to see a "healthy" condition here that
// summarizes the conditions of the same name for each of the instance's containers.
PodInstanceStatus(
id = instance.instanceId.idString,
status = derivedStatus,
statusSince = instance.state.since.toOffsetDateTime,
agentId = instance.agentInfo.agentId,
agentHostname = Some(instance.agentInfo.host),
agentRegion = instance.agentInfo.region,
agentZone = instance.agentInfo.zone,
resources = Some(resources),
networks = networkStatus,
containers = containerStatus,
localVolumes = localVolumes,
message = message,
specReference = Some(s"/v2/pods${pod.id}::versions/${instance.runSpecVersion.toOffsetDateTime}"),
lastUpdated = instance.state.since.toOffsetDateTime, // TODO(jdef) pods we don't actually track lastUpdated yet
lastChanged = instance.state.since.toOffsetDateTime
)
}
// TODO: Consider using a view here (since we flatMap and groupBy)
def networkStatuses(tasks: Seq[task.Task]): Seq[NetworkStatus] = tasks.flatMap { task =>
task.status.mesosStatus.filter(_.hasContainerStatus).fold(List.empty[NetworkStatus]) { mesosStatus =>
mesosStatus.getContainerStatus.getNetworkInfosList.map { networkInfo =>
NetworkStatus(
name = if (networkInfo.hasName) Some(networkInfo.getName) else None,
addresses = networkInfo.getIpAddressesList
.withFilter(_.hasIpAddress).map(_.getIpAddress)(collection.breakOut)
)
}(collection.breakOut)
}
}.groupBy(_.name).values.map { toMerge =>
val networkStatus: NetworkStatus = toMerge.reduceLeft { (merged, single) =>
merged.copy(addresses = merged.addresses ++ single.addresses)
}
networkStatus.copy(addresses = networkStatus.addresses.distinct)
}(collection.breakOut)
def healthCheckEndpoint(spec: MesosContainer): Option[String] = {
def invalidPortIndex[T](msg: String): T = throw new IllegalStateException(msg)
spec.healthCheck.collect {
case check: MesosHttpHealthCheck => check.portIndex
case check: MesosTcpHealthCheck => check.portIndex
}.map {
_.fold(
invalidPortIndex(s"missing portIndex to map to an endpoint for container ${spec.name}")
){
case portName: PortReference.ByName => portName.value
case _ => invalidPortIndex("index byInt not supported for pods")
}
}
}
/**
* check that task is running; if so, calculate health condition according to possible command-line health check
* or else endpoint health checks.
*/
def maybeHealthCondition(
status: task.Task.Status,
maybeContainerSpec: Option[MesosContainer],
endpointStatuses: Seq[ContainerEndpointStatus],
since: OffsetDateTime): Option[StatusCondition] = {
status.condition match {
case condition.Condition.Created |
condition.Condition.Staging |
condition.Condition.Starting |
condition.Condition.Reserved =>
// not useful to report health conditions for tasks that have never reached a running state
None
case _ =>
val healthy: Option[(Boolean, String)] = maybeContainerSpec.flatMap { containerSpec =>
val usingCommandHealthCheck: Boolean = containerSpec.healthCheck.exists {
case _: MesosCommandHealthCheck => true
case _ => false
}
if (usingCommandHealthCheck) {
Some(status.healthy.fold(false -> HEALTH_UNREPORTED) {
_ -> HEALTH_REPORTED
})
} else {
val ep = healthCheckEndpoint(containerSpec)
ep.map { endpointName =>
val epHealthy: Option[Boolean] = endpointStatuses.find(_.name == endpointName).flatMap(_.healthy)
// health check endpoint was specified, but if we don't have a value for health yet then generate a
// meaningful reason code
epHealthy.fold(false -> HEALTH_UNREPORTED) {
_ -> HEALTH_REPORTED
}
}
}
}
healthy.map { h =>
StatusCondition(
name = STATUS_CONDITION_HEALTHY,
lastChanged = since,
lastUpdated = since, // TODO(jdef) pods only changes are propagated, so this isn't right
value = h._1.toString,
reason = Some(h._2)
)
}
}
}
def endpointStatuses(
pod: PodDefinition,
maybeContainerSpec: Option[MesosContainer],
task: core.task.Task): Seq[ContainerEndpointStatus] =
maybeContainerSpec.flatMap { _ =>
if (task.isActive) {
val taskHealthy: Option[Boolean] = // only calculate this once so we do it here
task.status.healthy
task.taskId.containerName.flatMap { containerName =>
pod.container(containerName).flatMap { containerSpec =>
val endpointRequestedHostPort: Seq[String] =
containerSpec.endpoints.withFilter(_.hostPort.isDefined).map(_.name)
val reservedHostPorts: Seq[Int] = task.status.networkInfo.hostPorts
// TODO(jdef): This assumption doesn't work...
/*assume(
endpointRequestedHostPort.size == reservedHostPorts.size,
s"number of reserved host ports ${reservedHostPorts.size} should equal number of" +
s"requested host ports ${endpointRequestedHostPort.size}")
*/
// we assume that order has been preserved between the allocated port list and the endpoint list
// TODO(jdef) pods what actually guarantees that this doesn't change? (do we check this upon pod update?)
def reservedEndpointStatus: Seq[ContainerEndpointStatus] =
endpointRequestedHostPort.zip(reservedHostPorts).map {
case (name, allocated) =>
ContainerEndpointStatus(name, Some(allocated))
}
def unreservedEndpointStatus: Seq[ContainerEndpointStatus] = containerSpec.endpoints
.withFilter(_.hostPort.isEmpty).map(ep => ContainerEndpointStatus(ep.name))
def withHealth: Seq[ContainerEndpointStatus] = {
val allEndpoints = reservedEndpointStatus ++ unreservedEndpointStatus
// check whether health checks are enabled for this endpoint. if they are then propagate the mesos task
// health check result.
healthCheckEndpoint(containerSpec).flatMap { name =>
// update the `health` field of the endpoint status...
allEndpoints.find(_.name == name).map(_.copy(healthy = taskHealthy))
}.fold(allEndpoints) { updated =>
// ... and replace the old entry with the one from above
allEndpoints.filter(_.name != updated.name) ++ List(updated)
}
}
Some(withHealth)
}
}
} else {
None
}
}.getOrElse(List.empty[ContainerEndpointStatus])
def podInstanceState(
instanceCondition: core.condition.Condition,
containerStatus: Seq[ContainerStatus]): (PodInstanceState, Option[String]) = {
instanceCondition match {
case condition.Condition.Created |
condition.Condition.Reserved =>
PodInstanceState.Pending -> None
case condition.Condition.Staging |
condition.Condition.Starting =>
PodInstanceState.Staging -> None
case condition.Condition.Error |
condition.Condition.Failed |
condition.Condition.Finished |
condition.Condition.Killed |
condition.Condition.Gone |
condition.Condition.Dropped |
condition.Condition.Unknown |
condition.Condition.Killing =>
PodInstanceState.Terminal -> None
case condition.Condition.Unreachable |
condition.Condition.UnreachableInactive =>
PodInstanceState.Degraded -> Some(MSG_INSTANCE_UNREACHABLE)
case condition.Condition.Running =>
if (containerStatus.exists(_.conditions.exists { cond =>
cond.name == STATUS_CONDITION_HEALTHY && cond.value == "false"
}))
PodInstanceState.Degraded -> Some(MSG_INSTANCE_UNHEALTHY_CONTAINERS)
else
PodInstanceState.Stable -> None
}
}
}
object PodStatusConversion extends PodStatusConversion {
val HEALTH_UNREPORTED = "health-unreported-by-mesos"
val HEALTH_REPORTED = "health-reported-by-mesos"
val STATUS_CONDITION_HEALTHY = "healthy"
val MSG_INSTANCE_UNREACHABLE = "pod instance has become unreachable"
val MSG_INSTANCE_UNHEALTHY_CONTAINERS = "at least one container is not healthy"
}
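// Hedged sketch, not part of the original file: exercises podInstanceState above.
// A running instance whose containers report no failing "healthy" condition maps to
// Stable, while an unreachable one is Degraded with an explanatory message.
object PodInstanceStateExample {
  val stable: (PodInstanceState, Option[String]) =
    PodStatusConversion.podInstanceState(condition.Condition.Running, Seq.empty)
  // stable == (PodInstanceState.Stable, None)

  val unreachable: (PodInstanceState, Option[String]) =
    PodStatusConversion.podInstanceState(condition.Condition.Unreachable, Seq.empty)
  // unreachable == (PodInstanceState.Degraded, Some(PodStatusConversion.MSG_INSTANCE_UNREACHABLE))
}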
| guenter/marathon | src/main/scala/mesosphere/marathon/raml/PodStatusConversion.scala | Scala | apache-2.0 | 12,236 |
/*
* La Trobe University - Distributed Deep Learning System
* Copyright 2016 Matthias Langer ([email protected])
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package edu.latrobe.blaze.modules.jvm
import edu.latrobe._
import edu.latrobe.blaze.modules.{SoftPlus10, SoftPlus10Builder}
abstract class SoftPlus10_JVM
extends SoftPlus10
with MapLayer_JVM[SoftPlus10Builder] {
// ---------------------------------------------------------------------------
// Forward propagation related.
// ---------------------------------------------------------------------------
final override protected def doPredict(input: Tensor)
: RealArrayTensor = {
val out = input.toRealArrayTensor
doPredict(out)
out
}
protected def doPredict(output: RealArrayTensor): Unit
final override protected def doPredictInv(output: Tensor)
: RealArrayTensor = {
val inp = output.toRealArrayTensor
doPredictInv(inp)
inp
}
protected def doPredictInv(input: RealArrayTensor): Unit
// ---------------------------------------------------------------------------
// Back propagation related.
// ---------------------------------------------------------------------------
final override protected def doDeriveInputError(input: Tensor, error: Tensor)
: RealArrayTensor = {
val inp = input.asOrToRealArrayTensor
val err = error.asOrToRealArrayTensor
doDeriveInputError(inp, err)
if (inp ne input) {
inp.close()
}
err
}
protected def doDeriveInputError(input: RealArrayTensor,
error: RealArrayTensor)
: Unit
}
| bashimao/ltudl | blaze/src/main/scala/edu/latrobe/blaze/modules/jvm/SoftPlus10_JVM.scala | Scala | apache-2.0 | 2,137 |
/**
* Copyright 2015 Thomson Reuters
*
* Licensed under the Apache License, Version 2.0 (the “License”); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an “AS IS” BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cmwell.util.exceptions
class RetryException extends Exception {}
| hochgi/CM-Well | server/cmwell-util/src/main/scala/cmwell/util/exceptions/RetryException.scala | Scala | apache-2.0 | 690 |
package mesosphere.marathon
package core.storage.store.impl.zk
import akka.Done
import akka.util.ByteString
import scala.jdk.CollectionConverters._
import org.apache.curator.framework.CuratorFramework
import org.apache.curator.framework.api.CuratorEventType._
import org.apache.curator.framework.api.{BackgroundCallback, CuratorEvent}
import org.apache.zookeeper.KeeperException
import org.apache.zookeeper.data.{ACL, Stat}
import scala.concurrent.duration.Duration
import scala.concurrent.{CanAwait, ExecutionContext, Future, Promise, TimeoutException}
import scala.util.{Failure, Success, Try}
private[zk] abstract class ZkFuture[T] extends Future[T] with BackgroundCallback {
private val promise = Promise[T]()
override def onComplete[U](f: (Try[T]) => U)(implicit executor: ExecutionContext): Unit =
promise.future.onComplete(f)
override def isCompleted: Boolean = promise.isCompleted
override def value: Option[Try[T]] = promise.future.value
override def processResult(client: CuratorFramework, event: CuratorEvent): Unit = {
import KeeperException.Code._
val resultCode = KeeperException.Code.get(event.getResultCode)
if (resultCode != OK) {
promise.failure(KeeperException.create(resultCode))
} else {
promise.complete(processEvent(event))
}
}
@scala.throws[Exception](classOf[Exception])
override def result(atMost: Duration)(implicit permit: CanAwait): T = promise.future.result(atMost)
@scala.throws[InterruptedException](classOf[InterruptedException])
@scala.throws[TimeoutException](classOf[TimeoutException])
override def ready(atMost: Duration)(implicit permit: CanAwait): this.type = {
promise.future.ready(atMost)
this
}
def fail(e: Throwable): Future[T] = {
promise.tryFailure(e)
this
}
protected def processEvent(event: CuratorEvent): Try[T]
override def transform[S](f: Try[T] => Try[S])(implicit executor: ExecutionContext): Future[S] = {
val p = Promise[S]
onComplete { tryT =>
p.complete(Try(f(tryT)).flatten)
}(executor)
p.future
}
override def transformWith[S](f: Try[T] => Future[S])(implicit executor: ExecutionContext): Future[S] = {
val p = Promise[S]
onComplete { tryT =>
try p.completeWith(f(tryT))
catch {
case ex: Throwable =>
p.failure(ex)
}
}(executor)
p.future
}
}
private class CreateOrDeleteFuture extends ZkFuture[String] {
override protected def processEvent(event: CuratorEvent): Try[String] =
event.getType match {
case CREATE | DELETE =>
Success(event.getPath)
case _ =>
Failure(new IllegalArgumentException(s"${event.getType} is not a CREATE or DELETE operation"))
}
}
case class ExistsResult(path: String, stat: Stat)
private class ExistsFuture extends ZkFuture[ExistsResult] {
override protected def processEvent(event: CuratorEvent): Try[ExistsResult] =
event.getType match {
case EXISTS =>
Success(ExistsResult(event.getPath, event.getStat))
case _ =>
Failure(new IllegalArgumentException(s"${event.getType} is not an EXISTS operation"))
}
}
case class GetData(path: String, stat: Stat, data: ByteString)
private class GetDataFuture extends ZkFuture[GetData] {
override protected def processEvent(event: CuratorEvent): Try[GetData] =
event.getType match {
case GET_DATA =>
Success(GetData(event.getPath, event.getStat, ByteString(event.getData)))
case _ =>
Failure(new IllegalArgumentException(s"${event.getType} is not a GET_DATA operation"))
}
}
case class SetData(path: String, stat: Stat)
private class SetDataFuture extends ZkFuture[SetData] {
override protected def processEvent(event: CuratorEvent): Try[SetData] =
event.getType match {
case SET_DATA =>
Success(SetData(event.getPath, event.getStat))
case _ =>
Failure(new IllegalArgumentException(s"${event.getType} is not a SET_DATA operation"))
}
}
case class Children(path: String, stat: Stat, children: Seq[String])
private class ChildrenFuture extends ZkFuture[Children] {
override protected def processEvent(event: CuratorEvent): Try[Children] =
event.getType match {
case CHILDREN =>
Success(Children(event.getPath, event.getStat, event.getChildren.asScala.to(IndexedSeq)))
case _ =>
Failure(new IllegalArgumentException(s"${event.getType} is not a CHILDREN operation"))
}
}
private class SyncFuture extends ZkFuture[Option[Stat]] {
override protected def processEvent(event: CuratorEvent): Try[Option[Stat]] =
event.getType match {
case SYNC =>
Success(Option(event.getStat))
case _ =>
Failure(new IllegalArgumentException(s"${event.getType} is not a SYNC operation"))
}
}
private class GetAclFuture extends ZkFuture[Seq[ACL]] {
override protected def processEvent(event: CuratorEvent): Try[Seq[ACL]] =
event.getType match {
case GET_ACL =>
Success(event.getACLList.asScala.to(IndexedSeq))
case _ =>
Failure(new IllegalArgumentException(s"${event.getType} is not a GET_ACL operation"))
}
}
private class SetAclFuture extends ZkFuture[Done] {
override protected def processEvent(event: CuratorEvent): Try[Done] =
event.getType match {
case SET_ACL =>
Success(Done)
case _ =>
Failure(new IllegalArgumentException(s"${event.getType} is not a SET_ACL operation"))
}
}
private[zk] object ZkFuture {
def create: ZkFuture[String] = new CreateOrDeleteFuture
def delete: ZkFuture[String] = new CreateOrDeleteFuture
def exists: ZkFuture[ExistsResult] = new ExistsFuture
def data: ZkFuture[GetData] = new GetDataFuture
def setData: ZkFuture[SetData] = new SetDataFuture
def children: ZkFuture[Children] = new ChildrenFuture
def sync: ZkFuture[Option[Stat]] = new SyncFuture
def acl: ZkFuture[Seq[ACL]] = new GetAclFuture
def setAcl: ZkFuture[Done] = new SetAclFuture
}
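// Hedged usage sketch, not part of the original file: wires a ZkFuture in as the
// BackgroundCallback of an asynchronous Curator read, turning the callback into a
// Scala Future of the typed result.
private[zk] object ZkFutureExample {
  def readData(client: CuratorFramework, path: String): Future[GetData] = {
    val future = ZkFuture.data
    try {
      client.getData().inBackground(future).forPath(path)
      future
    } catch {
      case ex: Throwable => future.fail(ex)
    }
  }
}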
| mesosphere/marathon | src/main/scala/mesosphere/marathon/core/storage/store/impl/zk/ZkFuture.scala | Scala | apache-2.0 | 5,978 |
package wdl
import cats.instances.list._
import cats.syntax.apply._
import cats.syntax.foldable._
import shapeless.Coproduct
import wdl.AstTools.EnhancedAstNode
import wdl.exception.{ValidationException, VariableLookupException, VariableNotFoundException}
import wdl.expression.WdlFunctions
import wdl4s.parser.WdlParser.{Ast, SyntaxError, Terminal}
import wom.callable.Callable
import wom.callable.Callable._
import wom.graph.CallNode._
import wom.graph.GraphNodePort.OutputPort
import wom.graph._
import wom.graph.expression.ExpressionNode
import wom.types.WomOptionalType
import wom.values.{WomOptionalValue, WomValue}
import scala.language.postfixOps
import scala.util.{Failure, Success, Try}
object WdlCall {
def apply(ast: Ast,
namespaces: Seq[WdlNamespace],
tasks: Seq[WdlTask],
workflows: Seq[WdlWorkflow],
wdlSyntaxErrorFormatter: WdlSyntaxErrorFormatter): WdlCall = {
val alias: Option[String] = ast.getAttribute("alias") match {
case x: Terminal => Option(x.getSourceString)
case _ => None
}
val taskName = ast.getAttribute("task").sourceString
val callable = WdlNamespace.findCallable(taskName, namespaces, tasks ++ workflows) getOrElse {
throw new SyntaxError(wdlSyntaxErrorFormatter.callReferencesBadTaskName(ast, taskName))
}
val callInputSectionMappings = processCallInput(ast, wdlSyntaxErrorFormatter)
callable match {
case task: WdlTask => WdlTaskCall(alias, task, callInputSectionMappings, ast)
case workflow: WdlWorkflow => WdlWorkflowCall(alias, workflow, callInputSectionMappings, ast)
}
}
private def processCallInput(ast: Ast,
wdlSyntaxErrorFormatter: WdlSyntaxErrorFormatter): Map[String, WdlExpression] = {
AstTools.callInputSectionIOMappings(ast, wdlSyntaxErrorFormatter) map { a =>
val key = a.getAttribute("key").sourceString
val expression = new WdlExpression(a.getAttribute("value"))
(key, expression)
} toMap
}
private[wdl] def buildWomNodeAndInputs(wdlCall: WdlCall, localLookup: Map[String, GraphNodePort.OutputPort], outerLookup: Map[String, GraphNodePort.OutputPort], preserveIndexForOuterLookups: Boolean) = {
import common.validation.ErrorOr._
val callNodeBuilder = new CallNode.CallNodeBuilder()
/*
* Each input mapping gets its own ExpressionNode:
*
* call my_task { input:
* input1 = "hi!" -> ExpressionNode with no input port
* input3 = other_task.out + 2 -> ExpressionNode with an input port pointing to the output port of other_task.out
* }
*/
def expressionNodeMappings: ErrorOr[Map[LocalName, ExpressionNode]] = wdlCall.inputMappings traverse {
case (inputName, wdlExpression) =>
val identifier = wdlCall.womIdentifier.combine(inputName)
WdlWomExpression.toExpressionNode(identifier, WdlWomExpression(wdlExpression, wdlCall), localLookup, outerLookup, preserveIndexForOuterLookups, wdlCall) map {
LocalName(inputName) -> _
}
}
/*
* Fold over the input definitions and
* 1) assign each input definition its InputDefinitionPointer
* 2) if necessary, create a graph input node and assign its output port to the input definition
*
* The InputDefinitionFold accumulates the input definition mappings, the create graph input nodes, and the expression nodes.
*/
def foldInputDefinitions(expressionNodes: Map[LocalName, ExpressionNode], callable: Callable): InputDefinitionFold = {
// Updates the fold with a new graph input node. Happens when an optional or required undefined input without an
// expression node mapping is found
def withGraphInputNode(inputDefinition: InputDefinition, graphInputNode: ExternalGraphInputNode) = {
InputDefinitionFold(
mappings = Map(inputDefinition -> Coproduct[InputDefinitionPointer](graphInputNode.singleOutputPort: OutputPort)),
callInputPorts = Set(callNodeBuilder.makeInputPort(inputDefinition, graphInputNode.singleOutputPort)),
newGraphInputNodes = Set(graphInputNode)
)
}
callable.inputs.foldMap {
// If there is an input mapping for this input definition, use that
case inputDefinition if expressionNodes.contains(inputDefinition.localName) =>
val expressionNode = expressionNodes(inputDefinition.localName)
InputDefinitionFold(
mappings = Map(inputDefinition -> expressionNode.inputDefinitionPointer),
callInputPorts = Set(callNodeBuilder.makeInputPort(inputDefinition, expressionNode.singleExpressionOutputPort)),
newExpressionNodes = Set(expressionNode)
)
// No input mapping, use the default expression
case withDefault @ InputDefinitionWithDefault(_, _, expression) =>
InputDefinitionFold(
mappings = Map(withDefault -> Coproduct[InputDefinitionPointer](expression))
)
// No input mapping, required and we don't have a default value, create a new RequiredGraphInputNode
// so that it can be satisfied via workflow inputs
case required @ RequiredInputDefinition(n, womType) =>
val identifier = wdlCall.womIdentifier.combine(n)
withGraphInputNode(required, RequiredGraphInputNode(identifier, womType))
// No input mapping, no default value but optional, create a OptionalGraphInputNode
// so that it can be satisfied via workflow inputs
case optional @ OptionalInputDefinition(n, womType) =>
val identifier = wdlCall.womIdentifier.combine(n)
withGraphInputNode(optional, OptionalGraphInputNode(identifier, womType))
}
}
(expressionNodeMappings, wdlCall.callable.womDefinition) mapN {
case (mappings, callable) =>
val usedOgins: Set[OuterGraphInputNode] = for {
expressionNode <- mappings.values.toSet[ExpressionNode]
ogin <- expressionNode.upstreamOuterGraphInputNodes
} yield ogin
callNodeBuilder.build(wdlCall.womIdentifier, callable, foldInputDefinitions(mappings, callable).copy(usedOuterGraphInputNodes = usedOgins))
}
}
}
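/*
 * For orientation, a minimal WDL snippet (hypothetical, not from this codebase)
 * of the `call` syntax that the classes below model:
 *
 *   workflow w {
 *     call hello as greet { input: name = "Yoda" }
 *   }
 *
 * Here alias = Some("greet") and inputMappings = Map("name" -> <expression "Yoda">);
 * any task inputs not listed in the `input` section become unsatisfiedInputs.
 */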
/**
 * Represents a `call` block in a WDL workflow. Calls wrap tasks (or sub-workflows)
 * and optionally provide a subset of the inputs for the task (as the inputMappings member).
 * All remaining inputs to the task that are not declared in the `input` section
 * of the `call` block are called unsatisfiedInputs.
*
* @param alias The alias for this call. If two calls in a workflow use the same task
* then one of them needs to be aliased to a different name
* @param callable The callable that this `call` will invoke
* @param inputMappings A map of task-local input names and corresponding expression for the
* value of those inputs
*/
sealed abstract class WdlCall(val alias: Option[String],
val callable: WdlCallable,
val inputMappings: Map[String, WdlExpression],
val ast: Ast) extends WdlGraphNodeWithInputs with WorkflowScoped {
val unqualifiedName: String = alias getOrElse callable.unqualifiedName
def callType: String
def toCallOutput(output: Output) = output match {
case taskOutput: TaskOutput => CallOutput(this, taskOutput.copy(parent = Option(this)))
case workflowOutput: WorkflowOutput => CallOutput(this, workflowOutput.copy(parent = Option(this)))
case error => throw new Exception(s"Invalid output type ${error.getClass.getSimpleName}")
}
lazy val outputs: Seq[CallOutput] = callable.outputs map toCallOutput
override def children: Seq[Scope] = super.children ++ outputs
/**
* Returns a Seq[WorkflowInput] representing the inputs to the call that are
* needed before its command can be constructed. This excludes inputs that
* are satisfied via the 'input' section of the Call definition.
*/
def unsatisfiedInputs: Seq[InputDefinition] = for {
i <- declarations if !inputMappings.contains(i.unqualifiedName) && i.expression.isEmpty && !i.womType.isInstanceOf[WomOptionalType]
} yield RequiredInputDefinition(i.fullyQualifiedName, i.womType)
override def toString: String = s"[Call $fullyQualifiedName]"
/**
* The call is responsible for evaluating runtime inputs for its underlying task,
   * as the input values are provided for a specific call.
   * The returned value is a map from Declaration to WomValue.
   * The keys in the return value are the task's declarations,
* not the call's, as they will be used later for command instantiation
* as well as output evaluation, which will both be performed by the task.
*/
def evaluateTaskInputs(inputs: WorkflowCoercedInputs,
wdlFunctions: WdlFunctions[WomValue],
outputResolver: OutputResolver = NoOutputResolver,
shards: Map[Scatter, Int] = Map.empty[Scatter, Int]): Try[EvaluatedTaskInputs] = {
type EvaluatedDeclarations = Map[Declaration, Try[WomValue]]
def doDeclaration(currentInputs: EvaluatedDeclarations, declaration: Declaration): EvaluatedDeclarations = {
val newInputs = inputs ++ currentInputs.collect{
case (decl, Success(value)) => decl.fullyQualifiedName -> value
}
val lookup = lookupFunction(newInputs, wdlFunctions, outputResolver, shards, relativeTo = declaration)
val evaluatedDeclaration = Try(lookup(declaration.unqualifiedName))
val coercedDeclaration: Try[WomValue] = evaluatedDeclaration match {
case Success(ed) => declaration.womType.coerceRawValue(ed)
case Failure(_: VariableNotFoundException) if declaration.womType.isInstanceOf[WomOptionalType] =>
val innerType = declaration.womType.asInstanceOf[WomOptionalType].memberType
Success(WomOptionalValue(innerType, None))
case Failure(f) => Failure(f)
}
currentInputs + (declaration -> coercedDeclaration)
}
val declarationAttempts = callable.declarations.foldLeft[EvaluatedDeclarations](Map.empty)(doDeclaration)
val (success, errors) = declarationAttempts partition {
case (_, Success(_)) => true
case _ => false
}
if (errors.nonEmpty) {
val throwables = errors.toList map { _._2.failed.get }
Failure(ValidationException(s"Input evaluation for Call $fullyQualifiedName failed.", throwables))
} else {
Success(success map { case (d, v) => d -> v.get })
}
}
/**
* Overrides the default lookup function to provide call specific resolution.
*/
override def lookupFunction(inputs: WorkflowCoercedInputs,
wdlFunctions: WdlFunctions[WomValue],
outputResolver: OutputResolver = NoOutputResolver,
shards: Map[Scatter, Int] = Map.empty[Scatter, Int],
relativeTo: Scope = this): String => WomValue = {
def lookup(name: String): WomValue = {
val inputMappingsWithMatchingName = Try(
inputMappings.getOrElse(name, throw new Exception(s"Could not find $name in input section of call $fullyQualifiedName"))
)
val declarationsWithMatchingName = Try(
declarations.find(_.unqualifiedName == name).getOrElse(throw new Exception(s"No declaration named $name for call $fullyQualifiedName"))
)
val inputMappingsLookup = for {
inputExpr <- inputMappingsWithMatchingName
parent <- Try(parent.getOrElse(throw new Exception(s"Call $unqualifiedName has no parent")))
evaluatedExpr <- inputExpr.evaluate(parent.lookupFunction(inputs, wdlFunctions, outputResolver, shards, relativeTo), wdlFunctions)
// Coerce the input into the declared type:
declaration <- declarationsWithMatchingName
coerced <- declaration.womType.coerceRawValue(evaluatedExpr)
} yield coerced
def unsuppliedDeclarationValue(declaration: Declaration) = declaration.womType match {
case opt: WomOptionalType => opt.none
case _ => throw VariableNotFoundException(declaration)
}
val declarationLookup = for {
declaration <- declarationsWithMatchingName
inputsLookup <- Try(inputs.getOrElse(declaration.fullyQualifiedName, unsuppliedDeclarationValue(declaration)))
} yield inputsLookup
val declarationExprLookup = for {
declaration <- declarationsWithMatchingName
declarationExpr <- Try(declaration.expression.getOrElse(throw VariableNotFoundException(declaration)))
evaluatedExpr <- declarationExpr.evaluate(lookupFunction(inputs, wdlFunctions, outputResolver, shards, relativeTo), wdlFunctions)
} yield evaluatedExpr
val taskParentResolution = for {
parent <- Try(callable.parent.getOrElse(throw new Exception(s"Task ${callable.unqualifiedName} has no parent")))
parentLookup <- Try(parent.lookupFunction(inputs, wdlFunctions, outputResolver, shards, relativeTo)(name))
} yield parentLookup
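      // Resolution precedence: a call-site input mapping wins, then the declaration's
      // own default expression, then a value supplied via workflow inputs, and
      // finally a lookup in the task's parent scope.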
val resolutions = Seq(inputMappingsLookup, declarationExprLookup, declarationLookup, taskParentResolution)
resolutions collectFirst { case Success(value) => value } getOrElse {
resolutions.toList.flatMap({
case Failure(_: VariableNotFoundException) => None
case Failure(ex) => Option(ex) // Only take failures that are not VariableNotFoundExceptions
case _ => None
}) match {
case Nil => throw VariableNotFoundException(name)
case exs => throw new VariableLookupException(name, exs)
}
}
}
lookup
}
}
case class WdlTaskCall(override val alias: Option[String], task: WdlTask, override val inputMappings: Map[String, WdlExpression], override val ast: Ast) extends WdlCall(alias, task, inputMappings, ast) {
override val callType = "call"
}
case class WdlWorkflowCall(override val alias: Option[String], calledWorkflow: WdlWorkflow, override val inputMappings: Map[String, WdlExpression], override val ast: Ast) extends WdlCall(alias, calledWorkflow, inputMappings, ast) {
override val callType = "workflow"
}
| ohsu-comp-bio/cromwell | wdl/src/main/scala/wdl/WdlCall.scala | Scala | bsd-3-clause | 14,247 |
/**
 * Implement a BitSequence class that packs a sequence of 64 bits into a single Long value.
 * Provide apply and update operations to get and set a specific bit.
*/
class BitSequence(private var value: Long = 0) {
implicit def bool2int(b: Boolean) = if (b) 1 else 0
  def update(bit: Int, state: Int) = value = (value & ~(1L << bit % 64)) | (state & 1L) << bit % 64 // mask out the old bit first so state 0 clears it
  def apply(bit: Int): Int = if ((value & 1L << bit % 64) != 0) 1 else 0 // != 0, not > 0: masking bit 63 yields a negative Long
override def toString = "%64s".format(value.toBinaryString).replace(" ", "0")
}
val x = new BitSequence()
x(5) = 1
x(63) = 1
x(64) = 1
println(x(5))
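// A couple more checks exercising clear and the high bit:
x(5) = 0
println(x(5)) // 0: update masks the target bit, so state 0 clears it
println(x(63)) // 1: apply tests != 0, so bit 63 reads correctly despite the sign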
println(x) | vernonzheng/scala-for-the-Impatient | src/Chapter11/exercise07.scala | Scala | mit | 564 |
package quisp.highcharts
import quisp.enums._
import quisp.{ExtensibleJsFormat, GeneralJson, Point}
import spray.json.DefaultJsonProtocol._
import spray.json._
/**
* JSON formats for Highcharts config
* @author rodneykinney
*/
object HighchartsJson {
import quisp.GeneralJson.{colorJS, writerToFormat}
implicit val chartJS: JsonFormat[ChartOptions] = ExtensibleJsFormat(ChartOptions)
implicit val titleJS: JsonFormat[ChartTitle] = ExtensibleJsFormat(ChartTitle)
implicit val axisTitleJS: JsonFormat[AxisTitle] = ExtensibleJsFormat(AxisTitle)
implicit val axisJS: JsonFormat[Axis] = ExtensibleJsFormat(Axis)
implicit val exportingJS: JsonFormat[ExportOptions] = ExtensibleJsFormat(ExportOptions)
implicit val legendTitleJS = jsonFormat2(LegendTitle)
implicit val legendJS: JsonFormat[Legend] = ExtensibleJsFormat(Legend)
implicit val dataLabelsJS: JsonFormat[PointLabelFormat] = ExtensibleJsFormat(PointLabelFormat)
implicit val richPointJS: JsonFormat[RichPoint] = ExtensibleJsFormat(RichPoint)
implicit val markerJS: JsonFormat[MarkerConfig] = ExtensibleJsFormat(MarkerConfig)
implicit val plotSettingsJS: JsonFormat[SeriesSettings] = ExtensibleJsFormat(SeriesSettings)
implicit val plotOptionsJS: JsonFormat[PlotSpecificSettings] = ExtensibleJsFormat(PlotSpecificSettings)
implicit val dataJS: JsonFormat[Point] = new JsonWriter[Point] {
def write(obj: Point) = obj match {
case p: RichPoint => richPointJS.write(p)
case _ => GeneralJson.pointJS.write(obj)
}
}
implicit val seriesJS: JsonFormat[Series] = ExtensibleJsFormat(Series)
implicit val floatingLabelJS = jsonFormat2(FloatingLabel)
implicit val floatingLabelsJS = jsonFormat1(FloatingLabels)
implicit val highchartDataJS: JsonFormat[Chart] = ExtensibleJsFormat(Chart)
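  // With these formats in scope, spray-json's pimped syntax applies to the config
  // types, e.g. (illustrative, assuming some `chart: Chart` value exists):
  //   chart.toJson.compactPrint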
} | rodneykinney/quisp | src/main/scala/quisp/highcharts/HighchartsJson.scala | Scala | apache-2.0 | 1,794 |
/**
* Copyright (c) 2013, The National Archives <[email protected]>
* https://www.nationalarchives.gov.uk
*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
*/
package uk.gov.nationalarchives.csv.validator.schema.v1_0
import java.io.StringReader
import org.junit.runner.RunWith
import org.specs2.runner.JUnitRunner
import uk.gov.nationalarchives.csv.validator.schema._
@RunWith(classOf[JUnitRunner])
class SchemaParserSpecs extends SchemaSpecBase {
import TestSchemaParser._
"Schema" should {
"succeed for valid minimal schema" in {
val columnDefinitions = List(new ColumnDefinition(NamedColumnIdentifier("column1")),new ColumnDefinition(NamedColumnIdentifier("column2")),new ColumnDefinition(NamedColumnIdentifier("column3")))
val schema = """version 1.0
|@totalColumns 3
|@noHeader
|column1:
|column2:
|column3:""".stripMargin
parse(new StringReader(schema)) must beLike { case Success(parsedSchema, _) => parsedSchema mustEqual buildSchema1_0(TotalColumns(3), NoHeader())(columnDefinitions:_*) }
}
"fail if the schema version is wrong" in {
val schema = """version 1
@totalColumns 1
LastName: @IgnoreCase regex ("[a]")"""
parse(new StringReader(schema)) must beLike {
        case Failure(messages, _) => messages mustEqual "Schema version declaration 'version 1.0' missing or incorrect"
}
}
"succeed for extra white space around (including tabs)" in {
val schema = """version 1.0
@totalColumns 2
Name :
Age : """
parse(new StringReader(schema)) must beLike { case Success(parsedSchema, _) => parsedSchema mustEqual buildSchema1_0(TotalColumns(2))(namedColumn("Name"), namedColumn("Age")) }
}
"fail if column directives declared before rules" in {
val schema = """version 1.0
@totalColumns 1
LastName: @IgnoreCase regex ("[a]")"""
parse(new StringReader(schema)) must beLike {
case Failure(messages, _) => messages mustEqual "Invalid column directive"
}
}
"succeed if noHeader global directive set" in {
val schema = """version 1.0
@totalColumns 2 @noHeader
Name :
Age : """
parse(new StringReader(schema)) must beLike { case Success(parsedSchema, _) => parsedSchema mustEqual buildSchema1_0(TotalColumns(2), NoHeader())(namedColumn("Name"), namedColumn("Age")) }
}
val nonEmptySchema = buildSchema1_0(TotalColumns(2))(nonEmptyColumn("Name"), nonEmptyColumn("Age"))
"succeed for single-line comments" should {
"placed immediately after the prolog" in {
val schema = """version 1.0
@totalColumns 2
//start of body
Name : notEmpty
Age : notEmpty
"""
parse(new StringReader(schema)) must beLike { case Success(parsedSchema, _) => parsedSchema mustEqual nonEmptySchema }
}
"placed anywhere in the body" in {
val schema = """version 1.0
@totalColumns 2
//start of body
Name : notEmpty //inline comment
//comment before next rule
Age : notEmpty //another inline comment
"""
parse(new StringReader(schema)) must beLike { case Success(parsedSchema, _) => parsedSchema mustEqual nonEmptySchema }
}
"placed at the end of the body (with newline)" in {
val schema = """version 1.0
@totalColumns 2
Name : notEmpty
Age : notEmpty
//comment at end of body
"""
parse(new StringReader(schema)) must beLike { case Success(parsedSchema, _) => parsedSchema mustEqual nonEmptySchema }
}
"placed at the end of the body (without newline)" in {
val schema = """version 1.0
@totalColumns 2
Name : notEmpty
Age : notEmpty
//comment at end of body"""
parse(new StringReader(schema)) must beLike { case Success(parsedSchema, _) => parsedSchema mustEqual nonEmptySchema }
}
}
"succeed for multi-line comments" should {
"placed immediately after the prolog" in {
val schema = """version 1.0
@totalColumns 2
/*
start of body
*/
Name : notEmpty
Age : notEmpty
"""
parse(new StringReader(schema)) must beLike { case Success(parsedSchema, _) => parsedSchema mustEqual nonEmptySchema }
}
"placed anywhere in the body" in {
val schema = """version 1.0
@totalColumns 2
/*
start of body
*/
Name : notEmpty /* inline
comment */
/* comment before next rule */
Age : notEmpty /* another inline
comment */
"""
parse(new StringReader(schema)) must beLike { case Success(parsedSchema, _) => parsedSchema mustEqual nonEmptySchema }
}
"placed at the end of the body (with newline)" in {
val schema = """version 1.0
@totalColumns 2
Name : notEmpty
Age : notEmpty
/* comment at
end of body */
"""
parse(new StringReader(schema)) must beLike { case Success(parsedSchema, _) => parsedSchema mustEqual nonEmptySchema }
}
"placed at the end of the body (without newline)" in {
val schema = """version 1.0
@totalColumns 2
Name : notEmpty
Age : notEmpty
/* comment at end
of body */"""
parse(new StringReader(schema)) must beLike { case Success(parsedSchema, _) => parsedSchema mustEqual nonEmptySchema }
}
}
}
"When there is a named column for a rule the schema" should {
"succeed when there is a named column for a rule" in {
val columnDefinitions = List(new ColumnDefinition(NamedColumnIdentifier("FirstName"), Nil, Nil),new ColumnDefinition(NamedColumnIdentifier("LastName"), List( IsRule(Literal(Some("Yoda"))))) )
val schema = """version 1.0
@totalColumns 2 @noHeader
FirstName:
LastName: $FirstName/is("Yoda")"""
parse(new StringReader(schema)) must beLike { case Success(parsedSchema, _) => parsedSchema mustEqual buildSchema1_0(TotalColumns(2), NoHeader())(columnDefinitions:_*) }
}
"succeed when there is NO named column for a rule" in {
val columnDefinitions = List(new ColumnDefinition(NamedColumnIdentifier("FirstName"), Nil, Nil),new ColumnDefinition(NamedColumnIdentifier("LastName"), List( IsRule(Literal(Some("Yoda"))))) )
val schema = """version 1.0
@totalColumns 2 @noHeader
FirstName:
LastName: is("Yoda")"""
parse(new StringReader(schema)) must beLike { case Success(parsedSchema, _) => parsedSchema mustEqual buildSchema1_0(TotalColumns(2), NoHeader())(columnDefinitions:_*) }
}
"succeed when there is 'or' rule named column for a rule" in {
val columnDefinitions = List(new ColumnDefinition(NamedColumnIdentifier("FirstName"), Nil, Nil),new ColumnDefinition(NamedColumnIdentifier("LastName"), List( OrRule( IsRule(Literal(Some("Yoda"))),IsRule(Literal(Some("Darth"))) ) )) )
val schema = """version 1.0
@totalColumns 2 @noHeader
FirstName:
LastName: $FirstName/is("Yoda") or $FirstName/is("Darth")"""
parse(new StringReader(schema)) must beLike { case Success(parsedSchema, _) => parsedSchema mustEqual buildSchema1_0(TotalColumns(2), NoHeader())(columnDefinitions:_*) }
}
"fail if is an invalid explisit column format" in {
val schema = """version 1.0
@totalColumns 2 @noHeader
FirstName:
LastName: WRONGCOLUMN/is("Yoda") or $FirstName/is("Darth")"""
parse(new StringReader(schema)) must beLike {
case Failure(messages, _) => messages mustEqual "Invalid column definition"
}
}
}
} | adamretter/csv-validator | csv-validator-core/src/test/scala/uk/gov/nationalarchives/csv/validator/schema/v1_0/SchemaParserSpecs.scala | Scala | mpl-2.0 | 8,527 |
/*
* Copyright (C) 2009-2017 Lightbend Inc. <https://www.lightbend.com>
*/
package play.data
import javax.validation.Validation
import org.specs2.mutable.Specification
import play.api.i18n._
import play.data.format.Formatters
import play.data.validation.Constraints.{ MaxLength, Required }
import scala.beans.BeanProperty
import scala.collection.JavaConverters._
class PartialValidationSpec extends Specification {
val messagesApi = new DefaultMessagesApi()
val jMessagesApi = new play.i18n.MessagesApi(messagesApi)
val formFactory = new FormFactory(jMessagesApi, new Formatters(jMessagesApi), FormSpec.validator())
"partial validation" should {
"not fail when fields not in the same group fail validation" in {
val form = formFactory.form(classOf[SomeForm], classOf[Partial]).bind(Map("prop2" -> "Hello", "prop3" -> "abc").asJava)
form.allErrors().asScala must beEmpty
}
"fail when a field in the group fails validation" in {
val form = formFactory.form(classOf[SomeForm], classOf[Partial]).bind(Map("prop3" -> "abc").asJava)
form.hasErrors must_== true
}
"support multiple validations for the same group" in {
val form1 = formFactory.form(classOf[SomeForm]).bind(Map("prop2" -> "Hello").asJava)
form1.hasErrors must_== true
val form2 = formFactory.form(classOf[SomeForm]).bind(Map("prop2" -> "Hello", "prop3" -> "abcd").asJava)
form2.hasErrors must_== true
}
}
}
trait Partial
class SomeForm {
@BeanProperty
@Required
var prop1: String = _
@BeanProperty
@Required(groups = Array(classOf[Partial]))
var prop2: String = _
@BeanProperty
@Required(groups = Array(classOf[Partial]))
@MaxLength(value = 3, groups = Array(classOf[Partial]))
var prop3: String = _
}
| wsargent/playframework | framework/src/play-java-forms/src/test/scala/play/data/PartialValidationSpec.scala | Scala | apache-2.0 | 1,778 |
package akkord.events
trait VoiceState {
val channel_id: Option[String]
val user_id: String
val session_id: String
val guild_id: Option[String]
val deaf: Boolean
val mute: Boolean
val self_deaf: Boolean
val self_mute: Boolean
val suppress: Boolean
}
case class VoiceStateImpl
(
override val channel_id: Option[String],
override val user_id: String,
override val session_id: String,
override val guild_id: Option[String],
override val deaf: Boolean,
override val mute: Boolean,
override val self_deaf: Boolean,
override val self_mute: Boolean,
override val suppress: Boolean
) extends VoiceState
| ryanmiville/akkord | src/main/scala/akkord/events/VoiceState.scala | Scala | mit | 632 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.predictionio.data.storage
import org.apache.predictionio.annotation.DeveloperApi
import org.apache.predictionio.annotation.Experimental
import scala.concurrent.Future
import scala.concurrent.Await
import scala.concurrent.duration.Duration
import scala.concurrent.ExecutionContext
import scala.concurrent.TimeoutException
import org.joda.time.DateTime
/** :: DeveloperApi ::
* Base trait of a data access object that directly returns [[Event]] without
* going through Spark's parallelization. Engine developers should use
* [[org.apache.predictionio.data.store.LEventStore]] instead of using this directly.
*
* @group Event Data
*/
@DeveloperApi
trait LEvents {
/** Default timeout for asynchronous operations that is set to 1 minute */
val defaultTimeout = Duration(60, "seconds")
/** :: DeveloperApi ::
* Initialize Event Store for an app ID and optionally a channel ID.
* This routine is to be called when an app is first created.
*
* @param appId App ID
* @param channelId Optional channel ID
* @return true if initialization was successful; false otherwise.
*/
@DeveloperApi
def init(appId: Int, channelId: Option[Int] = None): Boolean
/** :: DeveloperApi ::
* Remove Event Store for an app ID and optional channel ID.
*
* @param appId App ID
* @param channelId Optional channel ID
* @return true if removal was successful; false otherwise.
*/
@DeveloperApi
def remove(appId: Int, channelId: Option[Int] = None): Boolean
/** :: DeveloperApi ::
* Close this Event Store interface object, e.g. close connection, release
* resources, etc.
*/
@DeveloperApi
def close(): Unit
/** :: DeveloperApi ::
* Insert an [[Event]] in a non-blocking fashion.
*
* @param event An [[Event]] to be inserted
* @param appId App ID for the [[Event]] to be inserted to
*/
@DeveloperApi
def futureInsert(event: Event, appId: Int)(implicit ec: ExecutionContext):
Future[String] = futureInsert(event, appId, None)
/** :: DeveloperApi ::
* Insert an [[Event]] in a non-blocking fashion.
*
* @param event An [[Event]] to be inserted
* @param appId App ID for the [[Event]] to be inserted to
* @param channelId Optional channel ID for the [[Event]] to be inserted to
*/
@DeveloperApi
def futureInsert(
event: Event, appId: Int, channelId: Option[Int])(implicit ec: ExecutionContext): Future[String]
/** :: DeveloperApi ::
* Insert [[Event]]s in a non-blocking fashion.
*
* Default implementation of this method is calling
* [[LEvents.futureInsert(Event, Int, Option[Int])]] per event.
* Override in the storage implementation if the storage has
* a better way to insert multiple data at once.
*
* @param events [[Event]]s to be inserted
* @param appId App ID for the [[Event]]s to be inserted to
* @param channelId Optional channel ID for the [[Event]]s to be inserted to
*/
@DeveloperApi
def futureInsertBatch(events: Seq[Event], appId: Int, channelId: Option[Int])
(implicit ec: ExecutionContext): Future[Seq[String]] = {
val seq = events.map { event =>
futureInsert(event, appId, channelId)
}
Future.sequence(seq)
}
/** :: DeveloperApi ::
* Get an [[Event]] in a non-blocking fashion.
*
* @param eventId ID of the [[Event]]
* @param appId ID of the app that contains the [[Event]]
*/
@DeveloperApi
def futureGet(eventId: String, appId: Int)(implicit ec: ExecutionContext):
Future[Option[Event]] = futureGet(eventId, appId, None)
/** :: DeveloperApi ::
* Get an [[Event]] in a non-blocking fashion.
*
* @param eventId ID of the [[Event]]
* @param appId ID of the app that contains the [[Event]]
* @param channelId Optional channel ID that contains the [[Event]]
*/
@DeveloperApi
def futureGet(
eventId: String,
appId: Int,
channelId: Option[Int]
)(implicit ec: ExecutionContext): Future[Option[Event]]
/** :: DeveloperApi ::
* Delete an [[Event]] in a non-blocking fashion.
*
* @param eventId ID of the [[Event]]
* @param appId ID of the app that contains the [[Event]]
*/
@DeveloperApi
def futureDelete(eventId: String, appId: Int)(implicit ec: ExecutionContext):
Future[Boolean] = futureDelete(eventId, appId, None)
/** :: DeveloperApi ::
* Delete an [[Event]] in a non-blocking fashion.
*
* @param eventId ID of the [[Event]]
* @param appId ID of the app that contains the [[Event]]
* @param channelId Optional channel ID that contains the [[Event]]
*/
@DeveloperApi
def futureDelete(
eventId: String,
appId: Int,
channelId: Option[Int]
)(implicit ec: ExecutionContext): Future[Boolean]
/** :: DeveloperApi ::
* Reads from database and returns a Future of Iterator of [[Event]]s.
*
* @param appId return events of this app ID
* @param channelId return events of this channel ID (default channel if it's None)
* @param startTime return events with eventTime >= startTime
* @param untilTime return events with eventTime < untilTime
* @param entityType return events of this entityType
* @param entityId return events of this entityId
* @param eventNames return events with any of these event names.
* @param targetEntityType return events of this targetEntityType:
* - None means no restriction on targetEntityType
* - Some(None) means no targetEntityType for this event
* - Some(Some(x)) means targetEntityType should match x.
* @param targetEntityId return events of this targetEntityId
* - None means no restriction on targetEntityId
* - Some(None) means no targetEntityId for this event
* - Some(Some(x)) means targetEntityId should match x.
* @param limit Limit number of events. Get all events if None or Some(-1)
* @param reversed Reverse the order.
* - return oldest events first if None or Some(false) (default)
* - return latest events first if Some(true)
* @param ec ExecutionContext
* @return Future[Iterator[Event]]
*/
@DeveloperApi
def futureFind(
appId: Int,
channelId: Option[Int] = None,
startTime: Option[DateTime] = None,
untilTime: Option[DateTime] = None,
entityType: Option[String] = None,
entityId: Option[String] = None,
eventNames: Option[Seq[String]] = None,
targetEntityType: Option[Option[String]] = None,
targetEntityId: Option[Option[String]] = None,
limit: Option[Int] = None,
reversed: Option[Boolean] = None
)(implicit ec: ExecutionContext): Future[Iterator[Event]]
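  // A usage sketch (values illustrative): fetch the ten most recent "rate" events
  // of app 1 that carry no target entity. Note the tri-state target filters:
  // None = unconstrained, Some(None) = must be absent, Some(Some(x)) = must equal x.
  //
  //   futureFind(
  //     appId = 1,
  //     eventNames = Some(Seq("rate")),
  //     targetEntityType = Some(None),
  //     limit = Some(10),
  //     reversed = Some(true))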
/** Aggregate properties of entities based on these special events:
* \\$set, \\$unset, \\$delete events.
* and returns a Future of Map of entityId to properties.
*
* @param appId use events of this app ID
* @param channelId use events of this channel ID (default channel if it's None)
* @param entityType aggregate properties of the entities of this entityType
* @param startTime use events with eventTime >= startTime
* @param untilTime use events with eventTime < untilTime
* @param required only keep entities with these required properties defined
* @param ec ExecutionContext
* @return Future[Map[String, PropertyMap]]
*/
private[predictionio] def futureAggregateProperties(
appId: Int,
channelId: Option[Int] = None,
entityType: String,
startTime: Option[DateTime] = None,
untilTime: Option[DateTime] = None,
required: Option[Seq[String]] = None)(implicit ec: ExecutionContext):
Future[Map[String, PropertyMap]] = {
futureFind(
appId = appId,
channelId = channelId,
startTime = startTime,
untilTime = untilTime,
entityType = Some(entityType),
eventNames = Some(LEventAggregator.eventNames)
).map{ eventIt =>
val dm = LEventAggregator.aggregateProperties(eventIt)
if (required.isDefined) {
dm.filter { case (k, v) =>
required.get.map(v.contains(_)).reduce(_ && _)
}
} else dm
}
}
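  // Illustrative behaviour (hypothetical events): two $set events on entity "u1",
  // setting {"a": 1} and then {"b": 2}, aggregate to a single PropertyMap
  // {"a": 1, "b": 2}; a later $delete event would drop "u1" from the result map.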
/**
* :: Experimental ::
*
* Aggregate properties of the specified entity (entityType + entityId)
* based on these special events:
* \\$set, \\$unset, \\$delete events.
* and returns a Future of Option[PropertyMap]
*
* @param appId use events of this app ID
* @param channelId use events of this channel ID (default channel if it's None)
* @param entityType the entityType
* @param entityId the entityId
* @param startTime use events with eventTime >= startTime
* @param untilTime use events with eventTime < untilTime
* @param ec ExecutionContext
* @return Future[Option[PropertyMap]]
*/
@Experimental
private[predictionio] def futureAggregatePropertiesOfEntity(
appId: Int,
channelId: Option[Int] = None,
entityType: String,
entityId: String,
startTime: Option[DateTime] = None,
untilTime: Option[DateTime] = None)(implicit ec: ExecutionContext):
Future[Option[PropertyMap]] = {
futureFind(
appId = appId,
channelId = channelId,
startTime = startTime,
untilTime = untilTime,
entityType = Some(entityType),
entityId = Some(entityId),
eventNames = Some(LEventAggregator.eventNames)
).map{ eventIt =>
LEventAggregator.aggregatePropertiesSingle(eventIt)
}
}
// following is blocking
private[predictionio] def insert(event: Event, appId: Int,
channelId: Option[Int] = None,
timeout: Duration = defaultTimeout)(implicit ec: ExecutionContext):
String = {
Await.result(futureInsert(event, appId, channelId), timeout)
}
private[predictionio] def get(eventId: String, appId: Int,
channelId: Option[Int] = None,
timeout: Duration = defaultTimeout)(implicit ec: ExecutionContext):
Option[Event] = {
Await.result(futureGet(eventId, appId, channelId), timeout)
}
private[predictionio] def delete(eventId: String, appId: Int,
channelId: Option[Int] = None,
timeout: Duration = defaultTimeout)(implicit ec: ExecutionContext):
Boolean = {
Await.result(futureDelete(eventId, appId, channelId), timeout)
}
/** reads from database and returns events iterator.
*
* @param appId return events of this app ID
* @param channelId return events of this channel ID (default channel if it's None)
* @param startTime return events with eventTime >= startTime
* @param untilTime return events with eventTime < untilTime
* @param entityType return events of this entityType
* @param entityId return events of this entityId
* @param eventNames return events with any of these event names.
* @param targetEntityType return events of this targetEntityType:
* - None means no restriction on targetEntityType
* - Some(None) means no targetEntityType for this event
* - Some(Some(x)) means targetEntityType should match x.
* @param targetEntityId return events of this targetEntityId
* - None means no restriction on targetEntityId
* - Some(None) means no targetEntityId for this event
* - Some(Some(x)) means targetEntityId should match x.
* @param limit Limit number of events. Get all events if None or Some(-1)
* @param reversed Reverse the order (should be used with both
* targetEntityType and targetEntityId specified)
* - return oldest events first if None or Some(false) (default)
* - return latest events first if Some(true)
* @param ec ExecutionContext
* @return Iterator[Event]
*/
private[predictionio] def find(
appId: Int,
channelId: Option[Int] = None,
startTime: Option[DateTime] = None,
untilTime: Option[DateTime] = None,
entityType: Option[String] = None,
entityId: Option[String] = None,
eventNames: Option[Seq[String]] = None,
targetEntityType: Option[Option[String]] = None,
targetEntityId: Option[Option[String]] = None,
limit: Option[Int] = None,
reversed: Option[Boolean] = None,
timeout: Duration = defaultTimeout)(implicit ec: ExecutionContext):
Iterator[Event] = {
Await.result(futureFind(
appId = appId,
channelId = channelId,
startTime = startTime,
untilTime = untilTime,
entityType = entityType,
entityId = entityId,
eventNames = eventNames,
targetEntityType = targetEntityType,
targetEntityId = targetEntityId,
limit = limit,
reversed = reversed), timeout)
}
// NOTE: remove in next release
@deprecated("Use find() instead.", "0.9.2")
private[predictionio] def findLegacy(
appId: Int,
channelId: Option[Int] = None,
startTime: Option[DateTime] = None,
untilTime: Option[DateTime] = None,
entityType: Option[String] = None,
entityId: Option[String] = None,
eventNames: Option[Seq[String]] = None,
targetEntityType: Option[Option[String]] = None,
targetEntityId: Option[Option[String]] = None,
limit: Option[Int] = None,
reversed: Option[Boolean] = None,
timeout: Duration = defaultTimeout)(implicit ec: ExecutionContext):
Either[StorageError, Iterator[Event]] = {
try {
// return Either for legacy usage
Right(Await.result(futureFind(
appId = appId,
channelId = channelId,
startTime = startTime,
untilTime = untilTime,
entityType = entityType,
entityId = entityId,
eventNames = eventNames,
targetEntityType = targetEntityType,
targetEntityId = targetEntityId,
limit = limit,
reversed = reversed), timeout))
} catch {
case e: TimeoutException => Left(StorageError(s"${e}"))
case e: Exception => Left(StorageError(s"${e}"))
}
}
/** reads events of the specified entity.
*
* @param appId return events of this app ID
* @param channelId return events of this channel ID (default channel if it's None)
* @param entityType return events of this entityType
* @param entityId return events of this entityId
* @param eventNames return events with any of these event names.
* @param targetEntityType return events of this targetEntityType:
* - None means no restriction on targetEntityType
* - Some(None) means no targetEntityType for this event
* - Some(Some(x)) means targetEntityType should match x.
* @param targetEntityId return events of this targetEntityId
* - None means no restriction on targetEntityId
* - Some(None) means no targetEntityId for this event
* - Some(Some(x)) means targetEntityId should match x.
* @param startTime return events with eventTime >= startTime
* @param untilTime return events with eventTime < untilTime
* @param limit Limit number of events. Get all events if None or Some(-1)
* @param latest Return latest event first (default true)
* @param ec ExecutionContext
* @return Either[StorageError, Iterator[Event]]
*/
// NOTE: remove this function in next release
@deprecated("Use LEventStore.findByEntity() instead.", "0.9.2")
def findSingleEntity(
appId: Int,
channelId: Option[Int] = None,
entityType: String,
entityId: String,
eventNames: Option[Seq[String]] = None,
targetEntityType: Option[Option[String]] = None,
targetEntityId: Option[Option[String]] = None,
startTime: Option[DateTime] = None,
untilTime: Option[DateTime] = None,
limit: Option[Int] = None,
latest: Boolean = true,
timeout: Duration = defaultTimeout)(implicit ec: ExecutionContext):
Either[StorageError, Iterator[Event]] = {
findLegacy(
appId = appId,
channelId = channelId,
startTime = startTime,
untilTime = untilTime,
entityType = Some(entityType),
entityId = Some(entityId),
eventNames = eventNames,
targetEntityType = targetEntityType,
targetEntityId = targetEntityId,
limit = limit,
reversed = Some(latest),
timeout = timeout)
}
/** Aggregate properties of entities based on these special events:
* \\$set, \\$unset, \\$delete events.
* and returns a Map of entityId to properties.
*
* @param appId use events of this app ID
* @param channelId use events of this channel ID (default channel if it's None)
* @param entityType aggregate properties of the entities of this entityType
* @param startTime use events with eventTime >= startTime
* @param untilTime use events with eventTime < untilTime
* @param required only keep entities with these required properties defined
* @param ec ExecutionContext
* @return Map[String, PropertyMap]
*/
private[predictionio] def aggregateProperties(
appId: Int,
channelId: Option[Int] = None,
entityType: String,
startTime: Option[DateTime] = None,
untilTime: Option[DateTime] = None,
required: Option[Seq[String]] = None,
timeout: Duration = defaultTimeout)(implicit ec: ExecutionContext):
Map[String, PropertyMap] = {
Await.result(futureAggregateProperties(
appId = appId,
channelId = channelId,
entityType = entityType,
startTime = startTime,
untilTime = untilTime,
required = required), timeout)
}
/**
* :: Experimental ::
*
* Aggregate properties of the specified entity (entityType + entityId)
* based on these special events:
* \\$set, \\$unset, \\$delete events.
* and returns Option[PropertyMap]
*
* @param appId use events of this app ID
* @param channelId use events of this channel ID
* @param entityType the entityType
* @param entityId the entityId
* @param startTime use events with eventTime >= startTime
* @param untilTime use events with eventTime < untilTime
* @param ec ExecutionContext
* @return Future[Option[PropertyMap]]
*/
@Experimental
private[predictionio] def aggregatePropertiesOfEntity(
appId: Int,
channelId: Option[Int] = None,
entityType: String,
entityId: String,
startTime: Option[DateTime] = None,
untilTime: Option[DateTime] = None,
timeout: Duration = defaultTimeout)(implicit ec: ExecutionContext):
Option[PropertyMap] = {
Await.result(futureAggregatePropertiesOfEntity(
appId = appId,
channelId = channelId,
entityType = entityType,
entityId = entityId,
startTime = startTime,
untilTime = untilTime), timeout)
}
}
| PredictionIO/PredictionIO | data/src/main/scala/org/apache/predictionio/data/storage/LEvents.scala | Scala | apache-2.0 | 19,424 |
/*
* Copyright 2001-2015 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest.examples.asyncwordspec.noargasynctest
import java.io.File
import org.scalatest._
import scala.concurrent.Future
class ExampleSpec extends AsyncWordSpec {
override def withFixture(test: NoArgAsyncTest) = {
super.withFixture(test) onFailedThen { _ =>
val currDir = new File(".")
val fileNames = currDir.list()
info("Dir snapshot: " + fileNames.mkString(", "))
}
}
def addSoon(addends: Int*): Future[Int] = Future { addends.sum }
"This test" should {
"succeed" in {
addSoon(1, 1) map { sum => assert(sum == 2) }
}
"fail" in {
addSoon(1, 1) map { sum => assert(sum == 3) }
}
}
}
| dotty-staging/scalatest | examples/src/test/scala/org/scalatest/examples/asyncwordspec/noargasynctest/ExampleSpec.scala | Scala | apache-2.0 | 1,271 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.catalog
import java.net.URI
import java.time.ZoneOffset
import java.util.Date
import scala.collection.mutable
import scala.util.control.NonFatal
import org.apache.spark.internal.Logging
import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.catalyst.{FunctionIdentifier, InternalRow, TableIdentifier}
import org.apache.spark.sql.catalyst.analysis.MultiInstanceRelation
import org.apache.spark.sql.catalyst.expressions.{Attribute, AttributeMap, AttributeReference, Cast, ExprId, Literal}
import org.apache.spark.sql.catalyst.plans.logical._
import org.apache.spark.sql.catalyst.plans.logical.statsEstimation.EstimationUtils
import org.apache.spark.sql.catalyst.util.{CaseInsensitiveMap, DateFormatter, DateTimeUtils, TimestampFormatter}
import org.apache.spark.sql.catalyst.util.quoteIdentifier
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.types._
/**
* A function defined in the catalog.
*
* @param identifier name of the function
* @param className fully qualified class name, e.g. "org.apache.spark.util.MyFunc"
* @param resources resource types and Uris used by the function
*/
case class CatalogFunction(
identifier: FunctionIdentifier,
className: String,
resources: Seq[FunctionResource])
/**
* Storage format, used to describe how a partition or a table is stored.
*/
case class CatalogStorageFormat(
locationUri: Option[URI],
inputFormat: Option[String],
outputFormat: Option[String],
serde: Option[String],
compressed: Boolean,
properties: Map[String, String]) {
override def toString: String = {
    toLinkedHashMap.map { case (key, value) =>
if (value.isEmpty) key else s"$key: $value"
}.mkString("Storage(", ", ", ")")
}
def toLinkedHashMap: mutable.LinkedHashMap[String, String] = {
val map = new mutable.LinkedHashMap[String, String]()
locationUri.foreach(l => map.put("Location", l.toString))
serde.foreach(map.put("Serde Library", _))
inputFormat.foreach(map.put("InputFormat", _))
outputFormat.foreach(map.put("OutputFormat", _))
if (compressed) map.put("Compressed", "")
CatalogUtils.maskCredentials(properties) match {
case props if props.isEmpty => // No-op
case props =>
map.put("Storage Properties", props.map(p => p._1 + "=" + p._2).mkString("[", ", ", "]"))
}
map
}
}
object CatalogStorageFormat {
/** Empty storage format for default values and copies. */
val empty = CatalogStorageFormat(locationUri = None, inputFormat = None,
outputFormat = None, serde = None, compressed = false, properties = Map.empty)
}
/**
* A partition (Hive style) defined in the catalog.
*
* @param spec partition spec values indexed by column name
* @param storage storage format of the partition
* @param parameters some parameters for the partition
* @param createTime creation time of the partition, in milliseconds
* @param lastAccessTime last access time, in milliseconds
* @param stats optional statistics (number of rows, total size, etc.)
*/
case class CatalogTablePartition(
spec: CatalogTypes.TablePartitionSpec,
storage: CatalogStorageFormat,
parameters: Map[String, String] = Map.empty,
createTime: Long = System.currentTimeMillis,
lastAccessTime: Long = -1,
stats: Option[CatalogStatistics] = None) {
def toLinkedHashMap: mutable.LinkedHashMap[String, String] = {
val map = new mutable.LinkedHashMap[String, String]()
val specString = spec.map { case (k, v) => s"$k=$v" }.mkString(", ")
map.put("Partition Values", s"[$specString]")
map ++= storage.toLinkedHashMap
if (parameters.nonEmpty) {
map.put("Partition Parameters", s"{${parameters.map(p => p._1 + "=" + p._2).mkString(", ")}}")
}
map.put("Created Time", new Date(createTime).toString)
val lastAccess = {
if (-1 == lastAccessTime) "UNKNOWN" else new Date(lastAccessTime).toString
}
map.put("Last Access", lastAccess)
stats.foreach(s => map.put("Partition Statistics", s.simpleString))
map
}
override def toString: String = {
    toLinkedHashMap.map { case (key, value) =>
if (value.isEmpty) key else s"$key: $value"
}.mkString("CatalogPartition(\n\t", "\n\t", ")")
}
/** Readable string representation for the CatalogTablePartition. */
def simpleString: String = {
    toLinkedHashMap.map { case (key, value) =>
if (value.isEmpty) key else s"$key: $value"
}.mkString("", "\n", "")
}
/** Return the partition location, assuming it is specified. */
def location: URI = storage.locationUri.getOrElse {
val specString = spec.map { case (k, v) => s"$k=$v" }.mkString(", ")
throw new AnalysisException(s"Partition [$specString] did not specify locationUri")
}
/**
* Given the partition schema, returns a row with that schema holding the partition values.
*/
  def toRow(partitionSchema: StructType, defaultTimeZoneId: String): InternalRow = {
    val caseInsensitiveProperties = CaseInsensitiveMap(storage.properties)
    val timeZoneId = caseInsensitiveProperties.getOrElse(
      DateTimeUtils.TIMEZONE_OPTION, defaultTimeZoneId)
InternalRow.fromSeq(partitionSchema.map { field =>
val partValue = if (spec(field.name) == ExternalCatalogUtils.DEFAULT_PARTITION_NAME) {
null
} else {
spec(field.name)
}
Cast(Literal(partValue), field.dataType, Option(timeZoneId)).eval()
})
}
}
/**
* A container for bucketing information.
 * Bucketing is a technique for decomposing data sets into more manageable parts; the number
 * of buckets is fixed, so it does not fluctuate with the data.
*
* @param numBuckets number of buckets.
* @param bucketColumnNames the names of the columns that used to generate the bucket id.
* @param sortColumnNames the names of the columns that used to sort data in each bucket.
*/
case class BucketSpec(
numBuckets: Int,
bucketColumnNames: Seq[String],
sortColumnNames: Seq[String]) {
def conf: SQLConf = SQLConf.get
if (numBuckets <= 0 || numBuckets > conf.bucketingMaxBuckets) {
throw new AnalysisException(
s"Number of buckets should be greater than 0 but less than or equal to " +
s"bucketing.maxBuckets (`${conf.bucketingMaxBuckets}`). Got `$numBuckets`")
}
override def toString: String = {
val bucketString = s"bucket columns: [${bucketColumnNames.mkString(", ")}]"
val sortString = if (sortColumnNames.nonEmpty) {
s", sort columns: [${sortColumnNames.mkString(", ")}]"
} else {
""
}
s"$numBuckets buckets, $bucketString$sortString"
}
def toLinkedHashMap: mutable.LinkedHashMap[String, String] = {
mutable.LinkedHashMap[String, String](
"Num Buckets" -> numBuckets.toString,
"Bucket Columns" -> bucketColumnNames.map(quoteIdentifier).mkString("[", ", ", "]"),
"Sort Columns" -> sortColumnNames.map(quoteIdentifier).mkString("[", ", ", "]")
)
}
}
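// For instance (illustrative), the Hive-style clause
//   CLUSTERED BY (user_id) SORTED BY (ts) INTO 8 BUCKETS
// corresponds to BucketSpec(8, Seq("user_id"), Seq("ts")).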
/**
* A table defined in the catalog.
*
* Note that Hive's metastore also tracks skewed columns. We should consider adding that in the
* future once we have a better understanding of how we want to handle skewed columns.
*
* @param provider the name of the data source provider for this table, e.g. parquet, json, etc.
* Can be None if this table is a View, should be "hive" for hive serde tables.
* @param unsupportedFeatures is a list of string descriptions of features that are used by the
* underlying table but not supported by Spark SQL yet.
* @param tracksPartitionsInCatalog whether this table's partition metadata is stored in the
* catalog. If false, it is inferred automatically based on file
* structure.
* @param schemaPreservesCase Whether or not the schema resolved for this table is case-sensitive.
* When using a Hive Metastore, this flag is set to false if a case-
* sensitive schema was unable to be read from the table properties.
* Used to trigger case-sensitive schema inference at query time, when
* configured.
* @param ignoredProperties is a list of table properties that are used by the underlying table
* but ignored by Spark SQL yet.
* @param createVersion records the version of Spark that created this table metadata. The default
* is an empty string. We expect it will be read from the catalog or filled by
* ExternalCatalog.createTable. For temporary views, the value will be empty.
*/
case class CatalogTable(
identifier: TableIdentifier,
tableType: CatalogTableType,
storage: CatalogStorageFormat,
schema: StructType,
provider: Option[String] = None,
partitionColumnNames: Seq[String] = Seq.empty,
bucketSpec: Option[BucketSpec] = None,
owner: String = "",
createTime: Long = System.currentTimeMillis,
lastAccessTime: Long = -1,
createVersion: String = "",
properties: Map[String, String] = Map.empty,
stats: Option[CatalogStatistics] = None,
viewText: Option[String] = None,
comment: Option[String] = None,
unsupportedFeatures: Seq[String] = Seq.empty,
tracksPartitionsInCatalog: Boolean = false,
schemaPreservesCase: Boolean = true,
ignoredProperties: Map[String, String] = Map.empty,
viewOriginalText: Option[String] = None) {
import CatalogTable._
/**
* schema of this table's partition columns
*/
def partitionSchema: StructType = {
val partitionFields = schema.takeRight(partitionColumnNames.length)
assert(partitionFields.map(_.name) == partitionColumnNames)
StructType(partitionFields)
}
/**
* schema of this table's data columns
*/
def dataSchema: StructType = {
val dataFields = schema.dropRight(partitionColumnNames.length)
StructType(dataFields)
}
/** Return the database this table was specified to belong to, assuming it exists. */
def database: String = identifier.database.getOrElse {
throw new AnalysisException(s"table $identifier did not specify database")
}
/** Return the table location, assuming it is specified. */
def location: URI = storage.locationUri.getOrElse {
throw new AnalysisException(s"table $identifier did not specify locationUri")
}
/** Return the fully qualified name of this table, assuming the database was specified. */
def qualifiedName: String = identifier.unquotedString
/**
* Return the default database name we use to resolve a view, should be None if the CatalogTable
   * is not a View or created by older versions of Spark (before 2.2.0).
*/
def viewDefaultDatabase: Option[String] = properties.get(VIEW_DEFAULT_DATABASE)
/**
* Return the output column names of the query that creates a view, the column names are used to
* resolve a view, should be empty if the CatalogTable is not a View or created by older versions
   * of Spark (before 2.2.0).
*/
def viewQueryColumnNames: Seq[String] = {
for {
numCols <- properties.get(VIEW_QUERY_OUTPUT_NUM_COLUMNS).toSeq
index <- 0 until numCols.toInt
} yield properties.getOrElse(
s"$VIEW_QUERY_OUTPUT_COLUMN_NAME_PREFIX$index",
throw new AnalysisException("Corrupted view query output column names in catalog: " +
s"$numCols parts expected, but part $index is missing.")
)
}
/** Syntactic sugar to update a field in `storage`. */
def withNewStorage(
locationUri: Option[URI] = storage.locationUri,
inputFormat: Option[String] = storage.inputFormat,
outputFormat: Option[String] = storage.outputFormat,
compressed: Boolean = false,
serde: Option[String] = storage.serde,
properties: Map[String, String] = storage.properties): CatalogTable = {
copy(storage = CatalogStorageFormat(
locationUri, inputFormat, outputFormat, serde, compressed, properties))
}
def toLinkedHashMap: mutable.LinkedHashMap[String, String] = {
val map = new mutable.LinkedHashMap[String, String]()
val tableProperties = properties.map(p => p._1 + "=" + p._2).mkString("[", ", ", "]")
val partitionColumns = partitionColumnNames.map(quoteIdentifier).mkString("[", ", ", "]")
identifier.database.foreach(map.put("Database", _))
map.put("Table", identifier.table)
if (owner != null && owner.nonEmpty) map.put("Owner", owner)
map.put("Created Time", new Date(createTime).toString)
map.put("Last Access", new Date(lastAccessTime).toString)
map.put("Created By", "Spark " + createVersion)
map.put("Type", tableType.name)
provider.foreach(map.put("Provider", _))
bucketSpec.foreach(map ++= _.toLinkedHashMap)
comment.foreach(map.put("Comment", _))
if (tableType == CatalogTableType.VIEW) {
viewText.foreach(map.put("View Text", _))
viewOriginalText.foreach(map.put("View Original Text", _))
viewDefaultDatabase.foreach(map.put("View Default Database", _))
if (viewQueryColumnNames.nonEmpty) {
map.put("View Query Output Columns", viewQueryColumnNames.mkString("[", ", ", "]"))
}
}
if (properties.nonEmpty) map.put("Table Properties", tableProperties)
stats.foreach(s => map.put("Statistics", s.simpleString))
map ++= storage.toLinkedHashMap
if (tracksPartitionsInCatalog) map.put("Partition Provider", "Catalog")
if (partitionColumnNames.nonEmpty) map.put("Partition Columns", partitionColumns)
if (schema.nonEmpty) map.put("Schema", schema.treeString)
map
}
override def toString: String = {
    toLinkedHashMap.map { case (key, value) =>
if (value.isEmpty) key else s"$key: $value"
}.mkString("CatalogTable(\n", "\n", ")")
}
/** Readable string representation for the CatalogTable. */
def simpleString: String = {
    toLinkedHashMap.map { case (key, value) =>
if (value.isEmpty) key else s"$key: $value"
}.mkString("", "\n", "")
}
}
object CatalogTable {
val VIEW_DEFAULT_DATABASE = "view.default.database"
val VIEW_QUERY_OUTPUT_PREFIX = "view.query.out."
val VIEW_QUERY_OUTPUT_NUM_COLUMNS = VIEW_QUERY_OUTPUT_PREFIX + "numCols"
val VIEW_QUERY_OUTPUT_COLUMN_NAME_PREFIX = VIEW_QUERY_OUTPUT_PREFIX + "col."
}
/**
* This class of statistics is used in [[CatalogTable]] to interact with metastore.
* We define this new class instead of directly using [[Statistics]] here because there are no
* concepts of attributes in catalog.
*/
case class CatalogStatistics(
sizeInBytes: BigInt,
rowCount: Option[BigInt] = None,
colStats: Map[String, CatalogColumnStat] = Map.empty) {
/**
* Convert [[CatalogStatistics]] to [[Statistics]], and match column stats to attributes based
* on column names.
*/
def toPlanStats(planOutput: Seq[Attribute], cboEnabled: Boolean): Statistics = {
if (cboEnabled && rowCount.isDefined) {
val attrStats = AttributeMap(planOutput
.flatMap(a => colStats.get(a.name).map(a -> _.toPlanStat(a.name, a.dataType))))
// Estimate size as number of rows * row size.
val size = EstimationUtils.getOutputSize(planOutput, rowCount.get, attrStats)
Statistics(sizeInBytes = size, rowCount = rowCount, attributeStats = attrStats)
} else {
// When CBO is disabled or the table doesn't have other statistics, we apply the size-only
// estimation strategy and only propagate sizeInBytes in statistics.
Statistics(sizeInBytes = sizeInBytes)
}
}
/** Readable string representation for the CatalogStatistics. */
def simpleString: String = {
val rowCountString = if (rowCount.isDefined) s", ${rowCount.get} rows" else ""
s"$sizeInBytes bytes$rowCountString"
}
}
/**
* This class of statistics for a column is used in [[CatalogTable]] to interact with metastore.
*/
case class CatalogColumnStat(
distinctCount: Option[BigInt] = None,
min: Option[String] = None,
max: Option[String] = None,
nullCount: Option[BigInt] = None,
avgLen: Option[Long] = None,
maxLen: Option[Long] = None,
histogram: Option[Histogram] = None,
version: Int = CatalogColumnStat.VERSION) {
/**
* Returns a map from string to string that can be used to serialize the column stats.
* The key is the name of the column and name of the field (e.g. "colName.distinctCount"),
* and the value is the string representation for the value.
* min/max values are stored as Strings. They can be deserialized using
* [[CatalogColumnStat.fromExternalString]].
*
   * As part of the protocol, the returned map always contains a "<colName>.version" key.
* Any of the fields that are null (None) won't appear in the map.
*/
def toMap(colName: String): Map[String, String] = {
val map = new scala.collection.mutable.HashMap[String, String]
map.put(s"${colName}.${CatalogColumnStat.KEY_VERSION}", CatalogColumnStat.VERSION.toString)
distinctCount.foreach { v =>
map.put(s"${colName}.${CatalogColumnStat.KEY_DISTINCT_COUNT}", v.toString)
}
nullCount.foreach { v =>
map.put(s"${colName}.${CatalogColumnStat.KEY_NULL_COUNT}", v.toString)
}
avgLen.foreach { v => map.put(s"${colName}.${CatalogColumnStat.KEY_AVG_LEN}", v.toString) }
maxLen.foreach { v => map.put(s"${colName}.${CatalogColumnStat.KEY_MAX_LEN}", v.toString) }
min.foreach { v => map.put(s"${colName}.${CatalogColumnStat.KEY_MIN_VALUE}", v) }
max.foreach { v => map.put(s"${colName}.${CatalogColumnStat.KEY_MAX_VALUE}", v) }
histogram.foreach { h =>
map.put(s"${colName}.${CatalogColumnStat.KEY_HISTOGRAM}", HistogramSerializer.serialize(h))
}
map.toMap
}
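  // Illustrative result of toMap("age") (all values hypothetical):
  //   Map("age.version" -> "2", "age.distinctCount" -> "100",
  //       "age.min" -> "0", "age.max" -> "95", "age.nullCount" -> "0",
  //       "age.avgLen" -> "4", "age.maxLen" -> "4")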
/** Convert [[CatalogColumnStat]] to [[ColumnStat]]. */
def toPlanStat(
colName: String,
dataType: DataType): ColumnStat =
ColumnStat(
distinctCount = distinctCount,
min = min.map(CatalogColumnStat.fromExternalString(_, colName, dataType, version)),
max = max.map(CatalogColumnStat.fromExternalString(_, colName, dataType, version)),
nullCount = nullCount,
avgLen = avgLen,
maxLen = maxLen,
histogram = histogram,
version = version)
}
object CatalogColumnStat extends Logging {
// List of string keys used to serialize CatalogColumnStat
val KEY_VERSION = "version"
private val KEY_DISTINCT_COUNT = "distinctCount"
private val KEY_MIN_VALUE = "min"
private val KEY_MAX_VALUE = "max"
private val KEY_NULL_COUNT = "nullCount"
private val KEY_AVG_LEN = "avgLen"
private val KEY_MAX_LEN = "maxLen"
private val KEY_HISTOGRAM = "histogram"
val VERSION = 2
private def getTimestampFormatter(): TimestampFormatter = {
TimestampFormatter(format = "yyyy-MM-dd HH:mm:ss.SSSSSS", zoneId = ZoneOffset.UTC)
}
/**
   * Converts the string representation of a value (in the external format) back into
   * the corresponding Catalyst value of the given data type.
*/
def fromExternalString(s: String, name: String, dataType: DataType, version: Int): Any = {
dataType match {
case BooleanType => s.toBoolean
case DateType if version == 1 => DateTimeUtils.fromJavaDate(java.sql.Date.valueOf(s))
case DateType => DateFormatter().parse(s)
case TimestampType if version == 1 =>
DateTimeUtils.fromJavaTimestamp(java.sql.Timestamp.valueOf(s))
case TimestampType => getTimestampFormatter().parse(s)
case ByteType => s.toByte
case ShortType => s.toShort
case IntegerType => s.toInt
case LongType => s.toLong
case FloatType => s.toFloat
case DoubleType => s.toDouble
case _: DecimalType => Decimal(s)
// This version of Spark does not use min/max for binary/string types so we ignore it.
case BinaryType | StringType => null
case _ =>
throw new AnalysisException("Column statistics deserialization is not supported for " +
s"column $name of data type: $dataType.")
}
}
/**
* Converts the given value from Catalyst data type to string representation of external
* data type.
*/
def toExternalString(v: Any, colName: String, dataType: DataType): String = {
val externalValue = dataType match {
case DateType => DateFormatter().format(v.asInstanceOf[Int])
case TimestampType => getTimestampFormatter().format(v.asInstanceOf[Long])
case BooleanType | _: IntegralType | FloatType | DoubleType => v
case _: DecimalType => v.asInstanceOf[Decimal].toJavaBigDecimal
// This version of Spark does not use min/max for binary/string types so we ignore it.
case _ =>
throw new AnalysisException("Column statistics serialization is not supported for " +
s"column $colName of data type: $dataType.")
}
externalValue.toString
}
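  // Symmetry sketch (assumed column "ts" of TimestampType, microseconds since epoch):
  //   val s = toExternalString(1546300800000000L, "ts", TimestampType)
  //   fromExternalString(s, "ts", TimestampType, VERSION) == 1546300800000000L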
/**
* Creates a [[CatalogColumnStat]] object from the given map.
* This is used to deserialize column stats from some external storage.
* The serialization side is defined in [[CatalogColumnStat.toMap]].
*/
def fromMap(
table: String,
colName: String,
map: Map[String, String]): Option[CatalogColumnStat] = {
try {
Some(CatalogColumnStat(
distinctCount = map.get(s"${colName}.${KEY_DISTINCT_COUNT}").map(v => BigInt(v.toLong)),
min = map.get(s"${colName}.${KEY_MIN_VALUE}"),
max = map.get(s"${colName}.${KEY_MAX_VALUE}"),
nullCount = map.get(s"${colName}.${KEY_NULL_COUNT}").map(v => BigInt(v.toLong)),
avgLen = map.get(s"${colName}.${KEY_AVG_LEN}").map(_.toLong),
maxLen = map.get(s"${colName}.${KEY_MAX_LEN}").map(_.toLong),
histogram = map.get(s"${colName}.${KEY_HISTOGRAM}").map(HistogramSerializer.deserialize),
version = map(s"${colName}.${KEY_VERSION}").toInt
))
} catch {
case NonFatal(e) =>
logWarning(s"Failed to parse column statistics for column ${colName} in table $table", e)
None
}
}
}
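// Round-trip sketch (not part of the original file; column "age" and the sample
// values are assumptions): toMap emits "<colName>.<field>" keys plus "age.version",
// and fromMap rebuilds the stat, returning None if any value fails to parse.
object CatalogColumnStatExample {
  val stat = CatalogColumnStat(
    distinctCount = Some(BigInt(42)),
    min = Some("1"),
    max = Some("99"),
    nullCount = Some(BigInt(0)),
    avgLen = Some(4L),
    maxLen = Some(4L))
  val serialized: Map[String, String] = stat.toMap("age")
  val restored: Option[CatalogColumnStat] = CatalogColumnStat.fromMap("people", "age", serialized) // Some(stat)
}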
case class CatalogTableType private(name: String)
object CatalogTableType {
val EXTERNAL = new CatalogTableType("EXTERNAL")
val MANAGED = new CatalogTableType("MANAGED")
val VIEW = new CatalogTableType("VIEW")
}
/**
* A database defined in the catalog.
*/
case class CatalogDatabase(
name: String,
description: String,
locationUri: URI,
properties: Map[String, String])
object CatalogTypes {
/**
* Specifications of a table partition. Mapping column name to column value.
*/
type TablePartitionSpec = Map[String, String]
/**
* Initialize an empty spec.
*/
lazy val emptyTablePartitionSpec: TablePartitionSpec = Map.empty[String, String]
}
/**
* A placeholder for a table relation, which will be replaced by concrete relation like
* `LogicalRelation` or `HiveTableRelation`, during analysis.
*/
case class UnresolvedCatalogRelation(tableMeta: CatalogTable) extends LeafNode {
assert(tableMeta.identifier.database.isDefined)
override lazy val resolved: Boolean = false
override def output: Seq[Attribute] = Nil
}
/**
* A `LogicalPlan` that represents a hive table.
*
* TODO: remove this after we completely make hive as a data source.
*/
case class HiveTableRelation(
tableMeta: CatalogTable,
dataCols: Seq[AttributeReference],
partitionCols: Seq[AttributeReference]) extends LeafNode with MultiInstanceRelation {
assert(tableMeta.identifier.database.isDefined)
assert(tableMeta.partitionSchema.sameType(partitionCols.toStructType))
assert(tableMeta.dataSchema.sameType(dataCols.toStructType))
// The partition column should always appear after data columns.
override def output: Seq[AttributeReference] = dataCols ++ partitionCols
def isPartitioned: Boolean = partitionCols.nonEmpty
override def doCanonicalize(): HiveTableRelation = copy(
tableMeta = tableMeta.copy(
storage = CatalogStorageFormat.empty,
createTime = -1
),
dataCols = dataCols.zipWithIndex.map {
case (attr, index) => attr.withExprId(ExprId(index))
},
partitionCols = partitionCols.zipWithIndex.map {
case (attr, index) => attr.withExprId(ExprId(index + dataCols.length))
}
)
override def computeStats(): Statistics = {
tableMeta.stats.map(_.toPlanStats(output, conf.cboEnabled)).getOrElse {
throw new IllegalStateException("table stats must be specified.")
}
}
override def newInstance(): HiveTableRelation = copy(
dataCols = dataCols.map(_.newInstance()),
partitionCols = partitionCols.map(_.newInstance()))
}
| aosagie/spark | sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/interface.scala | Scala | apache-2.0 | 25,281 |
package com.github.ldaniels528.trifecta.modules.etl.io.record.impl
import com.github.ldaniels528.commons.helpers.OptionHelper.Risky._
import com.github.ldaniels528.trifecta.modules.etl.io.Scope
import com.github.ldaniels528.trifecta.modules.etl.io.record.{DataTypes, Field}
import org.scalatest.Matchers._
import org.scalatest.mock.MockitoSugar
import org.scalatest.{BeforeAndAfterEach, FeatureSpec, GivenWhenThen}
/**
* Fixed-Length Record Specification
* @author [email protected]
*/
class FixedLengthRecordSpec() extends FeatureSpec with BeforeAndAfterEach with GivenWhenThen with MockitoSugar {
info("As a FixedLengthRecord instance")
info("I want to be able to transform text into a fixed-length string (and vice versa)")
feature("Transforms text to fixed-length records") {
scenario("Import a fixed-length stock quote into a fixed-length record") {
Given("a text string in fixed-length format")
val inText = "AAPL 96.76 96.99 95.89 109.99"
And("a fixed-length record")
val record = FixedRecord(
id = "fixed_rec",
fields = Seq(
Field(name = "symbol", `type` = DataTypes.STRING, length = 10),
Field(name = "open", `type` = DataTypes.STRING, length = 10),
Field(name = "close", `type` = DataTypes.STRING, length = 10),
Field(name = "low", `type` = DataTypes.STRING, length = 10),
Field(name = "high", `type` = DataTypes.STRING, length = 10)
))
And("a scope")
implicit val scope = new Scope()
When("the text is consumed")
val dataSet = record.fromText(inText)
dataSet.data foreach {
case (name, Some(value)) =>
info(s"name: $name, value: '$value'")
case (name, None) =>
info(s"name: $name, value is null")
}
Then("the toText method should return the fixed-length string")
val outText = record.toText(dataSet)
info(s"[$outText]")
outText.trim shouldBe inText
And(s"the record must contain the values")
val validation = List("symbol" -> "AAPL", "open" -> "96.76", "close" -> "96.99", "low" -> "95.89", "high" -> "109.99")
dataSet.data shouldBe validation.map { case (k, v) => (k, Option(v)) }
}
}
} | ldaniels528/trifecta | app-cli/src/test/scala/com/github/ldaniels528/trifecta/modules/etl/io/record/impl/FixedLengthRecordSpec.scala | Scala | apache-2.0 | 2,268 |
package org.t3as.redact.service
import javax.servlet.ServletConfig
import javax.servlet.http.HttpServlet
import io.swagger.jaxrs.config.BeanConfig
/** Hard-coded config for now.
* Could get from a properties file and allow overriding with env vars
* (as in https://github.inside.nicta.com.au/nbacon/social-watch/tree/master/analytics).
*/
class MySwaggerBootstrap extends HttpServlet {
override def init(c: ServletConfig) = {
super.init(c)
val b = new BeanConfig
b.setVersion("1.0")
b.setContact("[email protected]")
b.setTitle("PDF Redaction")
b.setDescription("Web services for named entity recognition and PDF redaction")
b.setLicense("AGPL")
b.setLicenseUrl("http://www.gnu.org/licenses/agpl-3.0.en.html")
// Omit to use current scheme & host
// b.setSchemes(Array("http"))
// b.setHost("http://redact.t3as.org/")
b.setBasePath("/rest");
b.setResourcePackage("org.t3as.redact.service") // was io.swagger.resources
b.setScan(true)
b.setPrettyPrint(true)
}
} | NICTA/t3as-redact | src/main/scala/org/t3as/redact/service/MySwaggerBootstrap.scala | Scala | agpl-3.0 | 1,098 |
package PaperCode.Sec4OA
//BEGIN_OVERVIEW_OA_ALG
trait ExprAlg[E] {
def lit(n: Int): E
def add(e1: E, e2: E): E
}
//END_OVERVIEW_OA_ALG
//BEGIN_OVERVIEW_OA_PRINT
trait Print extends ExprAlg[String] {
def lit(n: Int) = n.toString
def add(e1: String, e2: String) = "(" + e1 + " + " + e2 + ")"
}
//END_OVERVIEW_OA_PRINT
//BEGIN_OVERVIEW_OA_EVAL
trait Eval extends ExprAlg[Int] {
def lit(n: Int) = n
def add(e1: Int, e2: Int) = e1 + e2
}
//END_OVERVIEW_OA_EVAL
//BEGIN_OVERVIEW_OA_ALGEXT
trait VarExprAlg[E] extends ExprAlg[E] {
def varE(x: String): E
}
//END_OVERVIEW_OA_ALGEXT
//BEGIN_OVERVIEW_OA_EXTPRINT
trait VarExprPrint extends VarExprAlg[String] with Print {
def varE(x: String) = x
}
//END_OVERVIEW_OA_EXTPRINT
object Code2 {
//BEGIN_OVERVIEW_OA_MAKEEXP
def makeExp[E](alg: VarExprAlg[E]): E = alg.add(alg.lit(1), alg.varE("x"))
//END_OVERVIEW_OA_MAKEEXP
//BEGIN_OVERVIEW_OA_REFACTOR
trait Refactor[E] extends VarExprAlg[E] {
val alg: ExprAlg[E]
val env: Map[String, Int]
def lit(n: Int): E = alg.lit(n)
def add(e1: E, e2: E): E = alg.add(e1, e2)
def varE(x: String): E = alg.lit(env(x))
}
val r = makeExp(new Refactor[String] {
override val alg = new Print {}
override val env = Map("x" -> 2)
}) // "(1 + 2)"
//END_OVERVIEW_OA_REFACTOR
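  // Illustrative sketch (not in the original snippet): variables can also be
  // folded directly into the Eval algebra, mirroring VarExprPrint above.
  trait VarExprEval extends VarExprAlg[Int] with Eval {
    val env: Map[String, Int]
    def varE(x: String): Int = env(x)
  }
  val v = makeExp(new VarExprEval { override val env = Map("x" -> 2) }) // 3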
def main(args: Array[String]): Unit = {
println(r)
}
}
| hy-zhang/parser | Scala/Parser/src/PaperCode/Sec4OA/Code2.scala | Scala | bsd-3-clause | 1,347 |
/**
* Copyright (C) 2012 Inria, University Lille 1.
*
* This file is part of PowerAPI.
*
* PowerAPI is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* PowerAPI is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with PowerAPI. If not, see <http://www.gnu.org/licenses/>.
*
* Contact: [email protected].
*/
package fr.inria.powerapi.example.cpumonitor.linux
import fr.inria.powerapi.core.Energy
import fr.inria.powerapi.core.Process
import fr.inria.powerapi.core.Tick
import fr.inria.powerapi.core.TickSubscription
import fr.inria.powerapi.formula.cpu.api.CpuFormulaMessage
import fr.inria.powerapi.listener.cpu.jfreechart.Chart
import fr.inria.powerapi.listener.cpu.jfreechart.CpuListener
/**
* CPU Listener which filter received information before display it into a graph.
*
* @author abourdon
*/
class FilteredChart extends CpuListener {
val powers = collection.mutable.HashMap[Process, Double]()
override def acquire = {
case cpuFormulaMessage: CpuFormulaMessage => {
val process = cpuFormulaMessage.tick.subscription.process
val old = powers getOrElse (process, Double.PositiveInfinity)
val now = cpuFormulaMessage.energy.power
      // Only update the chart when min(now, old) / max(now, old) < 0.1, i.e. the power
      // changed by more than a factor of ten; the first value always passes because
      // old defaults to positive infinity.
if (math.min(now, old) / math.max(now, old) < 0.1) {
powers += (process -> now)
Chart.process(cpuFormulaMessage)
}
}
}
}
/**
* CPU listener which gather all CpuFormulaMessage in order to compute only one result
* as the sum of all received CpuFormulaMessage for a specific timestamp.
*
* @author abourdon
*/
class GatheredChart extends CpuListener {
val cache = collection.mutable.HashMap[Long, Double]()
override def process(cpuFormulaMessage: CpuFormulaMessage) {
def gatherPowers(cpuFormulaMessage: CpuFormulaMessage) {
cache(cpuFormulaMessage.tick.timestamp) += cpuFormulaMessage.energy.power
}
def displayCache(cpuFormulaMessage: CpuFormulaMessage) {
if (!cache.isEmpty) {
val timestamp = cache.keySet.toIndexedSeq(0)
Chart.process(
CpuFormulaMessage(
Energy.fromPower(cache(timestamp)),
Tick(TickSubscription(Process(-1), cpuFormulaMessage.tick.subscription.duration))))
}
}
def updateTimestamp(cpuFormulaMessage: CpuFormulaMessage) {
if (!cache.isEmpty) {
val oldTimestamp = cache.keySet.toIndexedSeq(0)
cache -= oldTimestamp
}
cache += (cpuFormulaMessage.tick.timestamp -> 0)
}
if (cache.contains(cpuFormulaMessage.tick.timestamp)) {
gatherPowers(cpuFormulaMessage)
} else {
displayCache(cpuFormulaMessage)
updateTimestamp(cpuFormulaMessage)
}
}
} | abourdon/powerapi-akka | examples/example-cpumonitor-linux/src/main/scala/fr/inria/powerapi/example/cpumonitor/linux/Listeners.scala | Scala | agpl-3.0 | 3,214 |
package com.github.agaro1121.parsing.payloads
object LeagueRule {
val payload =
"""
|{
| "id": "4",
| "name": "Hardcore",
| "description": "A character killed in Hardcore is moved to its parent league."
|}
""".stripMargin
}
| agaro1121/PathOfExileApiClient | src/test/scala/com/github/agaro1121/parsing/payloads/LeagueRule.scala | Scala | mit | 270 |
class Open
class Door[State] {
def close[Phantom >: State <: Open]: Int = 0
def open[Phantom >: State <: Open](): Int = 0
}
class Test {
val door = new Door[AnyRef]
// the error here happens later (at refchecks)
println(door.close.toString)
// the errors below happen when typing implicit conversions
println(door.close.toString())
println(door.close == 0)
println(door.open().toString)
println(door.open().toString())
println(door.open() == 0)
}
| scala/scala | test/files/neg/t12413.scala | Scala | apache-2.0 | 471 |
// Visual test for ansiescape
// Run from Activator console:
// :load src/test/visual-test.scala
import me.pmatiello.ansiescape.ANSI._
println("Visual Test")
println()
println(black("black"))
println(red("red"))
println(green("green"))
println(yellow("yellow"))
println(blue("blue"))
println(purple("purple"))
println(cyan("cyan"))
println(white("white"))
println()
println(blackBackground("black-background"))
println(redBackground("red-background"))
println(greenBackground("green-background"))
println(yellowBackground("yellow-background"))
println(blueBackground("blue-background"))
println(purpleBackground("purple-background"))
println(cyanBackground("cyan-background"))
println(whiteBackground("white-background"))
println()
println(bold(black("bold-black")))
println(bold(red("bold-red")))
println(bold(green("bold-green")))
println(bold(yellow("bold-yellow")))
println(bold(blue("bold-blue")))
println(bold(purple("bold-purple")))
println(bold(cyan("bold-cyan")))
println(bold(white("bold-white")))
println()
println(blink(black("blink-black")))
println(blink(red("blink-red")))
println(blink(green("blink-green")))
println(blink(yellow("blink-yellow")))
println(blink(blue("blink-blue")))
println(blink(purple("blink-purple")))
println(blink(cyan("blink-cyan")))
println(blink(white("blink-white")))
println()
println(underline(black("underline-black")))
println(underline(red("underline-red")))
println(underline(green("underline-green")))
println(underline(yellow("underline-yellow")))
println(underline(blue("underline-blue")))
println(underline(purple("underline-purple")))
println(underline(cyan("underline-cyan")))
println(underline(white("underline-white")))
println()
println(underline(blink(bold(red("underline-blink-bold-red")))))
println(underline(blink(bold(red(whiteBackground("underline-blink-bold-red-white-background"))))))
println()
| pmatiello/ansiescape | src/test/visual-test.scala | Scala | apache-2.0 | 1,868 |
package models.database.alias
import org.squeryl.KeyedEntity
import org.squeryl.annotations._
case class Artist(@Column("artist_name") name:String,
@Column("spotify_id") spotifyId:Option[String] = None,
@Column("napster_id") napsterId:Option[String] = None,
@Column("soundcloud_id") soundcloudId:Option[String] = None,
@Column("deezer_id") deezerId:Option[String] = None,
@Column("lastfm_id") lastfmId:Option[String] = None,
@Column("pic_url") pic:Option[String] = None) extends KeyedEntity[Long] {
@Column("id_artist") val id:Long = 0
}
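// Minimal usage sketch (the Spotify id value is an assumption): only the name is
// required; every external id and the picture URL default to None.
object ArtistExample {
  val artist = Artist("Radiohead", spotifyId = Some("4Z8W4fKeB5YxbusRsdQVPb"))
}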
| haffla/stream-compare | app/models/database/alias/Artist.scala | Scala | gpl-3.0 | 652 |
/*
* Copyright 2001-2009 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest.fixture
import org.scalatest._
import scala.collection.immutable.ListSet
import org.scalatest.Suite.autoTagClassAnnotations
/**
* Implementation trait for class <code>fixture.FunSuite</code>, which is
* a sister class to <code>org.scalatest.FunSuite</code> that can pass a
* fixture object into its tests.
*
* <p>
* <a href="FunSuite.html"><code>fixture.FunSuite</code></a> is a class,
* not a trait, to minimize compile time given there is a slight compiler
* overhead to mixing in traits compared to extending classes. If you need
* to mix the behavior of <code>fixture.FunSuite</code> into some other
* class, you can use this trait instead, because class
* <code>fixture.FunSuite</code> does nothing more than extend this trait.
* </p>
*
* <p>
* See the documentation of the class for a <a href="FunSuite.html">detailed
* overview of <code>fixture.FunSuite</code></a>.
* </p>
*
* @author Bill Venners
*/
@Finders(Array("org.scalatest.finders.FunSuiteFinder"))
trait FunSuiteLike extends Suite { thisSuite =>
private final val engine = new FixtureEngine[FixtureParam]("concurrentFixtureFunSuiteMod", "FixtureFunSuite")
import engine._
private[scalatest] val sourceFileName = "FunSuiteLike.scala"
/**
* Returns an <code>Informer</code> that during test execution will forward strings (and other objects) passed to its
* <code>apply</code> method to the current reporter. If invoked in a constructor, it
* will register the passed string for forwarding later during test execution. If invoked while this
* <code>fixture.FunSuite</code> is being executed, such as from inside a test function, it will forward the information to
* the current reporter immediately. If invoked at any other time, it will
* throw an exception. This method can be called safely by any thread.
*/
implicit protected def info: Informer = atomicInformer.get
/**
* Register a test with the specified name, optional tags, and function value that takes no arguments.
* This method will register the test for later execution via an invocation of one of the <code>run</code>
* methods. The passed test name must not have been registered previously on
* this <code>FunSuite</code> instance.
*
* @param testName the name of the test
* @param testTags the optional list of tags for this test
* @param testFun the test function
* @throws TestRegistrationClosedException if invoked after <code>run</code> has been invoked on this suite
* @throws DuplicateTestNameException if a test with the same name has been registered previously
* @throws NotAllowedException if <code>testName</code> had been registered previously
* @throws NullPointerException if <code>testName</code> or any passed test tag is <code>null</code>
*/
protected def test(testName: String, testTags: Tag*)(testFun: FixtureParam => Any) {
registerTest(testName, Transformer(testFun), "testCannotAppearInsideAnotherTest", sourceFileName, "test", 4, -2, None, None, None, testTags: _*)
}
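  // Usage sketch (assumes the concrete suite defines FixtureParam, e.g. a DB
  // connection, and implements withFixture to supply it):
  //   test("query returns rows", Tag("db")) { fixture =>
  //     assert(fixture != null)
  //   }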
/**
* Register a test to ignore, which has the specified name, optional tags, and function value that takes no arguments.
* This method will register the test for later ignoring via an invocation of one of the <code>run</code>
* methods. This method exists to make it easy to ignore an existing test by changing the call to <code>test</code>
* to <code>ignore</code> without deleting or commenting out the actual test code. The test will not be run, but a
* report will be sent that indicates the test was ignored. The passed test name must not have been registered previously on
* this <code>FunSuite</code> instance.
*
* @param testName the name of the test
* @param testTags the optional list of tags for this test
* @param testFun the test function
* @throws TestRegistrationClosedException if invoked after <code>run</code> has been invoked on this suite
* @throws DuplicateTestNameException if a test with the same name has been registered previously
* @throws NotAllowedException if <code>testName</code> had been registered previously
*/
protected def ignore(testName: String, testTags: Tag*)(testFun: FixtureParam => Any) {
registerIgnoredTest(testName, Transformer(testFun), "ignoreCannotAppearInsideATest", sourceFileName, "ignore", 4, -2, None, testTags: _*)
}
/**
* An immutable <code>Set</code> of test names. If this <code>fixture.FunSuite</code> contains no tests, this method returns an empty <code>Set</code>.
*
* <p>
* This trait's implementation of this method will return a set that contains the names of all registered tests. The set's iterator will
* return those names in the order in which the tests were registered.
* </p>
*/
override def testNames: Set[String] = {
    // I'm returning a ListSet here so that the tests will be run in registration order
ListSet(atomic.get.testNamesList.toArray: _*)
}
/**
* Run a test. This trait's implementation runs the test registered with the name specified by <code>testName</code>.
*
* @param testName the name of one test to run.
* @param args the <code>Args</code> for this run
*
* @throws IllegalArgumentException if <code>testName</code> is defined but a test with that name does not exist on this <code>fixture.FunSuite</code>
* @throws NullPointerException if any of <code>testName</code>, <code>reporter</code>, <code>stopper</code>, or <code>configMap</code>
* is <code>null</code>.
*/
protected override def runTest(testName: String, args: Args): Status = {
def invokeWithFixture(theTest: TestLeaf): Outcome = {
theTest.testFun match {
case transformer: org.scalatest.fixture.Transformer[_] =>
transformer.exceptionalTestFun match {
case wrapper: NoArgTestWrapper[_] =>
withFixture(new FixturelessTestFunAndConfigMap(testName, wrapper.test, args.configMap))
case fun => withFixture(new TestFunAndConfigMap(testName, fun, args.configMap))
}
case other =>
other match {
case wrapper: NoArgTestWrapper[_] =>
withFixture(new FixturelessTestFunAndConfigMap(testName, wrapper.test, args.configMap))
case fun => withFixture(new TestFunAndConfigMap(testName, fun, args.configMap))
}
}
}
runTestImpl(thisSuite, testName, args, true, invokeWithFixture)
}
/**
* A <code>Map</code> whose keys are <code>String</code> tag names to which tests in this <code>fixture.FunSuite</code> belong, and values
* the <code>Set</code> of test names that belong to each tag. If this <code>fixture.FunSuite</code> contains no tags, this method returns an empty
* <code>Map</code>.
*
* <p>
* This trait's implementation returns tags that were passed as strings contained in <code>Tag</code> objects passed to
* methods <code>test</code> and <code>ignore</code>.
* </p>
*
* <p>
* In addition, this trait's implementation will also auto-tag tests with class level annotations.
* For example, if you annotate @Ignore at the class level, all test methods in the class will be auto-annotated with @Ignore.
* </p>
*/
override def tags: Map[String, Set[String]] = autoTagClassAnnotations(atomic.get.tagsMap, this)
protected override def runTests(testName: Option[String], args: Args): Status = {
runTestsImpl(thisSuite, testName, args, info, true, runTest)
}
override def run(testName: Option[String], args: Args): Status = {
runImpl(thisSuite, testName, args: Args, super.run)
}
/**
* Registers shared tests.
*
* <p>
* This method enables the following syntax for shared tests in a <code>fixture.FunSuite</code>:
* </p>
*
* <pre class="stHighlight">
* testsFor(nonEmptyStack(lastValuePushed))
* </pre>
*
* <p>
* This method just provides syntax sugar intended to make the intent of the code clearer.
* Because the parameter passed to it is
* type <code>Unit</code>, the expression will be evaluated before being passed, which
* is sufficient to register the shared tests. For examples of shared tests, see the
* <a href="../FunSuite.html#SharedTests">Shared tests section</a> in the main documentation for
* trait <code>FunSuite</code>.
* </p>
*/
protected def testsFor(unit: Unit) {}
/**
* Implicitly converts a function that takes no parameters and results in <code>PendingNothing</code> to
* a function from <code>FixtureParam</code> to <code>Any</code>, to enable pending tests to registered as by-name parameters
* by methods that require a test function that takes a <code>FixtureParam</code>.
*
* <p>
* This method makes it possible to write pending tests as simply <code>(pending)</code>, without needing
* to write <code>(fixture => pending)</code>.
* </p>
*/
protected implicit def convertPendingToFixtureFunction(f: => PendingNothing): (FixtureParam => Any) = {
fixture => f
}
/**
* Implicitly converts a function that takes no parameters and results in <code>Any</code> to
* a function from <code>FixtureParam</code> to <code>Any</code>, to enable no-arg tests to registered
* by methods that require a test function that takes a <code>FixtureParam</code>.
*/
protected implicit def convertNoArgToFixtureFunction(fun: () => Any): (FixtureParam => Any) =
new NoArgTestWrapper(fun)
/**
* Suite style name.
*/
final override val styleName: String = "org.scalatest.fixture.FunSuite"
override def testDataFor(testName: String, theConfigMap: ConfigMap = ConfigMap.empty): TestData = createTestDataFor(testName, theConfigMap, this)
}
| svn2github/scalatest | src/main/scala/org/scalatest/fixture/FunSuiteLike.scala | Scala | apache-2.0 | 10,319 |
package workflow
import org.apache.spark.rdd.RDD
/**
 * A [[TransformerOperator]] that gathers multiple datasets of `T` into a dataset of `Seq[T]`
 * (or individual datums of `T` into a single `Seq[T]`).
*/
private[workflow] case class GatherTransformerOperator[T]() extends TransformerOperator {
override private[workflow] def singleTransform(inputs: Seq[DatumExpression]): Any = {
inputs.map(_.get.asInstanceOf[T])
}
override private[workflow] def batchTransform(inputs: Seq[DatasetExpression]): RDD[_] = {
    // Wrap each datum in a single-element Seq, then zip the input RDDs pairwise
    // and concatenate, producing one Seq[T] per row across all input datasets.
    inputs.map(_.get.asInstanceOf[RDD[T]].map(t => Seq(t))).reduceLeft((x, y) => {
      x.zip(y).map(z => z._1 ++ z._2)
    })
}
} | tomerk/keystone | src/main/scala/workflow/GatherTransformerOperator.scala | Scala | apache-2.0 | 657 |
package demy.mllib.text
import demy.mllib.util.log._
import demy.mllib.linalg.implicits._
import java.sql.Timestamp
import org.apache.spark.ml.{Transformer, Estimator, PipelineStage}
import org.apache.spark.ml.param.{Param, Params, ParamMap}
import org.apache.spark.ml.util.Identifiable
import org.apache.spark.ml.linalg.{Vector => MLVector, SparseVector, DenseVector}
import org.apache.spark.sql.{Dataset, DataFrame, Row}
import org.apache.spark.sql.types._
import org.apache.commons.text.similarity.LevenshteinDistance
import org.apache.spark.sql.functions.{udf, col, lower, lit, row_number}
import org.apache.spark.sql.expressions.Window
trait EntityCalculator extends PipelineStage {
final val textsColNames = new Param[Map[String, String]](this, "textsColNames", "The texts column name mappings defaults are: tagGroup, tag, tokens")
final val entitiesColNames = new Param[Map[String, String]](this, "entitiesColNames", "The entities column name mappings defaults are: tagGroup, tag, entityGroup, entityIndex, synonym, iteration, userStatus, changed, score, changedOn, stability")
final val entitiesDF = new Param[DataFrame](this, "entitiesDF", "The current entities")
final val currentIteration = new Param[Int](this, "currentIteration", "The current iteration to be traced")
final val maxPropositions = new Param[Int](this, "maxPropositions", "The maximum numbers of new words to be associated a category")
final val minScoreMatch = new Param[Double](this, "minScoreMatch", "The minimum score of two words to ve considered as synonyms")
def seTextsColNames(value: Map[String, String]): this.type = set(textsColNames, value)
def setEntitiesColNames(value: Map[String, String]): this.type = set(entitiesColNames, value)
def setEntitiesDF(value: DataFrame): this.type = set(entitiesDF, value)
def setCurrentIteration(value: Int): this.type = set(currentIteration, value)
def setMaxPropositions(value: Int): this.type = set(maxPropositions, value)
def setMinScoreMatch(value: Double): this.type = set(minScoreMatch, value)
override def transformSchema(schema: StructType): StructType = schema
def copy(extra: ParamMap): this.type = {defaultCopy(extra)}
  setDefault(textsColNames -> Map("id"->"id", "tagGroup"->"tagGroup", "tag"->"tag", "tokens"->"tokens", "vectors"->"vectors", "entities"->"entities", "text"->"text"
, "iteration"->"iteration", "userStatus"->"userStatus", "changed"->"changed", "score"->"score", "changedOn"->"ChangedOn", "stability"->"stability")
, entitiesColNames -> Map("tagGroup"->"tagGroup", "tag"->"tag", "entityGroup"->"entityGroup", "entityIndex"->"entityIndex", "synonym"->"synonym", "synonymVectors"->"synonymVectors"
, "iteration"->"iteration", "userStatus"->"userStatus", "changed"->"changed", "score"->"score", "changedOn"->"ChangedOn", "stability"->"stability")
, minScoreMatch -> 0.8
)
def entTagGroupCol = getOrDefault(entitiesColNames)("tagGroup")
def entTagCol = getOrDefault(entitiesColNames)("tag")
def entEntityGroupCol = getOrDefault(entitiesColNames)("entityGroup")
def entEntityIndexCol = getOrDefault(entitiesColNames)("entityIndex")
def entSynonymCol = getOrDefault(entitiesColNames)("synonym")
def entSynonymVectorCol = getOrDefault(entitiesColNames)("synonymVectors")
def entUserStatusCol = getOrDefault(entitiesColNames)("userStatus")
def traceEntCols = Array(getOrDefault(entitiesColNames)("iteration"), getOrDefault(entitiesColNames)("userStatus"), getOrDefault(entitiesColNames)("changed")
, getOrDefault(entitiesColNames)("score"), getOrDefault(entitiesColNames)("changedOn"), getOrDefault(entitiesColNames)("stability"))
def textIdCol = getOrDefault(textsColNames)("id")
def textTextCol = getOrDefault(textsColNames)("text")
def textTagGroupCol = getOrDefault(textsColNames)("tagGroup")
def textTagCol = getOrDefault(textsColNames)("tag")
def textEntitiesCol = getOrDefault(textsColNames)("entities")
def textTokensCol = getOrDefault(textsColNames)("tokens")
def textVectorsCol = getOrDefault(textsColNames)("vectors")
// def textUserStatusCol = getOrDefault(textsColNames)("userStatus")
def traceTextCols = Array(getOrDefault(textsColNames)("iteration"), getOrDefault(textsColNames)("userStatus"), getOrDefault(textsColNames)("changed")
, getOrDefault(textsColNames)("score"), getOrDefault(textsColNames)("changedOn"), getOrDefault(textsColNames)("stability"))
def setTextColName(mappings:(String, String)*) : this.type = set(textsColNames, getOrDefault(textsColNames) ++ mappings.toMap)
def setEntColName(mappings:(String, String)*) : this.type = set(entitiesColNames, getOrDefault(entitiesColNames) ++ mappings.toMap)
def proposedTrace(iteration:Int, score:Double) = Trace(iteration=iteration, userStatus = "proposed", score = score)
def entityDS(ds:Dataset[_]):Dataset[(String, String, Int, Int, Seq[String], MLVector, Trace)] = {
import ds.sparkSession.implicits._
    ds.select(col(entTagGroupCol), col(entTagCol), col(entEntityGroupCol), col(entEntityIndexCol), col(entSynonymCol), col(entSynonymVectorCol), Trace.traceExp(getOrDefault(entitiesColNames)))
.as[(String, String, Int, Int, Seq[String], MLVector, Trace)]
}
def textDS(ds:DataFrame):Dataset[(String, String, String, Seq[String], Trace, Seq[String], Seq[MLVector], String)] = {
import ds.sparkSession.implicits._
ds.select(col(textIdCol), col(textTagGroupCol), col(textTagCol), col(textEntitiesCol), Trace.traceExp(), col(textTokensCol), col(textVectorsCol),col(textTextCol))
.as[(String, String, String, Seq[String], Trace, Seq[String], Seq[MLVector], String)]
}
def textDF(ds:Dataset[(String, String, String, Seq[String], Trace, Seq[String], Seq[MLVector], String)]):DataFrame = {
import ds.sparkSession.implicits._
ds.map{case(id, tagGroup, tag, entities, trace, tokens, vectors, text) => (id, tagGroup, tag, entities, tokens, vectors, text, trace.iteration, trace.userStatus, trace.changed, trace.score, trace.changedOn, trace.stability)}
.toDF((Array(textIdCol, textTagGroupCol, textTagCol, textEntitiesCol, textTokensCol, textVectorsCol, textTextCol) ++ traceTextCols ):_*)
}
def groupEntities(ds:Dataset[_]):Dataset[(String, String, Seq[Seq[(Seq[String], MLVector, Trace)]])] = {
import ds.sparkSession.implicits._
val allds = entityDS(ds)
.map(t => t match {case (group, category, entityGroup, entityIndex, synonym, vector, trace) => (group, category, entityGroup, entityIndex, Seq((synonym, vector, trace)))})
.groupByKey(t => t match {case (group, category, entityGroup, entityIndex, synonyms) => (group, category, entityGroup, entityIndex)})
.reduceGroups((t1, t2) => (t1, t2) match {case ((group1, category1, entityGroup1, entitiIndex1, synonyms1),(group2, category2, entityGroup2, entitiIndex2, synonyms2)) => (group1, category1, entityGroup1, entitiIndex1, synonyms1 ++ synonyms2)})
.map(t => t._2 match {case (group, category, entityGroup, entityIndex, synonyms) => (group, category, entityGroup, Map(entityIndex -> synonyms))})
.groupByKey(t => t match {case (group, category, entityGroup, synonyms) => (group, category, entityGroup)})
.reduceGroups((t1, t2) => (t1, t2) match {case ((group1, category1, entityGroup1, synonyms1),(group2, category2, entityGroup2, synonyms2)) => (group1, category1, entityGroup1, synonyms1 ++ synonyms2)})
.map(t => t._2 match {case (group, category, entityGroup, synonyms) => (group, category, synonyms.values.toSeq)})
val singleSizes = allds.filter(t => t match {case (group, category, synonyms) => synonyms.size == 1})
.groupByKey{case (group, category, synonyms) => (group, category)}
.reduceGroups((g1, g2)=>(g1, g2) match {case((group, cat, entity1),(_, _, entity2)) => (group, cat, Seq(entity1(0) ++ entity2(0)))})
.map(_._2)
val biggerSizes = allds.filter(t => t match {case (group, category, synonyms) => synonyms.size> 1})
singleSizes.union(biggerSizes)
}
def singleEntities(ds:Dataset[_], mapping:Map[String, String]=Map[String, String]()):Dataset[(String, String, Seq[(Seq[String], MLVector, Trace)])] = {
import ds.sparkSession.implicits._
groupEntities(ds).flatMap{case(group, category, legs) => if(legs.size ==1) Some(group, category, legs(0)) else None}
}
def ungroupEntities(entities:Array[(String, String, Seq[Seq[(Seq[String], MLVector, Trace)]])]):Seq[(String, String, Int, Int, Seq[String], MLVector, Trace)] = {
entities.groupBy{case(group, category, groupedSyns)=> (group, category)}.values.map(grouped => grouped.zipWithIndex.map{case((group, category, groupedSyns),i) => (group, category, i, groupedSyns)}).flatMap(a => a).toSeq
.flatMap{case(group, category, entityGroup, legs)=> legs.zipWithIndex.map{case(leg, index) => (group, category, entityGroup, index, leg)}}
.flatMap{case(group, category, entityGroup, entityIndex, leg)=> leg.map{case(tokens, vector, trace) => (group, category, entityGroup, entityIndex, tokens, vector, trace)}}
}
def ungroupEntitiesAsDF(entities:Array[(String, String, Seq[Seq[(Seq[String], MLVector, Trace)]])]):DataFrame = {
val spark = get(entitiesDF).get.sparkSession
import spark.implicits._
ungroupEntities(entities)
.toDS
.map{case(group, category, entityGroup, entityIndex, tokens, vector, trace) => (group, category, entityGroup, entityIndex, tokens, vector, trace.iteration, trace.userStatus, trace.changed, trace.score, trace.changedOn, trace.stability)}
.toDF((Array(entTagGroupCol, entTagCol, entEntityGroupCol, entEntityIndexCol, entSynonymCol, entSynonymVectorCol) ++ traceEntCols) :_*)
}
def ungroupEntities(entities:Dataset[(String, String, Seq[Seq[(Seq[String], MLVector, Trace)]])]):DataFrame = {
import entities.sparkSession.implicits._
entities.toDF("tagGroup", "tag", "entity")
.withColumn("groupId", row_number().over(Window.partitionBy($"tagGroup", $"tag"))).as[(String, String, Seq[Seq[(Seq[String], MLVector, Trace)]], Int)]
.flatMap{case(group, category, legs, entityGroup)=> legs.zipWithIndex.map{case(leg, index) => (group, category, entityGroup, index, leg)}}
.flatMap{case(group, category, entityGroup, entityIndex, leg)=> leg.map{case(tokens, vector, trace) => (group, category, entityGroup, entityIndex, tokens, vector, trace)}}
.map{case(group, category, entityGroup, entityIndex, tokens, vector, trace) => (group, category, entityGroup, entityIndex, tokens, vector, trace.iteration, trace.userStatus, trace.changed, trace.score, trace.changedOn, trace.stability)}
.toDF((Array(entTagGroupCol, entTagCol, entEntityGroupCol, entEntityIndexCol, entSynonymCol, entSynonymVectorCol) ++ traceEntCols) :_*)
}
def entityInText(tokens:Seq[String], vectors:Seq[MLVector], entity:Seq[Seq[(Seq[String], MLVector, Trace)]], minScore:Double) = {
entity.forall(entityLeg => entityLeg.exists{case(synExp, synVector, trace) =>
      Range(0, tokens.size).exists{i =>
(i + synExp.size <= tokens.size
&& (Range(0, synExp.size).forall(j => this.matchingScore(synExp(j), tokens(i+j)) >= minScore)
|| (synVector != null && Range(0, synExp.size).forall(j => vectors(i+j) != null) && Range(0, synExp.size).map(j => vectors(i + j)).reduce((v1, v2) => v1.sum(v2)).cosineSimilarity(synVector) >= minScore)
)
)
}})
}
def nonEntityTokens(tokens:Seq[String], vectors:Seq[MLVector], entity:Seq[Seq[(Seq[String], MLVector, Trace)]], minScore:Double) = {
tokens.zip(vectors).zipWithIndex.filter{case((token, vector), i) =>
!entity.forall(entityLeg => entityLeg.exists{case(synExp, synVector, trace) =>
(i + synExp.size <= tokens.size
&& (Range(0, synExp.size).forall(j => this.matchingScore(synExp(j), tokens(i+j)) >= minScore)
|| (synVector != null && Range(0, synExp.size).forall(j => vectors(i+j) != null) && Range(0, synExp.size).map(j => vectors(i + j)).reduce((v1, v2) => v1.sum(v2)).cosineSimilarity(synVector) >= minScore)
)
)
})}
.map{case((token, vector), i) => (token, vector)}
}
def printEntity(e:(String, String, Seq[Seq[(Seq[String], MLVector, Trace)]])) = e match {case(tagGroup, tag, entity) => println(s"($tagGroup)->($tag)");entity.foreach(leg => {println(s"Leg>> ${leg.map{case(tokens, vector, trace) => tokens.mkString("+")}.mkString(",")}")})}
def splitIfSingle(entity:Seq[Seq[(Seq[String], MLVector, Trace)]]):Seq[Seq[Seq[(Seq[String], MLVector, Trace)]]] = if(entity.size > 1) Seq(entity) else entity(0).map(syn => Seq(Seq(syn)))
def matchingScore(token1:String, token2:String, vector1:MLVector=null, vector2:MLVector=null) = {
val levenshtein = new LevenshteinDistance(1)
if(token1.toLowerCase == token2.toLowerCase) 1.0
else if(token1.size > 3 && levenshtein(token1.toLowerCase, token2.toLowerCase)>0) 1.0 - levenshtein(token1.toLowerCase, token2.toLowerCase).toDouble/token1.size
else if(vector1 !=null && vector2!=null) vector1.cosineSimilarity(vector2)
else 0.0
}
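  // Score sketch (illustrative, no vectors supplied): identical tokens score 1.0;
  // longer near-matches are scaled by edit distance, e.g. one edit over five chars:
  //   matchingScore("color", "colour") == 0.8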
}
trait EntityCalculatorModel extends EntityCalculator {
val outEntities:Array[(String, String, Seq[Seq[(Seq[String], MLVector, Trace)]])]
final val outEntitiesDF = new Param[DataFrame](this, "outEntitiesDF", "The current trandformed entities after fitting the model")
def setOutEntitiesDF(): this.type = set(outEntitiesDF, this.ungroupEntitiesAsDF(outEntities))
def transform(dataset: Dataset[_]):DataFrame = {
val spark = dataset.sparkSession
val sc = spark.sparkContext
import spark.implicits._
val currentEntities = sc.broadcast(this.groupEntities(get(entitiesDF).get.where(!lower(col(entUserStatusCol)).isin("ignore", "other"))).collect)
val currentIterationValue = get(currentIteration).get
val minMatchScore = getOrDefault(minScoreMatch)
debug("entities collected")
val EntitiesAssigned = this.textDS(dataset.toDF).map{ case (id, tagGroup, currentTag, entities, trace, wordsTokens, wordsVectors, text) => {
if(trace.userStatus == "fixed")
(id, tagGroup, currentTag, entities,trace ,wordsTokens, wordsVectors, text)
else {
var bestTagScore:Option[Double]=None
var bestTag:Option[String]=None
var bestSynonyms:Option[Seq[String]]=None
var bestStatus:Option[String]=None
currentEntities.value.foreach{case (dicoGroup, dicoTag, dicoEnt) =>
if(tagGroup == dicoGroup && (currentTag == null || currentTag == dicoTag) && dicoEnt != null && dicoEnt.size >0) {
            //Evaluate a candidate tag, represented by all of its entities, against the line's tokens and vectors; the best-scoring tag, if any, is kept
val tagSize = dicoEnt.size
val bestSynGroup = Array.fill(tagSize)(None.asInstanceOf[Option[(String,Double, String)]])
dicoEnt.zipWithIndex.foreach{case (leg, partIndex) => {
//Evaluating an entity part
leg.foreach{case (synonymTokens, synVector, entTrace) =>
if(synonymTokens.size > 0) {
//Evaluating a synonym (defined by its tokens and vector) against a text (defined by its tokens and vectors)
val synonym = synonymTokens.mkString(" ")
var iWord = 0
wordsTokens.foreach(wordToken => {
//Evaluating a synonym (defined by its tokens and vector) against a word (defined by its tokens and vectors)
val word = wordsTokens.slice(iWord, iWord + synonymTokens.size).mkString(" ")
val tokenVector = wordsVectors(iWord)
val synScore = this.matchingScore(word, synonym, tokenVector, synVector)
val synStatus = entTrace.userStatus
bestSynGroup(partIndex) = bestSynGroup(partIndex) match {
case Some((bestSynonym, bestScore, bestStatus)) if synScore > bestScore => Some((synonym, synScore, synStatus))
case _ if synScore >= minMatchScore => Some((synonym, synScore, synStatus))
case _ => bestSynGroup(partIndex)
}
iWord = iWord + 1
})
}}
}}
val foundSynonyms = bestSynGroup.flatMap(t => t)
if(foundSynonyms.size == tagSize) {
val tagScore = foundSynonyms.map(t => t match {case (synonym, synScore, synStatus) => synScore}).map(score => score / foundSynonyms.size).sum
val matchedSynonyms = foundSynonyms.map(t => t match {case (synonym, synScore, synStatus) => synonym})
val matchedStatus = foundSynonyms.map(t => t match {case (synonym, synScore, synStatus) => synStatus})
.reduce((s1, s2)=>
if(s1 == "proposed" || s2 == "proposed") "proposed"
else if(s1 == "refined" || s2 == "refined") "refined"
else if(s1 == "ok" || s2 == "ok") "ok"
else throw new Exception(s"unexpected trace status betwen ($s1, $s2) @epi")
)
val thisIsBestTag = bestTagScore match {
case Some(bestScore) if tagScore > bestScore => true
case None => true
case _ => false
}
if(thisIsBestTag) {
bestTag = Some(dicoTag)
bestSynonyms = Some(matchedSynonyms)
bestTagScore = Some(tagScore)
bestStatus = Some(matchedStatus)
}
}
}
}
(bestTag, bestSynonyms, bestTagScore, bestStatus) match {
case (Some(chosenTag), Some(chosenSynonyms), Some(chosenScore), Some(chosenStatus) )
=> (id, tagGroup, chosenTag, chosenSynonyms, trace.setScore(chosenScore).setChanged(chosenTag != currentTag, currentIterationValue)
.setUserStatus(
if(chosenTag!=null && chosenStatus == "ok") "validated"
else if (chosenTag!=null) "matched"
else "proposed"
, currentIterationValue)
, wordsTokens, wordsVectors, text)
case _
=> (id, tagGroup, null, entities,trace.setUserStatus("pending", currentIterationValue).setChanged(null != currentTag, currentIterationValue) ,wordsTokens, wordsVectors, text)
}
}
}}
this.textDF(EntitiesAssigned)
}
}
| forchard-epi/demy | mllib/src/main/scala/text/EntityCalculator.scala | Scala | bsd-3-clause | 20,382 |
package mojave
import shapeless.Lens
import scala.reflect.ClassTag
case class PartialLens[A, B : ClassTag](wrapped: Lens[A, Option[B]]) {
def andThen[C](second: Lens[B, Option[C]]): Lens[A, Option[C]] = PartialStep(wrapped, second)
def modifyOpt(a: A)(fn: B => B) = wrapped.modify(a) { value => value.map(fn) }
/**
* Type-unsafe field accessor for case classes
*/
def optField [C] (fieldName: String): Lens[A, Option[C]] = OptObjectFieldLens[B, C](fieldName) compose wrapped
}
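// Illustrative sketch (not part of mojave's API; Person/Address are assumed sample
// types): two hand-written optional lenses chained with andThen, which composes
// them through PartialStep below.
private object PartialLensExample {
  case class Address(city: Option[String])
  case class Person(address: Option[Address])
  val addressLens: Lens[Person, Option[Address]] = new Lens[Person, Option[Address]] {
    def get(p: Person): Option[Address] = p.address
    def set(p: Person)(a: Option[Address]): Person = p.copy(address = a)
  }
  val cityLens: Lens[Address, Option[String]] = new Lens[Address, Option[String]] {
    def get(a: Address): Option[String] = a.city
    def set(a: Address)(c: Option[String]): Address = a.copy(city = c)
  }
  val personCity: Lens[Person, Option[String]] = PartialLens(addressLens).andThen(cityLens)
}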
private case class OptObjectFieldLens[ObjectType : ClassTag, FieldType](field: String) extends Lens[Option[ObjectType], Option[FieldType]]{
private lazy val plainLens = TypeUnsafeObjectFieldLens[ObjectType, FieldType](field)
override def get(s: Option[ObjectType]): Option[FieldType] = s.map(o => plainLens.get(o))
override def set(s: Option[ObjectType])(a: Option[FieldType]): Option[ObjectType] = (s, a) match {
case (Some(objectValue), Some(fieldValue)) => Some(plainLens.set(objectValue)(fieldValue))
case (Some(objectValue), None) => throw new RuntimeException(s"Cannot set field $field of $objectValue to None")
case (None, Some(fieldValue)) => throw new RuntimeException(s"Cannot set field $field of None to $fieldValue")
case (None, None) => None
}
}
private case class PartialStep[A, B, C](first: Lens[A, Option[B]], second: Lens[B, Option[C]]) extends Lens[A, Option[C]] {
override def get(a: A): Option[C] = first.get(a).flatMap(second.get)
override def set(a: A)(c: Option[C]): A = first.set(a)(first.get(a).map(b => second.set(b)(c)))
} | raimohanska/mojave | src/main/scala/mojave/PartialLens.scala | Scala | mit | 1,575 |
package scala.tools.nsc
package backend.jvm
package opt
import org.junit.Assert._
import org.junit.Test
import org.junit.runner.RunWith
import org.junit.runners.JUnit4
import scala.jdk.CollectionConverters._
import scala.reflect.internal.util.JavaClearable
import scala.tools.nsc.backend.jvm.BTypes.MethodInlineInfo
import scala.tools.nsc.backend.jvm.BackendReporting._
import scala.tools.testkit.BytecodeTesting
@RunWith(classOf[JUnit4])
class InlineInfoTest extends BytecodeTesting {
import compiler._
import global.genBCode.{bTypes, postProcessor}
override def compilerArgs = "-opt:inline:**"
compiler.keepPerRunCachesAfterRun(List(
JavaClearable.forMap(bTypes.classBTypeCache),
postProcessor.byteCodeRepository.compilingClasses,
postProcessor.byteCodeRepository.parsedClasses))
@Test
def inlineInfosFromSymbolAndAttribute(): Unit = {
val code =
"""trait T {
| @inline def f: Int
| @noinline final def g = 0
|}
|trait U { self: T =>
| @inline def f = 0
| final def h = 0
| final class K {
| @inline def i = 0
| }
|}
|sealed trait V {
| @inline def j = 0
|}
|class C extends T with U
""".stripMargin
val classes = compileClasses(code)
val fromSyms = classes.map(c => global.genBCode.bTypes.cachedClassBType(c.name).info.get.inlineInfo)
val fromAttrs = classes.map(c => {
assert(c.attrs.asScala.exists(_.isInstanceOf[InlineInfoAttribute]), c.attrs)
global.genBCode.postProcessor.bTypesFromClassfile.inlineInfoFromClassfile(c)
})
assert(fromSyms == fromAttrs)
}
@Test // scala-dev#20
def javaStaticMethodsInlineInfoInMixedCompilation(): Unit = {
val jCode =
"""public class A {
| public static final int bar() { return 100; }
| public final int baz() { return 100; }
|}
""".stripMargin
compileClasses("class C { new A }", javaCode = List((jCode, "A.java")))
val info = global.genBCode.bTypes.cachedClassBType("A").info.get.inlineInfo
assertEquals(info.methodInfos, Map(
("bar", "()I") -> MethodInlineInfo(true,false,false),
("<init>", "()V") -> MethodInlineInfo(false,false,false),
("baz", "()I") -> MethodInlineInfo(true,false,false)))
}
@Test
def sd402(): Unit = {
val jCode =
"""package java.nio.file;
|public interface WatchEvent<T> {
| public static interface Kind<T> {
| static default String HAI() { return ""; }
| }
|}
|
""".stripMargin
compileClasses("class C { def t: java.nio.file.WatchEvent.Kind[String] = null }", javaCode = List((jCode, "WatchEvent.java")))
// before the fix of scala-dev#402, the companion of the nested class `Kind` (containing the static method) was taken from
// the classpath (classfile WatchEvent$Kind.class) instead of the actual companion from the source, so the static method was missing.
val info = global.genBCode.bTypes.cachedClassBType("java/nio/file/WatchEvent$Kind").info.get.inlineInfo
assertEquals(info.methodInfos, Map(
("HAI", "()Ljava/lang/String;") -> MethodInlineInfo(true,false,false),
("<init>", "()V") -> MethodInlineInfo(false,false,false)))
}
}
| scala/scala | test/junit/scala/tools/nsc/backend/jvm/opt/InlineInfoTest.scala | Scala | apache-2.0 | 3,315 |
package org.sisioh.aws4s.sns.model
import com.amazonaws.services.sns.model.DeleteTopicRequest
import org.sisioh.aws4s.PimpedType
object DeleteTopicRequestFactory {
def create(): DeleteTopicRequest =
new DeleteTopicRequest()
def create(topicArn: String): DeleteTopicRequest =
new DeleteTopicRequest(topicArn)
}
class RichDeleteTopicRequest(val underlying: DeleteTopicRequest) extends AnyVal with PimpedType[DeleteTopicRequest] {
def topicArnOpt: Option[String] =
Option(underlying.getTopicArn)
def topicArnOpt_=(value: Option[String]): Unit =
underlying.setTopicArn(value.orNull)
def withTopicArnOpt(value: Option[String]): DeleteTopicRequest =
underlying.withTopicArn(value.orNull)
}
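// Minimal usage sketch (the ARN value is an assumption): build a request via the
// factory, then read the topic ARN back through the enriched wrapper.
object RichDeleteTopicRequestExample {
  val request = DeleteTopicRequestFactory.create("arn:aws:sns:us-east-1:123456789012:my-topic")
  val arn: Option[String] = new RichDeleteTopicRequest(request).topicArnOpt
}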
| sisioh/aws4s | aws4s-sns/src/main/scala/org/sisioh/aws4s/sns/model/RichDeleteTopicRequest.scala | Scala | mit | 722 |
package com.github.diegopacheco.scala3.playground.features
object TraitsMain extends App {
trait Animal:
def speak():Unit
trait HasTail:
def wagTail():Unit
class Dog extends Animal, HasTail:
def speak() = println("Woof")
def wagTail() = println("⎞⎜⎛ ⎞⎜⎛")
val dog = Dog()
println(s"${dog.speak()} ${dog.wagTail()}")
}
| diegopacheco/scala-playground | scala-3-playground/scala-3-playground/src/main/scala/com/github/diegopacheco/scala3/playground/features/TraitsMain.scala | Scala | unlicense | 371 |
/**
* Copyright 2015 Thomson Reuters
*
* Licensed under the Apache License, Version 2.0 (the “License”); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an “AS IS” BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import cmwell.domain._
import org.joda.time.DateTime
import org.scalatest.{FlatSpec, Matchers}
import scala.language.postfixOps
/**
* Created with IntelliJ IDEA.
* User: markz
* Date: 1/13/13
* Time: 3:15 PM
* Testing infoton serializer code
*/
class InfotonSerializerSpec extends FlatSpec with Matchers {
def serialize2Anddeserialize2(i: Infoton): Infoton = {
val (uuid,rows) = InfotonSerializer.serialize2(i)
val it = rows.view.flatMap {
case (q,fields) => fields.view.sortBy(_._1).flatMap{
case (fieldName,values) => values.view.sortBy(_._1).map(value => (q,fieldName,value))
}
}.iterator
InfotonSerializer.deserialize2(uuid,it)
}
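  // Round-trip invariant exercised throughout this spec: both codecs must agree, i.e.
  //   InfotonSerializer.deserialize(InfotonSerializer.serialize(i)) == i
  //   serialize2Anddeserialize2(i) == i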
"very big infoton" should "be successful" in {
val x: Set[FieldValue] = {
val b = Set.newBuilder[FieldValue]
b.sizeHint(100000)
for(i <- 0 until 100000) {
b += FString(s"cmwell://name/$i")
}
b.result()
}
val objInfo = new ObjectInfoton(
path = "/command-test/objinfo1",
dc = "test",
fields = Option(Map[String,Set[FieldValue]]("g" -> Set(FString("h")),"last" -> Set(FString("zitnik")), "name" -> x )), protocol = None)
InfotonSerializer.deserialize(InfotonSerializer.serialize(objInfo)) shouldEqual objInfo
serialize2Anddeserialize2(objInfo) shouldEqual objInfo
}
"object infoton serializer" should "be successful" in {
val objInfo = ObjectInfoton("/command-test/objinfo1","dc_test", None, Map("name" -> Set[FieldValue](FString("gal"), FString("yoav"))), None)
val objInfoCmp = InfotonSerializer.deserialize(InfotonSerializer.serialize(objInfo))
// check system
objInfo.path should equal(objInfoCmp.path)
objInfo.uuid should equal(objInfoCmp.uuid)
objInfo.lastModified should equal(objInfoCmp.lastModified)
// check fields
objInfo.fields.get("name").size should equal(objInfoCmp.fields.get("name").size)
}
"empty file infoton serializer" should "be successful" in {
val fc = FileContent("text/plain",0)
val emptyInfo = FileInfoton("/command-test/objinfo1","dc_test", None, Map("name" -> Set[FieldValue](FString("gal"), FString("yoav"))) , fc, None)
val emptyInfoCmp = InfotonSerializer.deserialize(InfotonSerializer.serialize(emptyInfo))
emptyInfo.path should equal (emptyInfoCmp.path)
emptyInfo.uuid should equal (emptyInfoCmp.uuid)
emptyInfo.lastModified should equal (emptyInfoCmp.lastModified)
}
"file binary infoton serializer" should "be successful" in {
val source = scala.io.Source.fromFile("./src/test/resources/mascot.jpg" ,"iso-8859-1")
val byteArray = source.map(_.toByte).toArray
source.close()
val s = byteArray
val img : FileContent = FileContent(s, "image/jpeg;charset=iso-8859-1")
val imgInfo = FileInfoton("/command-test/objinfo1","dc_test", None, Map("name" -> Set[FieldValue](FString("gal"), FString("yoav"))), img, None)
val imgInfoCmp = InfotonSerializer.deserialize(InfotonSerializer.serialize(imgInfo))
val imgInfoCmp2 = serialize2Anddeserialize2(imgInfo)
// check system
imgInfo.path should equal (imgInfoCmp.path)
imgInfo.uuid should equal (imgInfoCmp.uuid)
imgInfo.lastModified should equal (imgInfoCmp.lastModified)
imgInfo.path should equal (imgInfoCmp2.path)
imgInfo.uuid should equal (imgInfoCmp2.uuid)
imgInfo.lastModified should equal (imgInfoCmp2.lastModified)
// check fields
imgInfo.fields.get("name").size should equal (imgInfoCmp.fields.get("name").size)
imgInfo.fields.get("name").size should equal (imgInfoCmp2.fields.get("name").size)
(imgInfoCmp: @unchecked) match {
case FileInfoton(_,_,_,_,fields , content, _ , _ ) =>
content.get match {
case FileContent(data,mimeType,_,_) =>
val d = data.get
d should equal (s)
"image/jpeg;charset=iso-8859-1" should equal (mimeType)
}
}
(imgInfoCmp2: @unchecked) match {
case FileInfoton(_,_,_,_,fields , content, _ , _ ) =>
content.get match {
case FileContent(data,mimeType,_,_) =>
val d = data.get
d should equal (s)
"image/jpeg;charset=iso-8859-1" should equal (mimeType)
}
}
}
"file text infoton serializer" should "be successful" in {
val source = scala.io.Source.fromFile("./src/test/resources/test.txt" ,"UTF-8")
val byteArray = source.map(_.toByte).toArray
source.close()
val s = byteArray
val text : FileContent = FileContent(s, "text/plain;charset=utf-8")
val textInfo = FileInfoton("/command-test/objinfo1","dc_test", None, Map("name" -> Set[FieldValue](FString("gal"), FString("yoav"))), text, None)
val textInfoCmp = InfotonSerializer.deserialize(InfotonSerializer.serialize(textInfo))
val textInfoCmp2 = serialize2Anddeserialize2(textInfo)
// check system
textInfo.path should equal (textInfoCmp.path)
textInfo.uuid should equal (textInfoCmp.uuid)
textInfo.lastModified should equal (textInfoCmp.lastModified)
textInfo.path should equal (textInfoCmp2.path)
textInfo.uuid should equal (textInfoCmp2.uuid)
textInfo.lastModified should equal (textInfoCmp2.lastModified)
// check fields
textInfo.fields.get("name").size should equal (textInfoCmp.fields.get("name").size)
textInfo.fields.get("name").size should equal (textInfoCmp2.fields.get("name").size)
(textInfoCmp: @unchecked) match {
case FileInfoton(_,_,_,_,fields , content, _ , _ ) =>
content.get match {
case FileContent(data,mimeType,_,_) =>
val d = data.get
d should equal (s)
"text/plain;charset=utf-8" should equal (mimeType)
}
}
(textInfoCmp2: @unchecked) match {
case FileInfoton(_,_,_,_,fields , content, _ , _ ) =>
content.get match {
case FileContent(data,mimeType,_,_) =>
val d = data.get
d should equal (s)
"text/plain;charset=utf-8" should equal (mimeType)
}
}
}
// TODO: make this configurable
val chunkSize = 65536
"big file infoton with % chunkSize != 0" should "be successful" in {
val bArr = Array.tabulate[Byte](chunkSize + chunkSize + 12345)(_.&(0xff).toByte)
val data : FileContent = FileContent(bArr, "application/octet-stream")
val fInf = FileInfoton("/command-test/fileinfo1","dc_test", None, Map("name" -> Set[FieldValue](FString("gal"), FString("yoav"))), data, None)
val dataInfoCmp = InfotonSerializer.deserialize(InfotonSerializer.serialize(fInf))
val dataInfoCmp2 = serialize2Anddeserialize2(fInf)
// check system
fInf.path should equal (dataInfoCmp.path)
fInf.uuid should equal (dataInfoCmp.uuid)
fInf.lastModified should equal (dataInfoCmp.lastModified)
fInf.path should equal (dataInfoCmp2.path)
fInf.uuid should equal (dataInfoCmp2.uuid)
fInf.lastModified should equal (dataInfoCmp2.lastModified)
// check fields
fInf.fields.get("name").size should equal (dataInfoCmp.fields.get("name").size)
fInf.fields.get("name").size should equal (dataInfoCmp2.fields.get("name").size)
(dataInfoCmp: @unchecked) match {
case FileInfoton(_,_,_,_,_,content,_,_) =>
content.get match {
case FileContent(binData,mimeType,_,_) =>
val d = binData.get
d should equal (bArr)
"application/octet-stream" should equal (mimeType)
}
}
(dataInfoCmp2: @unchecked) match {
case FileInfoton(_,_,_,_,_,content,_,_) =>
content.get match {
case FileContent(binData,mimeType,_,_) =>
val d = binData.get
d should equal (bArr)
"application/octet-stream" should equal (mimeType)
}
}
}
"big file infoton with % chunkSize == 0" should "be successful" in {
val bArr = Array.tabulate[Byte](2*chunkSize)(_.&(0xff).toByte)
    val data: FileContent = FileContent(bArr, "application/octet-stream")
val fInf = FileInfoton("/command-test/fileinfo1","dc_test", None, Map("name" -> Set[FieldValue](FString("gal"), FString("yoav"))), data, None)
val dataInfoCmp = InfotonSerializer.deserialize(InfotonSerializer.serialize(fInf))
val dataInfoCmp2 = serialize2Anddeserialize2(fInf)
// check system
fInf.path should equal (dataInfoCmp.path)
fInf.uuid should equal (dataInfoCmp.uuid)
fInf.lastModified should equal (dataInfoCmp.lastModified)
fInf.path should equal (dataInfoCmp2.path)
fInf.uuid should equal (dataInfoCmp2.uuid)
fInf.lastModified should equal (dataInfoCmp2.lastModified)
// check fields
fInf.fields.get("name").size should equal (dataInfoCmp.fields.get("name").size)
fInf.fields.get("name").size should equal (dataInfoCmp2.fields.get("name").size)
(dataInfoCmp: @unchecked) match {
case FileInfoton(_,_,_,_,_,content,_,_) =>
content.get match {
case FileContent(binData,mimeType,_,_) =>
val d = binData.get
d should equal (bArr)
"application/octet-stream" should equal (mimeType)
}
}
(dataInfoCmp2: @unchecked) match {
case FileInfoton(_,_,_,_,_,content,_,_) =>
content.get match {
case FileContent(binData,mimeType,_,_) =>
val d = binData.get
d should equal (bArr)
"application/octet-stream" should equal (mimeType)
}
}
}
"link infoton serializer" should "be successful" in {
val forward = LinkInfoton(
"/command-test/objinfo1",
"dc_test",
Map("name" -> Set[FieldValue](FString("gal"), FString("yoav"))),
"/mark",
LinkType.Forward, None)
val forwardCmp = InfotonSerializer.deserialize(InfotonSerializer.serialize(forward))
// check system
forward.path should equal (forwardCmp.path)
forward.uuid should equal (forwardCmp.uuid)
forward.lastModified should equal (forwardCmp.lastModified)
// check link
(forwardCmp: @unchecked) match {
      case LinkInfoton(_, _, _, _, _, t, lt, _, _) => {
forward.linkTo should equal (t)
forward.linkType should equal (lt)
}
}
// check fields
forward.fields.get("name").size should equal (forwardCmp.fields.get("name").size)
val per = LinkInfoton("/command-test/objinfo1","dc_test",
Map("name" -> Set[FieldValue](FString("gal"), FString("yoav"))) , "/mark" , LinkType.Permanent, None)
val perCmp = InfotonSerializer.deserialize(InfotonSerializer.serialize(per))
// check system
per.path should equal (perCmp.path)
per.uuid should equal (perCmp.uuid)
per.lastModified should equal (perCmp.lastModified)
// check link
(perCmp: @unchecked) match {
      case LinkInfoton(_, _, _, _, _, t, lt, _, _) => {
per.linkTo should equal (t)
per.linkType should equal (lt)
}
}
// check fields
per.fields.get("name").size should equal (perCmp.fields.get("name").size)
val temp = LinkInfoton("/command-test/objinfo1","dc_test",
Map("name" -> Set[FieldValue](FString("gal"), FString("yoav"))) , "/mark" , LinkType.Temporary, None)
val tempCmp = InfotonSerializer.deserialize(InfotonSerializer.serialize(temp))
// check system
temp.path should equal (tempCmp.path)
temp.uuid should equal (tempCmp.uuid)
temp.lastModified should equal (tempCmp.lastModified)
// check link
(tempCmp: @unchecked) match {
      case LinkInfoton(_, _, _, _, _, t, lt, _, _) => {
temp.linkTo should equal (t)
temp.linkType should equal (lt)
}
}
// check fields
temp.fields.get("name").size should equal (tempCmp.fields.get("name").size)
}
"deleted infoton serializer" should "be successful" in {
val deleted = DeletedInfoton("/command-test/delete","dc_test")
val deletedCmp = InfotonSerializer.deserialize(InfotonSerializer.serialize(deleted))
deleted.path should equal (deletedCmp.path)
deleted.lastModified should equal (deletedCmp.lastModified)
}
"diffetent infotons with same fields" should "return isSameAs==true" in {
val infoton1 = ObjectInfoton(
"/pathOfInfoton1",
"dc_test",
None,
new DateTime("2015-03-04T12:51:39.000Z"),
Map("Mark"->Set[FieldValue](FString("King"),FString("Awesome"))), None)
val infoton2 = ObjectInfoton(
"/pathOfInfoton2",
"dc_test",
None,
new DateTime("2001-02-03T09:34:21.000Z"),
Map("Mark"->Set[FieldValue](FString("Awesome"),FString("King"))), None)
(infoton1 isSameAs infoton2) should equal (true)
}
}
| bryaakov/CM-Well | server/cmwell-domain/src/test/scala/InfotonSerializerSpec.scala | Scala | apache-2.0 | 13,168 |
// Copyright (C) 2015-2017 Red Tulip Systems BV.
//
// Author [email protected]
import sbt._
import Process._
import Keys._
object TelephonyConfiguration {
val ProjectSettings = Seq(
resolvers += Resolver.sonatypeRepo("snapshots"),
libraryDependencies ++= Seq(
"com.twilio.sdk" % "twilio" % "7.5.0",
"com.nexmo" % "client" % "2.0.0-SNAPSHOT",
"commons-net" % "commons-net" % "3.5"
)
)
}
| mouchtaris/jleon | project/TelephonyConfiguration.scala | Scala | mit | 447 |
/*
* Scala (https://www.scala-lang.org)
*
* Copyright EPFL and Lightbend, Inc.
*
* Licensed under Apache License 2.0
* (http://www.apache.org/licenses/LICENSE-2.0).
*
* See the NOTICE file distributed with this work for
* additional information regarding copyright ownership.
*/
package scala
package collection
package immutable
import java.io.{ObjectInputStream, ObjectOutputStream}
import java.lang.{StringBuilder => JStringBuilder}
import scala.annotation.tailrec
import scala.annotation.unchecked.uncheckedVariance
import scala.collection.generic.SerializeEnd
import scala.collection.mutable.{ArrayBuffer, StringBuilder}
import Stream.cons
@deprecated("Use LazyList (which is fully lazy) instead of Stream (which has a lazy tail only)", "2.13.0")
@SerialVersionUID(3L)
sealed abstract class Stream[+A] extends AbstractSeq[A]
with LinearSeq[A]
with LinearSeqOps[A, Stream, Stream[A]]
with IterableFactoryDefaults[A, Stream]
with Serializable {
def tail: Stream[A]
/** Forces evaluation of the whole `Stream` and returns it.
*
* @note Often we use `Stream`s to represent an infinite set or series. If
* that's the case for your particular `Stream` then this function will never
* return and will probably crash the VM with an `OutOfMemory` exception.
* This function will not hang on a finite cycle, however.
*
* @return The fully realized `Stream`.
*/
def force: this.type
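  // Illustrative sketch (not part of the original docs): forcing a finite
  // stream just realizes its whole spine eagerly, e.g.
  //   Stream.from(1).take(3).force   // fully evaluated: Stream(1, 2, 3)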
override def iterableFactory: SeqFactory[Stream] = Stream
override protected[this] def className: String = "Stream"
/** Apply the given function `f` to each element of this linear sequence
* (while respecting the order of the elements).
*
* @param f The treatment to apply to each element.
* @note Overridden here as final to trigger tail-call optimization, which
* replaces 'this' with 'tail' at each iteration. This is absolutely
* necessary for allowing the GC to collect the underlying Stream as elements
* are consumed.
* @note This function will force the realization of the entire Stream
* unless the `f` throws an exception.
*/
@tailrec
override final def foreach[U](f: A => U): Unit = {
if (!this.isEmpty) {
f(head)
tail.foreach(f)
}
}
@tailrec
override final def find(p: A => Boolean): Option[A] = {
if(isEmpty) None
else if(p(head)) Some(head)
else tail.find(p)
}
override def take(n: Int): Stream[A] = {
if (n <= 0 || isEmpty) Stream.empty
else if (n == 1) new Stream.Cons(head, Stream.empty)
else new Stream.Cons(head, tail.take(n - 1))
}
/** Stream specialization of foldLeft which allows GC to collect along the
* way.
*
* @tparam B The type of value being accumulated.
* @param z The initial value seeded into the function `op`.
* @param op The operation to perform on successive elements of the `Stream`.
* @return The accumulated value from successive applications of `op`.
*/
@tailrec
override final def foldLeft[B](z: B)(op: (B, A) => B): B = {
if (this.isEmpty) z
else tail.foldLeft(op(z, head))(op)
}
/** The stream resulting from the concatenation of this stream with the argument stream.
* @param rest The collection that gets appended to this stream
* @return The stream containing elements of this stream and the iterable object.
*/
@deprecated("The `append` operation has been renamed `lazyAppendedAll`", "2.13.0")
@inline final def append[B >: A](rest: => IterableOnce[B]): Stream[B] = lazyAppendedAll(rest)
protected[this] def writeReplace(): AnyRef =
if(nonEmpty && tailDefined) new Stream.SerializationProxy[A](this) else this
/** Prints elements of this stream one by one, separated by commas. */
@deprecated(message = """Use print(stream.force.mkString(", ")) instead""", since = "2.13.0")
@inline def print(): Unit = Console.print(this.force.mkString(", "))
/** Prints elements of this stream one by one, separated by `sep`.
* @param sep The separator string printed between consecutive elements.
*/
@deprecated(message = "Use print(stream.force.mkString(sep)) instead", since = "2.13.0")
@inline def print(sep: String): Unit = Console.print(this.force.mkString(sep))
/** The stream resulting from the concatenation of this stream with the argument stream.
*
* @param suffix The collection that gets appended to this stream
* @return The stream containing elements of this stream and the iterable object.
*/
def lazyAppendedAll[B >: A](suffix: => collection.IterableOnce[B]): Stream[B] =
if (isEmpty) iterableFactory.from(suffix) else cons[B](head, tail.lazyAppendedAll(suffix))
override def equals(that: Any): Boolean =
if (this eq that.asInstanceOf[AnyRef]) true else super.equals(that)
override def scanLeft[B](z: B)(op: (B, A) => B): Stream[B] =
if (isEmpty) z +: iterableFactory.empty
else cons(z, tail.scanLeft(op(z, head))(op))
/** Stream specialization of reduceLeft which allows GC to collect
* along the way.
*
* @tparam B The type of value being accumulated.
* @param f The operation to perform on successive elements of the `Stream`.
* @return The accumulated value from successive applications of `f`.
*/
override final def reduceLeft[B >: A](f: (B, A) => B): B = {
if (this.isEmpty) throw new UnsupportedOperationException("empty.reduceLeft")
else {
var reducedRes: B = this.head
var left: Stream[A] = this.tail
while (!left.isEmpty) {
reducedRes = f(reducedRes, left.head)
left = left.tail
}
reducedRes
}
}
override def partition(p: A => Boolean): (Stream[A], Stream[A]) = (filter(p(_)), filterNot(p(_)))
override def filter(pred: A => Boolean): Stream[A] = filterImpl(pred, isFlipped = false)
override def filterNot(pred: A => Boolean): Stream[A] = filterImpl(pred, isFlipped = true)
private[immutable] def filterImpl(p: A => Boolean, isFlipped: Boolean): Stream[A] = {
// optimization: drop leading prefix of elems for which f returns false
// var rest = this dropWhile (!p(_)) - forget DRY principle - GC can't collect otherwise
var rest: Stream[A] = coll
while (rest.nonEmpty && p(rest.head) == isFlipped) rest = rest.tail
// private utility func to avoid `this` on stack (would be needed for the lazy arg)
if (rest.nonEmpty) Stream.filteredTail(rest, p, isFlipped)
else iterableFactory.empty
}
/** A `collection.WithFilter` which allows GC of the head of stream during processing */
override final def withFilter(p: A => Boolean): collection.WithFilter[A, Stream] =
Stream.withFilter(coll, p)
override final def prepended[B >: A](elem: B): Stream[B] = cons(elem, coll)
override final def map[B](f: A => B): Stream[B] =
if (isEmpty) iterableFactory.empty
else cons(f(head), tail.map(f))
@tailrec override final def collect[B](pf: PartialFunction[A, B]): Stream[B] =
if(isEmpty) Stream.empty
else {
var newHead: B = null.asInstanceOf[B]
val runWith = pf.runWith((b: B) => newHead = b)
if(runWith(head)) Stream.collectedTail(newHead, this, pf)
else tail.collect(pf)
}
@tailrec override final def collectFirst[B](pf: PartialFunction[A, B]): Option[B] =
if(isEmpty) None
else {
var newHead: B = null.asInstanceOf[B]
val runWith = pf.runWith((b: B) => newHead = b)
if(runWith(head)) Some(newHead)
else tail.collectFirst(pf)
}
// optimisations are not for speed, but for functionality
// see tickets #153, #498, #2147, and corresponding tests in run/ (as well as run/stream_flatmap_odds.scala)
override final def flatMap[B](f: A => IterableOnce[B]): Stream[B] =
if (isEmpty) iterableFactory.empty
else {
// establish !prefix.isEmpty || nonEmptyPrefix.isEmpty
var nonEmptyPrefix: Stream[A] = coll
var prefix = iterableFactory.from(f(nonEmptyPrefix.head))
while (!nonEmptyPrefix.isEmpty && prefix.isEmpty) {
nonEmptyPrefix = nonEmptyPrefix.tail
if(!nonEmptyPrefix.isEmpty)
prefix = iterableFactory.from(f(nonEmptyPrefix.head))
}
if (nonEmptyPrefix.isEmpty) iterableFactory.empty
else prefix.lazyAppendedAll(nonEmptyPrefix.tail.flatMap(f))
}
override final def zip[B](that: collection.IterableOnce[B]): Stream[(A, B)] =
if (this.isEmpty || that.isEmpty) iterableFactory.empty
else {
val thatIterable = that match {
case that: collection.Iterable[B] => that
case _ => LazyList.from(that)
}
cons[(A, B)]((this.head, thatIterable.head), this.tail.zip(thatIterable.tail))
}
override final def zipWithIndex: Stream[(A, Int)] = this.zip(LazyList.from(0))
protected def tailDefined: Boolean
/** Appends all elements of this $coll to a string builder using start, end, and separator strings.
* The written text begins with the string `start` and ends with the string `end`.
* Inside, the string representations (w.r.t. the method `toString`)
* of all elements of this $coll are separated by the string `sep`.
*
* Undefined elements are represented with `"_"`, an undefined tail is represented with `"<not computed>"`,
* and cycles are represented with `"<cycle>"`.
*
* @param sb the string builder to which elements are appended.
* @param start the starting string.
* @param sep the separator string.
* @param end the ending string.
* @return the string builder `b` to which elements were appended.
*/
override def addString(sb: StringBuilder, start: String, sep: String, end: String): StringBuilder = {
force
addStringNoForce(sb.underlying, start, sep, end)
sb
}
private[this] def addStringNoForce(b: JStringBuilder, start: String, sep: String, end: String): JStringBuilder = {
b.append(start)
if (nonEmpty) {
b.append(head)
var cursor = this
def appendCursorElement(): Unit = b.append(sep).append(cursor.head)
if (tailDefined) { // If tailDefined, also !isEmpty
var scout = tail
if (cursor ne scout) {
cursor = scout
if (scout.tailDefined) {
scout = scout.tail
// Use 2x 1x iterator trick for cycle detection; slow iterator can add strings
while ((cursor ne scout) && scout.tailDefined) {
appendCursorElement()
cursor = cursor.tail
scout = scout.tail
if (scout.tailDefined) scout = scout.tail
}
}
}
if (!scout.tailDefined) { // Not a cycle, scout hit an end
while (cursor ne scout) {
appendCursorElement()
cursor = cursor.tail
}
if (cursor.nonEmpty) {
appendCursorElement()
}
}
else {
// Cycle.
// If we have a prefix of length P followed by a cycle of length C,
// the scout will be at position (P%C) in the cycle when the cursor
// enters it at P. They'll then collide when the scout advances another
// C - (P%C) ahead of the cursor.
// If we run the scout P farther, then it will be at the start of
// the cycle: (C - (P%C) + (P%C)) == C == 0. So if another runner
// starts at the beginning of the prefix, they'll collide exactly at
// the start of the loop.
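          // Worked example (illustrative): with P = 3 and C = 4, the scout
          // enters the cycle at position 3 % 4 = 3 and collides with the
          // cursor after C - (P%C) = 1 more step, at cycle position 1.
          // Advancing it P = 3 farther gives (1 + 3) % 4 == 0, the start of
          // the cycle, which is exactly where a runner launched at the head
          // arrives after its own P steps.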
var runner = this
var k = 0
while (runner ne scout) {
runner = runner.tail
scout = scout.tail
k += 1
}
// Now runner and scout are at the beginning of the cycle. Advance
// cursor, adding to string, until it hits; then we'll have covered
// everything once. If cursor is already at beginning, we'd better
// advance one first unless runner didn't go anywhere (in which case
// we've already looped once).
if ((cursor eq scout) && (k > 0)) {
appendCursorElement()
cursor = cursor.tail
}
while (cursor ne scout) {
appendCursorElement()
cursor = cursor.tail
}
}
}
if (cursor.nonEmpty) {
// Either undefined or cyclic; we can check with tailDefined
if (!cursor.tailDefined) b.append(sep).append("<not computed>")
else b.append(sep).append("<cycle>")
}
}
b.append(end)
}
/**
* @return a string representation of this collection. Undefined elements are
* represented with `"_"`, an undefined tail is represented with `"<not computed>"`,
* and cycles are represented with `"<cycle>"`
*
* Examples:
*
* - `"Stream(_, <not computed>)"`, a non-empty stream, whose head has not been
* evaluated ;
* - `"Stream(_, 1, _, <not computed>)"`, a stream with at least three elements,
* the second one has been evaluated ;
* - `"Stream(1, 2, 3, <cycle>)"`, an infinite stream that contains
* a cycle at the fourth element.
*/
override def toString = addStringNoForce(new JStringBuilder(className), "(", ", ", ")").toString
@deprecated("Check .knownSize instead of .hasDefiniteSize for more actionable information (see scaladoc for details)", "2.13.0")
override def hasDefiniteSize: Boolean = isEmpty || {
if (!tailDefined) false
else {
// Two-iterator trick (2x & 1x speed) for cycle detection.
var those = this
var these = tail
while (those ne these) {
if (these.isEmpty) return true
if (!these.tailDefined) return false
these = these.tail
if (these.isEmpty) return true
if (!these.tailDefined) return false
these = these.tail
if (those eq these) return false
those = those.tail
}
false // Cycle detected
}
}
}
@deprecated("Use LazyList (which is fully lazy) instead of Stream (which has a lazy tail only)", "2.13.0")
@SerialVersionUID(3L)
object Stream extends SeqFactory[Stream] {
//@SerialVersionUID(3L) //TODO Putting an annotation on Stream.empty causes a cyclic dependency in unpickling
object Empty extends Stream[Nothing] {
override def isEmpty: Boolean = true
override def head: Nothing = throw new NoSuchElementException("head of empty stream")
override def tail: Stream[Nothing] = throw new UnsupportedOperationException("tail of empty stream")
/** Forces evaluation of the whole `Stream` and returns it.
*
* @note Often we use `Stream`s to represent an infinite set or series. If
* that's the case for your particular `Stream` then this function will never
* return and will probably crash the VM with an `OutOfMemory` exception.
* This function will not hang on a finite cycle, however.
*
* @return The fully realized `Stream`.
*/
def force: this.type = this
override def knownSize: Int = 0
protected def tailDefined: Boolean = false
}
@SerialVersionUID(3L)
final class Cons[A](override val head: A, tl: => Stream[A]) extends Stream[A] {
override def isEmpty: Boolean = false
@volatile private[this] var tlVal: Stream[A] = _
@volatile private[this] var tlGen = () => tl
protected def tailDefined: Boolean = tlGen eq null
override def tail: Stream[A] = {
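      // Descriptive note: double-checked locking on the volatile `tlGen`,
      // tested once outside the lock and once more inside, ensures the tail
      // thunk runs at most once even under concurrent access.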
if (!tailDefined)
synchronized {
if (!tailDefined) {
tlVal = tlGen()
tlGen = null
}
}
tlVal
}
/** Forces evaluation of the whole `Stream` and returns it.
*
* @note Often we use `Stream`s to represent an infinite set or series. If
* that's the case for your particular `Stream` then this function will never
* return and will probably crash the VM with an `OutOfMemory` exception.
* This function will not hang on a finite cycle, however.
*
* @return The fully realized `Stream`.
*/
def force: this.type = {
// Use standard 2x 1x iterator trick for cycle detection ("those" is slow one)
var these, those: Stream[A] = this
if (!these.isEmpty) these = these.tail
while (those ne these) {
if (these.isEmpty) return this
these = these.tail
if (these.isEmpty) return this
these = these.tail
if (these eq those) return this
those = those.tail
}
this
}
}
/** An alternative way of building and matching Streams using Stream.cons(hd, tl).
*/
object cons {
/** A stream consisting of a given first element and remaining elements
* @param hd The first element of the result stream
* @param tl The remaining elements of the result stream
*/
def apply[A](hd: A, tl: => Stream[A]): Stream[A] = new Cons(hd, tl)
/** Maps a stream to its head and tail */
def unapply[A](xs: Stream[A]): Option[(A, Stream[A])] = #::.unapply(xs)
}
implicit def toDeferrer[A](l: => Stream[A]): Deferrer[A] = new Deferrer[A](() => l)
final class Deferrer[A] private[Stream] (private val l: () => Stream[A]) extends AnyVal {
/** Construct a Stream consisting of a given first element followed by elements
* from another Stream.
*/
def #:: [B >: A](elem: B): Stream[B] = new Cons(elem, l())
/** Construct a Stream consisting of the concatenation of the given Stream and
* another Stream.
*/
def #:::[B >: A](prefix: Stream[B]): Stream[B] = prefix lazyAppendedAll l()
}
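  // Illustrative sketch: `1 #:: 2 #:: Stream.empty` builds a Stream lazily via
  // Deferrer, and `Stream(0) #::: rest` lazily appends a deferred `rest`.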
object #:: {
def unapply[A](s: Stream[A]): Option[(A, Stream[A])] =
if (s.nonEmpty) Some((s.head, s.tail)) else None
}
def from[A](coll: collection.IterableOnce[A]): Stream[A] = coll match {
case coll: Stream[A] => coll
case _ => fromIterator(coll.iterator)
}
/**
* @return A `Stream[A]` that gets its elements from the given `Iterator`.
*
* @param it Source iterator
* @tparam A type of elements
*/
// Note that the resulting `Stream` will be effectively iterable more than once because
// `Stream` memoizes its elements
def fromIterator[A](it: Iterator[A]): Stream[A] =
if (it.hasNext) {
new Stream.Cons(it.next(), fromIterator(it))
} else Stream.Empty
def empty[A]: Stream[A] = Empty
override def newBuilder[A]: mutable.Builder[A, Stream[A]] = ArrayBuffer.newBuilder[A].mapResult(array => from(array))
private[immutable] def withFilter[A](l: Stream[A] @uncheckedVariance, p: A => Boolean): collection.WithFilter[A, Stream] =
new WithFilter[A](l, p)
private[this] final class WithFilter[A](l: Stream[A] @uncheckedVariance, p: A => Boolean) extends collection.WithFilter[A, Stream] {
private[this] var s = l // set to null to allow GC after filtered
private[this] lazy val filtered: Stream[A] = { val f = s.filter(p); s = null.asInstanceOf[Stream[A]]; f } // don't set to null if throw during filter
def map[B](f: A => B): Stream[B] = filtered.map(f)
def flatMap[B](f: A => IterableOnce[B]): Stream[B] = filtered.flatMap(f)
def foreach[U](f: A => U): Unit = filtered.foreach(f)
def withFilter(q: A => Boolean): collection.WithFilter[A, Stream] = new WithFilter(filtered, q)
}
/** An infinite Stream that repeatedly applies a given function to a start value.
*
* @param start the start value of the Stream
* @param f the function that's repeatedly applied
* @return the Stream returning the infinite sequence of values `start, f(start), f(f(start)), ...`
*/
def iterate[A](start: A)(f: A => A): Stream[A] = {
cons(start, iterate(f(start))(f))
}
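  // Illustrative sketch: Stream.iterate(1)(_ * 2).take(5).force is
  // Stream(1, 2, 4, 8, 16).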
/**
* Create an infinite Stream starting at `start` and incrementing by
* step `step`.
*
* @param start the start value of the Stream
* @param step the increment value of the Stream
* @return the Stream starting at value `start`.
*/
def from(start: Int, step: Int): Stream[Int] =
cons(start, from(start + step, step))
/**
* Create an infinite Stream starting at `start` and incrementing by `1`.
*
* @param start the start value of the Stream
* @return the Stream starting at value `start`.
*/
def from(start: Int): Stream[Int] = from(start, 1)
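  // Illustrative sketches: Stream.from(0, 2).take(3).force is Stream(0, 2, 4),
  // and Stream.from(10).take(3).force is Stream(10, 11, 12).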
/**
* Create an infinite Stream containing the given element expression (which
* is computed for each occurrence).
*
* @param elem the element composing the resulting Stream
* @return the Stream containing an infinite number of elem
*/
def continually[A](elem: => A): Stream[A] = cons(elem, continually(elem))
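  // Illustrative sketch: Stream.continually(math.random()).take(3).force holds
  // three independently computed (then memoized) values.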
private[Stream] def filteredTail[A](stream: Stream[A] @uncheckedVariance, p: A => Boolean, isFlipped: Boolean) = {
cons(stream.head, stream.tail.filterImpl(p, isFlipped))
}
private[Stream] def collectedTail[A, B](head: B, stream: Stream[A] @uncheckedVariance, pf: PartialFunction[A, B]) = {
cons(head, stream.tail.collect(pf))
}
/** This serialization proxy is used for Streams which start with a sequence of evaluated cons cells.
* The forced sequence is serialized in a compact, sequential format, followed by the unevaluated tail, which uses
* standard Java serialization to store the complete structure of unevaluated thunks. This allows the serialization
* of long evaluated streams without exhausting the stack through recursive serialization of cons cells.
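   *
   * Illustrative layout (a sketch, not normative): the evaluated heads
   * `e0, e1, ..., ek` are written in order, then the `SerializeEnd` marker,
   * then the remaining unevaluated tail as a single object.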
*/
@SerialVersionUID(3L)
class SerializationProxy[A](@transient protected var coll: Stream[A]) extends Serializable {
private[this] def writeObject(out: ObjectOutputStream): Unit = {
out.defaultWriteObject()
var these = coll
while(these.nonEmpty && these.tailDefined) {
out.writeObject(these.head)
these = these.tail
}
out.writeObject(SerializeEnd)
out.writeObject(these)
}
private[this] def readObject(in: ObjectInputStream): Unit = {
in.defaultReadObject()
val init = new ArrayBuffer[A]
var initRead = false
while (!initRead) in.readObject match {
case SerializeEnd => initRead = true
case a => init += a.asInstanceOf[A]
}
val tail = in.readObject().asInstanceOf[Stream[A]]
coll = (init ++: tail)
}
protected[this] def readResolve(): Any = coll
}
}
| martijnhoekstra/scala | src/library/scala/collection/immutable/Stream.scala | Scala | apache-2.0 | 22,453 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.v2.avro
import java.net.URI
import scala.util.control.NonFatal
import org.apache.avro.Schema
import org.apache.avro.file.DataFileReader
import org.apache.avro.generic.{GenericDatumReader, GenericRecord}
import org.apache.avro.mapred.FsInput
import org.apache.hadoop.fs.Path
import org.apache.spark.TaskContext
import org.apache.spark.broadcast.Broadcast
import org.apache.spark.internal.Logging
import org.apache.spark.sql.avro.{AvroDeserializer, AvroOptions, AvroUtils}
import org.apache.spark.sql.catalyst.{InternalRow, NoopFilters, OrderedFilters}
import org.apache.spark.sql.connector.read.PartitionReader
import org.apache.spark.sql.execution.datasources.{DataSourceUtils, PartitionedFile}
import org.apache.spark.sql.execution.datasources.v2.{EmptyPartitionReader, FilePartitionReaderFactory, PartitionReaderWithPartitionValues}
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.sources.Filter
import org.apache.spark.sql.types.StructType
import org.apache.spark.util.SerializableConfiguration
/**
* A factory used to create AVRO readers.
*
* @param sqlConf SQL configuration.
* @param broadcastedConf Broadcast serializable Hadoop Configuration.
* @param dataSchema Schema of AVRO files.
* @param readDataSchema Required data schema of AVRO files.
* @param partitionSchema Schema of partitions.
* @param parsedOptions Options for parsing AVRO files.
*/
case class AvroPartitionReaderFactory(
sqlConf: SQLConf,
broadcastedConf: Broadcast[SerializableConfiguration],
dataSchema: StructType,
readDataSchema: StructType,
partitionSchema: StructType,
parsedOptions: AvroOptions,
filters: Seq[Filter]) extends FilePartitionReaderFactory with Logging {
override def buildReader(partitionedFile: PartitionedFile): PartitionReader[InternalRow] = {
val conf = broadcastedConf.value.value
val userProvidedSchema = parsedOptions.schema.map(new Schema.Parser().parse)
if (parsedOptions.ignoreExtension || partitionedFile.filePath.endsWith(".avro")) {
val reader = {
val in = new FsInput(new Path(new URI(partitionedFile.filePath)), conf)
try {
val datumReader = userProvidedSchema match {
case Some(userSchema) => new GenericDatumReader[GenericRecord](userSchema)
case _ => new GenericDatumReader[GenericRecord]()
}
DataFileReader.openReader(in, datumReader)
} catch {
case NonFatal(e) =>
logError("Exception while opening DataFileReader", e)
in.close()
throw e
}
}
// Ensure that the reader is closed even if the task fails or doesn't consume the entire
// iterator of records.
Option(TaskContext.get()).foreach { taskContext =>
taskContext.addTaskCompletionListener[Unit] { _ =>
reader.close()
}
}
reader.sync(partitionedFile.start)
val datetimeRebaseMode = DataSourceUtils.datetimeRebaseMode(
reader.asInstanceOf[DataFileReader[_]].getMetaString,
SQLConf.get.getConf(SQLConf.LEGACY_AVRO_REBASE_MODE_IN_READ))
val avroFilters = if (SQLConf.get.avroFilterPushDown) {
new OrderedFilters(filters, readDataSchema)
} else {
new NoopFilters
}
val fileReader = new PartitionReader[InternalRow] with AvroUtils.RowReader {
override val fileReader = reader
override val deserializer = new AvroDeserializer(
userProvidedSchema.getOrElse(reader.getSchema),
readDataSchema,
datetimeRebaseMode,
avroFilters)
override val stopPosition = partitionedFile.start + partitionedFile.length
override def next(): Boolean = hasNextRow
override def get(): InternalRow = nextRow
override def close(): Unit = reader.close()
}
new PartitionReaderWithPartitionValues(fileReader, readDataSchema,
partitionSchema, partitionedFile.partitionValues)
} else {
new EmptyPartitionReader[InternalRow]
}
}
}
| witgo/spark | external/avro/src/main/scala/org/apache/spark/sql/v2/avro/AvroPartitionReaderFactory.scala | Scala | apache-2.0 | 4,868 |
/**
* This file is part of the TA Buddy project.
* Copyright (c) 2013-2014 Alexey Aksenov [email protected]
*
* This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU Affero General Public License version 3
* as published by the Free Software Foundation with the addition of the
* following permission added to Section 15 as permitted in Section 7(a):
* FOR ANY PART OF THE COVERED WORK IN WHICH THE COPYRIGHT IS OWNED
* BY Limited Liability Company «MEZHGALAKTICHESKIJ TORGOVYJ ALIANS»,
* Limited Liability Company «MEZHGALAKTICHESKIJ TORGOVYJ ALIANS» DISCLAIMS
* THE WARRANTY OF NON INFRINGEMENT OF THIRD PARTY RIGHTS.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE.
 * See the GNU Affero General Public License for more details.
 * You should have received a copy of the GNU Affero General Public License
* along with this program; if not, see http://www.gnu.org/licenses or write to
* the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
* Boston, MA, 02110-1301 USA, or download the license from the following URL:
* http://www.gnu.org/licenses/agpl.html
*
* The interactive user interfaces in modified source and object code versions
* of this program must display Appropriate Legal Notices, as required under
 * Section 5 of the GNU Affero General Public License.
*
 * In accordance with Section 7(b) of the GNU Affero General Public License,
* you must retain the producer line in every report, form or document
* that is created or manipulated using TA Buddy.
*
* You can be released from the requirements of the license by purchasing
* a commercial license. Buying such a license is mandatory as soon as you
* develop commercial activities involving the TA Buddy software without
* disclosing the source code of your own applications.
* These activities include: offering paid services to customers,
* serving files in a web or/and network application,
* shipping TA Buddy with a closed source product.
*
* For more information, please contact Digimead Team at this
* address: [email protected]
*/
package org.digimead.tabuddy.desktop.model.editor.ui.view.editor
import org.digimead.tabuddy.desktop.core.support.App
import org.digimead.tabuddy.desktop.core.ui.support.TreeProxy
import org.digimead.tabuddy.desktop.model.editor.Messages
import org.digimead.tabuddy.model.element.Element
import org.eclipse.jface.action.{ Action, IAction }
import org.eclipse.jface.util.ConfigureColumns
import org.eclipse.jface.viewers.StructuredSelection
import org.eclipse.jface.window.SameShellProvider
import scala.concurrent.Future
/**
* Table actions
*/
trait TableActions {
this: Table ⇒
object ActionConfigureColumns extends Action("Configure Columns...") {
def apply() = ConfigureColumns.forTable(tableViewer.getTable(), new SameShellProvider(content.getShell()))
override def run() = apply()
}
object ActionAutoResize extends Action(Messages.autoresize_key, IAction.AS_CHECK_BOX) {
setChecked(true)
def apply(immediately: Boolean = false) = if (immediately)
autoresize(true)
else {
implicit val ec = App.system.dispatcher
Future { autoresize(false) } onFailure {
case e: Exception ⇒ log.error(e.getMessage(), e)
case e ⇒ log.error(e.toString())
}
}
override def run = if (isChecked()) apply()
}
object ActionResetSorting extends Action(Messages.resetSorting_text) {
// column -1 is user defined sorting
def apply(immediately: Boolean = false) = {
val comparator = tableViewer.getComparator().asInstanceOf[Table.TableComparator]
comparator.column = -1
tableViewer.refresh()
}
override def run = apply()
}
class ActionSelectInTree(val element: Element) extends Action(Messages.select_text) {
def apply() = content.tree.treeViewer.setSelection(new StructuredSelection(TreeProxy.Item(element)), true)
override def run() = apply()
}
object ActionShowTree extends Action(Messages.tree_text) {
def apply() = {
content.ActionHideTree.setChecked(false)
content.ActionHideTree()
}
override def run() = apply()
}
}
| digimead/digi-TABuddy-desktop | part-model-editor/src/main/scala/org/digimead/tabuddy/desktop/model/editor/ui/view/editor/TableActions.scala | Scala | agpl-3.0 | 4,311 |
package org.ergoplatform
import scorex.util.ModifierId
trait ErgoBoxAssets {
def value: Long
def tokens: Map[ModifierId, Long]
}
final case class ErgoBoxAssetsHolder(
val value: Long,
val tokens: Map[ModifierId, Long]
) extends ErgoBoxAssets
object ErgoBoxAssetsHolder {
def apply(value: Long): ErgoBoxAssetsHolder = ErgoBoxAssetsHolder(value, Map())
} | ScorexFoundation/sigmastate-interpreter | sigmastate/src/main/scala/org/ergoplatform/ErgoBoxAssets.scala | Scala | mit | 366 |
package com.twitter.scrooge.java_generator.test
object ApacheCompatibilityHelpers {
val cleanTypedefMetadata = """(new FieldValueMetaData\\([A-Z,a-z,0-9.]*).*?\\)\\)\\);""".r
val randomWhitespace = """[ ]*\\{""".r
def cleanWhitespace(actual: String, cleanEmptySemicolons: Boolean): Array[String] = {
val values = actual.split("\\n").map { s: String =>
s.trim
}.filter { s =>
!s.isEmpty
}.filter { s =>
!s.startsWith("/**") && !s.startsWith("*")
}.filter { s =>
!cleanEmptySemicolons || !s.equals(";")
}.map { s =>
val clean1 = cleanTypedefMetadata.findFirstMatchIn(s) match {
case Some(m) => m.group(1) + ")));"
case None => s
}
randomWhitespace.replaceAllIn(clean1, " {")
}
values
}
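  // Illustrative sketch (hypothetical input): given the three lines
  //   "class A   {", ";", "}"
  // cleanWhitespace(text, cleanEmptySemicolons = true) returns
  //   Array("class A {", "}")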
}
| thirstycrow/scrooge | scrooge-generator/src/main/scala/com/twitter/scrooge/java_generator/test/ApacheCompatibilityHelpers.scala | Scala | apache-2.0 | 777 |
package jp.co.bizreach.play2stub
import play.api.libs.ws.WSResponse
import play.api.mvc.{AnyContent, Request}
/**
* Created by scova0731 on 10/19/14.
*/
trait ParamBuilder {
def build(response: Option[WSResponse])(implicit request: Request[AnyContent],
route: Option[StubRoute]): Map[String, Any]
}
class PathAndQueryStringParamBuilder extends ParamBuilder {
def build(response: Option[WSResponse])(implicit request: Request[AnyContent],
route: Option[StubRoute]): Map[String, Any] =
Map("rawQueryString" -> request.rawQueryString, "path" -> request.path)
} | bizreach/play2-stub | src/main/scala/jp/co/bizreach/play2stub/ParamBuilder.scala | Scala | apache-2.0 | 597 |
package wom.callable
import wom.types.{WomMapType, WomStringType}
import wom.values.{WomMap, WomString, WomValue}
/**
* Parameter documentation quoted from CWL Spec.
*
* @param outputPath runtime.outdir: an absolute path to the designated output directory
* @param tempPath runtime.tmpdir: an absolute path to the designated temporary directory
* @param cores runtime.cores: number of CPU cores reserved for the tool process
* @param ram runtime.ram: amount of RAM in mebibytes (2**20) reserved for the tool process
* @param outputPathSize runtime.outdirSize: reserved storage space available in the designated output directory
* @param tempPathSize runtime.tmpdirSize: reserved storage space available in the designated temporary directory
*/
case class RuntimeEnvironment(outputPath: String,
tempPath: String,
cores: Int,
ram: Double,
outputPathSize: Long,
tempPathSize: Long) {
def cwlMap: WomValue = {
val womMap: Map[WomValue, WomValue] = Map(
"outdir" -> outputPath,
"tmpdir" -> tempPath,
"cores" -> cores.toString,
"ram" -> ram.toString,
"outdirSize" -> outputPathSize.toString,
"tmpdirSize" -> tempPathSize.toString
).map{
case (key, value) => WomString(key) -> WomString(value)
}
WomMap(WomMapType(WomStringType, WomStringType), womMap)
}
}
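// Illustrative usage (a sketch; the values are made up):
//   val env = RuntimeEnvironment("/out", "/tmp", cores = 2, ram = 1024.0,
//     outputPathSize = 512L, tempPathSize = 512L)
//   env.cwlMap  // a WomMap from the six CWL runtime keys to WomString values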
| ohsu-comp-bio/cromwell | wom/src/main/scala/wom/callable/RuntimeEnvironment.scala | Scala | bsd-3-clause | 1,486 |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package wvlet.airframe.examples.airspec
import wvlet.airframe._
import wvlet.airspec.AirSpec
/**
*/
class AirSpec_04_Session extends AirSpec {
override protected def design: Design = {
newDesign.bind[String].toInstance("hello")
}
test("overrideDesign") { (session: Session, s: String) =>
s shouldBe "hello"
val d = newDesign
.bind[String].toInstance("hello child")
session.withChildSession(d) { childSession =>
val cs = childSession.build[String]
cs shouldBe "hello child"
}
}
}
| wvlet/airframe | examples/src/test/scala/wvlet/airframe/examples/airspec/AirSpec_04_Session.scala | Scala | apache-2.0 | 1,093 |
package at.logic.gapt.examples
import at.logic.gapt.cli.GAPScalaInteractiveShellLibrary.parse
import at.logic.gapt.expr._
import at.logic.gapt.language.fol.{instantiate, FOLSubstitution, Utils}
import at.logic.gapt.proofs.lk._
import at.logic.gapt.proofs.lk.base.LKProof
// Functions to construct cut-free FOL LK proofs of the sequents
//
// P(0), \\ALL x . P(x) -> P(s(x)) :- P(s^n(0))
//
// where n is an Integer parameter >= 0.
object LinearExampleProof {
val s = "s"
val p = "P"
val c = "0"
def apply( n: Int ) = proof( 0, n )
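  // Illustrative sketch: LinearExampleProof( 2 ) yields a cut-free proof of
  // P(0), \\ALL x . P(x) -> P(s(x)) :- P(s(s(0)))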
// returns LKProof with end-sequent P(s^k(0)), \\ALL x . P(x) -> P(s(x)) :- P(s^n(0))
def proof( k: Int, n: Int ) : LKProof =
{
val x = FOLVar( "x" )
val ass = All( x, Imp( FOLAtom( p, x::Nil ), FOLAtom( p, FOLFunction( s, x::Nil )::Nil ) ) )
if ( k == n ) // leaf proof
{
val a = FOLAtom( p, Utils.numeral( n )::Nil )
WeakeningLeftRule( Axiom( a::Nil, a::Nil ), ass )
}
else
{
val p1 = FOLAtom( p, Utils.numeral( k )::Nil )
val p2 = FOLAtom( p, Utils.numeral( k + 1 )::Nil )
val aux = Imp( p1, p2 )
ContractionLeftRule( ForallLeftRule( ImpLeftRule( Axiom( p1::Nil, p1::Nil ), proof( k + 1, n ), p1, p2 ), aux, ass, Utils.numeral( k ) ), ass )
}
}
}
// Functions to construct cut-free FOL LK proofs of the sequents
//
// P(0,0), \\ALL x \\ALL y. P(x,y) -> P(s(x),y), \\ALL x \\ALL y. P(x,y) -> P(x,s(y)) :- P(s^n(0),s^n(0))
//
// where n is an Integer parameter >= 0.
//
// The proofs constructed here go along the diagonal of P, i.e. one X-step, then one Y-step, etc.
object SquareDiagonalExampleProof {
val s = "s"
val p = "P"
val c = "0"
def apply( n: Int ) = proof( 0, n )
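  // Illustrative sketch: apply( 1 ) derives P(s(0),s(0)) from P(0,0) by one
  // X-step followed by one Y-step, i.e. one move along the diagonal.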
// returns LKProof with end-sequent P(s^k(0),s^k(0)), \\ALL x \\ALL y. P(x,y) -> P(s(x),y), \\ALL x \\ALL y . P(x,y) -> P(x,s(y)) :- P(s^n(0),s^n(0))
def proof( k: Int, n: Int ) : LKProof =
{
val x = FOLVar( "x" )
val y = FOLVar( "y" )
val assx = All( x, All( y, Imp( FOLAtom( p, x::y::Nil ), FOLAtom(p, FOLFunction( s, x::Nil )::y::Nil ) ) ) )
def assx_aux( k: Int ) = All( y, Imp( FOLAtom( p, Utils.numeral( k )::y::Nil ), FOLAtom(p, Utils.numeral( k + 1 )::y::Nil ) ) )
val assy = All( x, All( y, Imp( FOLAtom( p, x::y::Nil ), FOLAtom(p, x::FOLFunction( s, y::Nil )::Nil ) ) ) )
def assy_aux( k: Int ) = All( y, Imp( FOLAtom( p, Utils.numeral( k )::y::Nil ), FOLAtom(p, Utils.numeral( k )::FOLFunction( s, y::Nil )::Nil ) ) )
if ( k == n ) // leaf proof
{
val a = FOLAtom( p, Utils.numeral( n )::Utils.numeral( n )::Nil )
WeakeningLeftRule( WeakeningLeftRule( Axiom( a:: Nil, a::Nil ), assx ), assy )
}
else
{
val ayl = FOLAtom( p, Utils.numeral( k + 1 )::Utils.numeral( k )::Nil ) // atom y left
val ayr = FOLAtom( p, Utils.numeral( k + 1 )::Utils.numeral( k + 1 )::Nil )
val auxy = Imp( ayl, ayr )
val p1 = ImpLeftRule( Axiom( ayl::Nil, ayl::Nil ), proof( k + 1, n), ayl, ayr )
val p2 = ForallLeftRule( p1, auxy, assy_aux( k + 1 ), Utils.numeral( k ) )
val p3 = ForallLeftRule( p2, assy_aux( k + 1 ), assy, Utils.numeral( k + 1) )
val p4 = ContractionLeftRule( p3, assy )
val axl = FOLAtom( p, Utils.numeral( k )::Utils.numeral( k )::Nil ) // atom x left
val axr = FOLAtom( p, Utils.numeral( k + 1 )::Utils.numeral( k )::Nil )
val auxx = Imp( axl, axr )
val p5 = ImpLeftRule( Axiom( axl::Nil, axl::Nil ), p4, axl, axr )
val p6 = ForallLeftRule( p5, auxx, assx_aux( k ), Utils.numeral( k ) )
val p7 = ForallLeftRule( p6, assx_aux( k ), assx, Utils.numeral( k ) )
ContractionLeftRule( p7, assx )
}
}
}
// Functions to construct cut-free FOL LK proofs of the sequents
//
// P(0,0), \\ALL x \\ALL y. P(x,y) -> P(s(x),y), \\ALL x \\ALL y. P(x,y) -> P(x,s(y)) :- P(s^n(0),s^n(0))
//
// where n is an Integer parameter >= 0.
//
// The proofs constructed here go along the edges of P, i.e. first all X-steps are performed, then all Y-steps are performed
object SquareEdgesExampleProof {
val s = "s"
val p = "P"
val c = "0"
val x = FOLVar( "x" )
val y = FOLVar( "y" )
val assx = All( x, All( y, Imp( FOLAtom( p, x::y::Nil ), FOLAtom(p, FOLFunction( s, x::Nil )::y::Nil ) ) ) )
def assx_aux( k: Int ) = All( y, Imp( FOLAtom( p, Utils.numeral( k )::y::Nil ), FOLAtom(p, Utils.numeral( k + 1 )::y::Nil ) ) )
val assy = All( x, All( y, Imp( FOLAtom( p, x::y::Nil ), FOLAtom(p, x::FOLFunction( s, y::Nil )::Nil ) ) ) )
def assy_aux( k: Int ) = All( y, Imp( FOLAtom( p, Utils.numeral( k )::y::Nil ), FOLAtom(p, Utils.numeral( k )::FOLFunction( s, y::Nil )::Nil ) ) )
def apply( n: Int ) = proof( 0, n )
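  // Illustrative sketch: apply( n ) first walks the X-axis to P(s^n(0),0) in
  // proof, then upper_proof walks the Y-axis up to P(s^n(0),s^n(0)).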
// returns LKProof with end-sequent P(s^k(0),0), \\ALL x \\ALL y. P(x,y) -> P(s(x),y), \\ALL x \\ALL y. P(x,y) -> P(x,s(y)) :- P(s^n(0),s^n(0))
def proof( k: Int, n: Int ) : LKProof =
{
if ( k == n )
{
val p1 = ForallLeftRule( upper_proof( 0, n ), assy_aux( n ), assy, Utils.numeral( n ) )
WeakeningLeftRule( p1, assx )
}
else
{
val pk = FOLAtom( p, Utils.numeral( k )::Utils.numeral( 0 )::Nil )
val pkp1 = FOLAtom( p, Utils.numeral( k + 1 )::Utils.numeral( 0 )::Nil )
val impl = Imp( pk, pkp1 )
ContractionLeftRule(
ForallLeftRule(
ForallLeftRule(
ImpLeftRule(
Axiom( pk::Nil, pk::Nil ),
proof( k + 1, n ),
pk, pkp1 ),
impl, assx_aux( k ), Utils.numeral( 0 )),
assx_aux( k ), assx, Utils.numeral( k )),
assx )
}
}
// returns LKProof with end-sequent P(s^n(0),s^k(0)), \\ALL y . P(s^n(0),y) -> P(s^n(0),s(y)) :- P(s^n(0),s^n(0))
def upper_proof( k: Int, n: Int ) : LKProof =
{
if ( k == n ) // leaf proof
{
val a = FOLAtom( p, Utils.numeral( n )::Utils.numeral( n )::Nil )
WeakeningLeftRule( Axiom( a::Nil, a::Nil ), assy_aux( n ) )
}
else
{
val pk = FOLAtom( p, Utils.numeral( n )::Utils.numeral( k )::Nil )
val pkp1 = FOLAtom( p, Utils.numeral( n )::Utils.numeral( k + 1 )::Nil )
val impl = Imp( pk, pkp1 )
ContractionLeftRule( ForallLeftRule( ImpLeftRule( Axiom( pk::Nil, pk::Nil ), upper_proof( k + 1, n ), pk, pkp1 ), impl, assy_aux( n ), Utils.numeral( k ) ), assy_aux( n ))
}
}
}
// Functions to construct cut-free FOL LK proofs of the sequents
//
// P(a,b), \\ALL x \\ALL y. P(x,y) -> P(sx(x),y), \\ALL x \\ALL y. P(x,y) -> P(x,sy(y)) :- P(sx^n(a),sy^n(b))
//
// where n is an Integer parameter >= 0.
//
// The proofs constructed here go along the edges of P, i.e. first all X-steps are performed, then all Y-steps are performed,
// but unlike SquareEdgesExampleProof, different functions are used for the X- and the Y-directions.
object SquareEdges2DimExampleProof {
  //separate successor for the x- and y-directions
val sx = "s_x"
val sy = "s_y"
//0 of the x-axis
val a= "a"
//0 of the y-axis
val b = "b"
val p = "P"
val x = FOLVar( "x" )
val y = FOLVar( "y" )
//Converts integers into terms consisting of nested application of the successor function to 0
def numeralX (n: Int) = Utils.iterateTerm(FOLConst( a ), sx, n)
def numeralY (n: Int) = Utils.iterateTerm(FOLConst( b ), sy, n)
val assx = All( x, All( y, Imp( FOLAtom( p, x::y::Nil ), FOLAtom(p, FOLFunction( sx, x::Nil )::y::Nil ) ) ) )
def assx_aux( k: Int ) = All( y, Imp( FOLAtom( p, numeralX(k)::y::Nil ), FOLAtom(p, numeralX(k + 1)::y::Nil ) ) )
val assy = All( x, All( y, Imp( FOLAtom( p, x::y::Nil ), FOLAtom(p, x::FOLFunction( sy, y::Nil )::Nil ) ) ) )
def assy_aux( k: Int ) = All( y, Imp( FOLAtom( p, numeralX( k )::y::Nil ), FOLAtom(p, numeralX( k )::FOLFunction( sy, y::Nil )::Nil ) ) )
def apply( n: Int ) = proof( 0, n )
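  // Illustrative sketch: as in SquareEdgesExampleProof, but with a separate
  // successor per axis; e.g. apply( 1 ) ends in P(s_x(a),s_y(b)).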
// returns LKProof with end-sequent P(sx^k(a),0), \\ALL x \\ALL y. P(x,y) -> P(sx(x),y), \\ALL x \\ALL y. P(x,y) -> P(x,sy(y)) :- P(sx^n(a),sy^n(b))
def proof( k: Int, n: Int ) : LKProof =
{
if ( k == n )
{
val p1 = ForallLeftRule( upper_proof( 0, n ), assy_aux( n ), assy, numeralX( n ) )
WeakeningLeftRule( p1, assx )
}
else
{
val pk = FOLAtom( p, numeralX(k)::numeralY(0)::Nil )
val pkp1 = FOLAtom( p, numeralX( k + 1)::numeralY(0)::Nil )
val impl = Imp( pk, pkp1 )
ContractionLeftRule(
ForallLeftRule(
ForallLeftRule(
ImpLeftRule(
Axiom( pk::Nil, pk::Nil ),
proof( k + 1, n ),
pk, pkp1),
impl, assx_aux( k ), numeralY( 0 )), //possibly not correct -> switch?
assx_aux( k ), assx, numeralX( k )), //same
assx )
}
}
// returns LKProof with end-sequent P(s^n(0),s^k(0)), \\ALL y . P(s^n(0),y) -> P(s^n(0),s(y)) :- P(s^n(0),s^n(0))
//Conjecture: this is the part that goes in the Y-direction.
def upper_proof( k: Int, n: Int ) : LKProof =
{
if ( k == n ) // leaf proof
{
val ax = FOLAtom( p, numeralX( n )::numeralY( n )::Nil )
WeakeningLeftRule( Axiom( ax::Nil, ax::Nil ), assy_aux( n ) )
}
else
{
val pk = FOLAtom( p, numeralX( n )::numeralY( k )::Nil )
val pkp1 = FOLAtom( p, numeralX( n )::numeralY( k + 1 )::Nil )
val impl = Imp( pk, pkp1 )
ContractionLeftRule(
ForallLeftRule(
ImpLeftRule(
Axiom( pk::Nil, pk::Nil ),
upper_proof( k + 1, n ),
pk,
pkp1),
impl,
assy_aux( n ),
numeralY( k )), //possibly not correct: switch or maybe restructure.
assy_aux( n ))
}
}
}
// Functions to construct the straightforward cut-free FOL LK proofs of the sequents
//
// P(s^n(0),0), \\ALL x \\ALL y . P(s(x),y) -> P(x,s(y)) :- P(0,s^n(0))
//
// where n is an Integer parameter >= 0.
object SumExampleProof {
val s = "s"
val p = "P"
val x = FOLVar( "x" )
val y = FOLVar( "y" )
val ass = All( x, All( y, Imp( FOLAtom( p, FOLFunction( s, x::Nil )::y::Nil ), FOLAtom( p, x::FOLFunction( s, y::Nil )::Nil ) ) ) )
def ass_inst( x: Int ) = All( y, Imp( FOLAtom( p, FOLFunction( s, Utils.numeral( x )::Nil )::y::Nil ), FOLAtom( p, Utils.numeral( x )::FOLFunction( s, y::Nil )::Nil ) ) )
def ass_inst_inst( x: Int, y: Int ) = Imp( FOLAtom( p, FOLFunction( s, Utils.numeral( x )::Nil )::Utils.numeral( y )::Nil ), FOLAtom( p, Utils.numeral( x )::FOLFunction( s, Utils.numeral( y )::Nil )::Nil ) )
def apply( n: Int ) = proof( 0, n )
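  // Illustrative sketch: SumExampleProof( 2 ) proves
  // P(s(s(0)),0), \\ALL x \\ALL y . P(s(x),y) -> P(x,s(y)) :- P(0,s(s(0)))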
// returns LKProof with end-sequent P(s^{n-k}(0),s^k(0)), \\ALL x \\ALL y. P(s(x),y) -> P(x,s(y)) :- P(0,s^n(0))
def proof( k: Int, n: Int ) : LKProof =
{
if ( k == n ) // leaf proof
{
val a = FOLAtom( p, Utils.numeral( 0 )::Utils.numeral( n )::Nil )
WeakeningLeftRule( Axiom( a::Nil, a::Nil ), ass )
}
else
{
val a1 = FOLAtom( p, Utils.numeral( n - k )::Utils.numeral( k )::Nil )
val a2 = FOLAtom( p, Utils.numeral( n - (k + 1) )::Utils.numeral( k + 1 )::Nil )
ContractionLeftRule(
ForallLeftRule(
ForallLeftRule(
ImpLeftRule(
Axiom( a1::Nil, a1::Nil ),
proof( k + 1, n ),
a1, a2 ),
ass_inst_inst( n - (k + 1), k ), ass_inst( n - (k + 1) ), Utils.numeral( k ) ),
ass_inst( n - (k + 1)), ass, Utils.numeral( n - (k + 1) ) ),
ass )
}
}
}
// Functions to construct cut-free FOL LK proofs of the sequents
//
// Refl, Trans, \\ALL x. f(x) = x :- f^n(a) = a
//
// where n is an Integer parameter >= 0.
object LinearEqExampleProof {
val a = "a"
val f = "f"
val x = FOLVar( "x")
val y = FOLVar( "y")
val z = FOLVar( "z")
val Refl = All( x, Eq(x ,x))
val Ass = All( x, Eq( FOLFunction( f, x::Nil ), x ))
val Trans = All( x, All( y, All( z, Imp( Eq(x, y), Imp( Eq(y, z), Eq(x, z) ) ) ) ) )
def apply( n: Int ) = proof( n )
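  // Illustrative sketch: LinearEqExampleProof( 2 ) proves
  // Refl, Trans, \\ALL x. f(x) = x :- f(f(a)) = a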
// returns LKProof with end-sequent Refl, Trans, \\ALL x. f(x) = x :- f^k(a) = a
def proof( k: Int ) : LKProof = {
if ( k == 0 ) // leaf proof
{
val a_eq_a = Eq( Utils.iterateTerm( FOLConst( a ), f, 0 ), Utils.iterateTerm( FOLConst( a ), f, 0 ) )
WeakeningLeftRule( WeakeningLeftRule( ForallLeftRule( Axiom( a_eq_a::Nil, a_eq_a::Nil ), a_eq_a, Refl, FOLConst( a ) ), Trans ), Ass )
}
else
{
// atoms
val ka_eq_a = Eq( Utils.iterateTerm( FOLConst( a ), f, k ), Utils.iterateTerm( FOLConst( a ), f, 0 ) )
val ka_eq_ka = Eq( Utils.iterateTerm( FOLConst( a ), f, k ), Utils.iterateTerm( FOLConst( a ), f, k ) )
val kma_eq_a = Eq( Utils.iterateTerm( FOLConst( a ), f, k-1 ), Utils.iterateTerm( FOLConst( a ), f, 0 ) )
val ka_eq_kma = Eq( Utils.iterateTerm( FOLConst( a ), f, k ), Utils.iterateTerm( FOLConst( a ), f, k-1 ) )
val ka_eq_z = Eq( Utils.iterateTerm( FOLConst( a ), f, k ), z )
val kma_eq_z = Eq( Utils.iterateTerm( FOLConst( a ), f, k-1 ), z )
val y_eq_z = Eq( y, z )
val ka_eq_y = Eq( Utils.iterateTerm( FOLConst( a ), f, k ), y )
val x_eq_y = Eq( x, y )
val x_eq_z = Eq( x, z )
// prop. formulas
val Trans2 = Imp( kma_eq_a, ka_eq_a )
val Trans3 = Imp( ka_eq_kma, Trans2 )
// quant. formulas
val Trans3_1 = All( z, Imp( ka_eq_kma, Imp( kma_eq_z, ka_eq_z ) ) )
val Trans3_2 = All( y, All( z, Imp( ka_eq_y, Imp( y_eq_z, ka_eq_z ) ) ) )
val Trans3_3 = All( x, All( y, All( z, Imp( x_eq_y, Imp( y_eq_z, x_eq_z ) ) ) ) )
// prop. proofs
val p1 = ImpLeftRule( proof( k-1 ), Axiom( ka_eq_a::Nil, ka_eq_a::Nil ), kma_eq_a, ka_eq_a )
val p0 = Axiom( ka_eq_kma::Nil, ka_eq_kma::Nil )
val p2 = ImpLeftRule( p0, p1, ka_eq_kma, Trans2 )
// proofs containing quantifiers
val p3 = ForallLeftRule( p2, Trans3, Trans3_1, Utils.iterateTerm( FOLConst( a ), f, 0 ) )
val p4 = ForallLeftRule( p3, Trans3_1, Trans3_2, Utils.iterateTerm( FOLConst( a ), f, k-1 ) )
val p5 = ForallLeftRule( p4, Trans3_2, Trans3_3, Utils.iterateTerm( FOLConst( a ), f, k ) )
val p6 = ForallLeftRule( p5, ka_eq_kma, Ass, Utils.iterateTerm( FOLConst( a ), f, k-1 ) )
val p7 = ContractionLeftRule( p6, Ass )
val p8 = ContractionLeftRule( p7, Trans )
p8
}
}
}
object SumOfOnesF2ExampleProof {
val s = "s"
val zero = "0"
val p = "+"
var f = "f"
val x = FOLVar( "x")
val y = FOLVar( "y")
val z = FOLVar( "z")
//Helpers
def Fn(n: Int) = FOLFunction(f, Utils.numeral(n)::Nil)
//Forall x.(x + 1 = s(x)) (reversed to avoid the application of the symmetry of =)
val Plus = All(x, Eq(FOLFunction(p, x::Utils.numeral(1)::Nil), FOLFunction(s, x::Nil)))
def PlusX(x:FOLTerm) = Eq(FOLFunction(p, x::Utils.numeral(1)::Nil),FOLFunction(s, x::Nil))
//Forall xyz.(y=z -> (x+y=x+z))
val EqPlus = All(x, All(y, All(z, Imp(Eq( y, z), Eq(FOLFunction(p, y::x::Nil), FOLFunction(p, z::x::Nil)) ) )))
def EqPlusX(x:FOLTerm) = All(y, All(z, Imp(Eq( y, z), Eq( FOLFunction(p, y::x::Nil), FOLFunction(p, z::x::Nil)) ) ))
def EqPlusXY(x:FOLTerm, y:FOLTerm) = All(z, Imp(Eq( y, z), Eq( FOLFunction(p, y::x::Nil), FOLFunction(p, z::x::Nil)) ) )
def EqPlusXYZ(x:FOLTerm, y:FOLTerm, z:FOLTerm) = Imp(Eq( y, z), Eq( FOLFunction(p, y::x::Nil), FOLFunction(p, z::x::Nil)) )
//Forall xyz.(x = y ^ y = z -> x = z)
val Trans = All(x, All(y, All(z, Imp(And(Eq( x, y) , Eq( y, z) ), Eq( x, z)))))
//Definition of f
//f(0) = 0
val FZero = Eq(FOLFunction(f, Utils.numeral(0)::Nil), Utils.numeral(0))
//Forall x.f(s(x)) = f(x) + s(0)
val FSucc = All(x, Eq( FOLFunction(f, FOLFunction(s, x::Nil)::Nil), FOLFunction(p, FOLFunction(f, x::Nil)::Utils.numeral(1)::Nil)))
def FSuccX(x:FOLTerm) = Eq( FOLFunction(f, FOLFunction(s, x::Nil)::Nil), FOLFunction(p, FOLFunction(f, x::Nil)::Utils.numeral(1)::Nil))
//The starting axiom f(n) = n |- f(n) = n
def start(n: Int) = Axiom(Eq( Fn(n), Utils.numeral(n))::Trans::Plus::EqPlus::FSucc::Nil, Eq( Fn(n), Utils.numeral(n))::Nil)
def apply(n: Int) = RecProof(start(n), n)
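  // Illustrative sketch: each RecProof step uses FSucc, Plus and EqPlus to
  // reduce the obligation f(s^k(0)) = s^k(0) to f(s^{k-1}(0)) = s^{k-1}(0),
  // stepping down until the base case applies.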
/** Recursively constructs the proof, starting with the proof s1.
*/
def RecProof(s1: LKProof, n: Int) : LKProof = {
if (n <= 0) { s1 }
else {
val fn_eq_n = Eq( Fn(n-1), Utils.numeral(n-1))
val fn_s0 = FOLFunction(p, Fn(n-1)::Utils.numeral(1)::Nil)
val n_s0 = FOLFunction(p, Utils.numeral(n-1)::Utils.numeral(1)::Nil)
val tr = TransRule(Fn(n), n_s0, Utils.numeral(n), s1)
val tr2 = TransRule(Fn(n), fn_s0, n_s0, tr)
val impl = ImpLeftRule(Axiom(fn_eq_n::Nil, fn_eq_n::Nil), tr2, fn_eq_n, Eq( fn_s0, n_s0))
//Instantiate FSucc
val allQFSucc = ForallLeftRule(impl, FSuccX(Utils.numeral(n-1)) , FSucc, Utils.numeral(n-1))
val clFSucc = ContractionLeftRule(allQFSucc, FSucc)
//Instantiate Plus
val allQPlus = ForallLeftRule(clFSucc, PlusX(Utils.numeral(n-1)) , Plus, Utils.numeral(n-1))
val clPlus = ContractionLeftRule(allQPlus, Plus)
      //Instantiate EqPlus (x=(s0), y=Fn(n-1), z=n-1)
val eqx = Utils.numeral(1)
val eqy = Fn(n-1)
val eqz = Utils.numeral(n-1)
val allQEqPlusZ = ForallLeftRule(clPlus, EqPlusXYZ(eqx, eqy, eqz) , EqPlusXY(eqx, eqy), eqz)
val allQEqPlusYZ = ForallLeftRule(allQEqPlusZ, EqPlusXY(eqx, eqy) , EqPlusX(eqx), eqy)
val allQEqPlusXYZ = ForallLeftRule(allQEqPlusYZ, EqPlusX(eqx) , EqPlus, eqx)
val clEqPlus = ContractionLeftRule(allQEqPlusXYZ, EqPlus)
RecProof(clEqPlus, n-1)
}
}
}
/** Constructs the cut-free FOL LK proof of the sequent
*
* AUX, f(0) = 0, Forall x.f(s(x)) = f(x) + s(0) |- f(s^n(0)) = s^n(0)
 * Where AUX is {Transitivity, Symmetry, Reflexivity of =,
* Forall xy.x=y -> s(x) = s(y), f(0) = 0, Forall x.f(s(x)) = f(x) + s(0)}
*/
object SumOfOnesFExampleProof {
val s = "s"
val zero = "0"
val p = "+"
var f = "f"
val x = FOLVar( "x")
val y = FOLVar( "y")
val z = FOLVar( "z")
//Helpers
def Fn(n: Int) = FOLFunction(f, Utils.numeral(n)::Nil)
//Forall xyz.(x = y ^ y = z -> x = z)
val Trans = All(x, All(y, All(z, Imp(And(Eq( x, y) , Eq( y, z) ), Eq( x, z)))))
//Forall xy.(x=y -> s(x) = s(y))
val CongSucc = All(x, All(y, Imp( Eq( x, y), Eq( FOLFunction(s, x::Nil), FOLFunction(s, y::Nil)))))
def CongSuccX(x:FOLTerm) = All(y, Imp( Eq( x, y), Eq( FOLFunction(s, x::Nil), FOLFunction(s, y::Nil))))
def CongSuccXY(x:FOLTerm, y:FOLTerm) = Imp( Eq( x, y), Eq(FOLFunction(s, x::Nil), FOLFunction(s, y::Nil)))
//Forall x.(x + 1 = s(x)) (reversed to avoid the application of the symmetry of =)
val Plus = All(x, Eq( FOLFunction(p, x::Utils.numeral(1)::Nil), FOLFunction(s, x::Nil)))
def PlusX(x:FOLTerm) = Eq( FOLFunction(p, x::Utils.numeral(1)::Nil), FOLFunction(s, x::Nil))
//Definition of f
//f(0) = 0
val FZero = Eq( FOLFunction(f, Utils.numeral(0)::Nil), Utils.numeral(0))
//Forall x.f(s(x)) = f(x) + s(0)
val FSucc = All(x, Eq( FOLFunction(f, FOLFunction(s, x::Nil)::Nil), FOLFunction(p, FOLFunction(f, x::Nil)::Utils.numeral(1)::Nil)))
def FSuccX(x:FOLTerm) = Eq( FOLFunction(f, FOLFunction(s, x::Nil)::Nil), FOLFunction(p, FOLFunction(f, x::Nil)::Utils.numeral(1)::Nil))
//The starting axiom f(n) = n |- f(n) = n
def start(n: Int) = Axiom(Eq( Fn(n), Utils.numeral(n))::Trans::Plus::CongSucc::FSucc::Nil, Eq( Fn(n), Utils.numeral(n))::Nil)
def apply(n: Int) = proof(n)
def proof (n: Int) = TermGenProof(EqChainProof(start(n), n), 0, n)
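  // Illustrative sketch: EqChainProof first derives the chain
  // f(n) = s(f(n-1)), ..., f(1) = s(f(0)), then TermGenProof discharges it,
  // ending in f(s^n(0)) = s^n(0).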
/** Generates a sequent containing, in addition to the formulas in the bottommost sequent of s1,
   * the chain of equations f(n) = s(f(n-1)),...,f(1)=s(f(0)), f(0) = 0.
   * The generated proof employs only the axiom of transitivity and (x = y -> s(x) = s(y)).
*
* TODO should be private - but scala shell does not allow access modifiers when :loading a file
*/
def EqChainProof (s1: LKProof, n: Int) : LKProof = {
if (n <= 0) { s1 }
else {
val tr = TransRule(Fn(n), Utils.iterateTerm(Fn(n-1), s, 1), Utils.numeral(n), s1)
val ax2 = Axiom(Eq( Fn(n-1), Utils.numeral(n-1))::Nil, Eq( Fn(n-1), Utils.numeral(n-1))::Nil)
//Introduces the instantiated form of CongSucc
val impl = ImpLeftRule(ax2, tr, Eq( Fn(n-1), Utils.numeral(n-1)), Eq( Utils.iterateTerm(Fn(n-1), s, 1), Utils.numeral(n)))
//Quantify CongSucc
val cong1 = ForallLeftRule(impl, CongSuccXY(Fn(n-1), Utils.numeral(n-1)), CongSuccX(Fn(n-1)), Utils.numeral(n-1))
val cong2 = ForallLeftRule(cong1, CongSuccX(Fn(n-1)), CongSucc, Fn(n-1))
val cl = ContractionLeftRule(cong2, CongSucc)
EqChainProof(cl, n-1)
}
}
/** Given a proof s1, produced by EqChainProof, generates a proof that
* eliminates the chains of equations and proves the final sequent
* FZero, FSucc, TR, Plus |- f(n) = n.
*
* TODO should be private - but scala shell does not allow access modifiers when :loading a file
*/
def TermGenProof (s1: LKProof, n: Int, targetN: Int) : LKProof = {
if (n >= targetN) { s1 }
else {
val tr = TransRule(Fn(n+1), FOLFunction(p, Fn(n)::Utils.numeral(1)::Nil), Utils.iterateTerm(Fn(n), s, 1), s1)
//Quantify plus
val plus = ForallLeftRule(tr, PlusX(Fn(n)), Plus, Fn(n))
val clPlus = ContractionLeftRule(plus, Plus)
//Quantify fsucc
val fsucc = ForallLeftRule(clPlus, FSuccX(Utils.numeral(n)), FSucc, Utils.numeral(n))
val clFSucc = ContractionLeftRule(fsucc, FSucc)
TermGenProof(clFSucc, n+1, targetN)
}
}
}
// Functions to construct cut-free FOL LK proofs of the sequents
//
// Refl, Trans, CongSuc, ABase, ASuc :- sum( n ) = s^n(0)
//
// where n is an Integer parameter >= 0.
object SumOfOnesExampleProof {
val s = "s"
val zero = "0"
val p = "+"
val x = FOLVar( "x")
val y = FOLVar( "y")
val z = FOLVar( "z")
// axioms
val Refl = All( x, Eq( x, x ))
val Trans = All( x, All( y, All( z, Imp( Eq( x, y ), Imp( Eq( y, z ), Eq( x, z ) ) ) ) ) )
val CongSuc = All( x, All( y, Imp( Eq( x, y ),
Eq( FOLFunction( s, x::Nil ), FOLFunction( s, y::Nil )) ) ) )
val ABase = All( x, Eq(FOLFunction( p, x::FOLConst( zero )::Nil ), x ) )
val ASuc = All( x, All( y, Eq( FOLFunction( p, x::FOLFunction( s, y::Nil )::Nil ), FOLFunction( s, FOLFunction( p, x::y::Nil )::Nil ) ) ) )
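// Note: Utils.numeral(n) is assumed to build the term s^n(0) (cf. the sequents above),
// e.g. numeral(3) = s(s(s(0))); all axioms here are stated over such numerals.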
def apply( n: Int ) = proof( n )
// TODO should be private - but scala shell does not allow access modifiers when :loading a file
def proof( k: Int ) : LKProof = {
if ( k == 0 )
{
val zero_eq_zero = Eq( Utils.numeral( 0 ), Utils.numeral( 0 ) )
val p1 = ForallLeftRule( Axiom( zero_eq_zero::Nil, zero_eq_zero::Nil ), zero_eq_zero, Refl, Utils.numeral( 0 ) )
val p2 = WeakeningLeftRule( p1, Trans )
val p3 = WeakeningLeftRule( p2, CongSuc )
val p4 = WeakeningLeftRule( p3, ABase )
WeakeningLeftRule( p4, ASuc )
}
else if ( k == 1 )
{
val one_eq_one = Eq( Utils.numeral( 1 ), Utils.numeral( 1 ) )
val p1 = ForallLeftRule( Axiom( one_eq_one::Nil, one_eq_one::Nil ), one_eq_one, Refl, Utils.numeral( 1 ) )
val p2 = WeakeningLeftRule( p1, Trans )
val p3 = WeakeningLeftRule( p2, CongSuc )
val p4 = WeakeningLeftRule( p3, ABase )
WeakeningLeftRule( p4, ASuc )
}
else
{
/// atoms
val ssumkm1_eq_k = Eq( FOLFunction( s, sum( k-1 )::Nil ), Utils.numeral( k ) )
val ssumkm1_eq_z = Eq( FOLFunction( s, sum( k-1 )::Nil ), z )
val sumk_eq_k = Eq( sum( k ), Utils.numeral( k ) )
val sumk_eq_y = Eq( sum( k ), y )
val sumk_eq_z = Eq( sum( k ), z )
val y_eq_z = Eq( y, z )
val sumk_eq_ssumkm1 = Eq( sum( k ), FOLFunction( s, sum( k-1 )::Nil ) )
val sumkm1_eq_km1 = Eq( sum( k-1 ), Utils.numeral( k-1 ) )
val sumkm1_eq_y = Eq( sum( k-1 ), y )
val ssumkm1_eq_sy = Eq( FOLFunction( s, sum( k-1 )::Nil ), FOLFunction( s, y::Nil ) )
/// prop. formulas
val Trans2 = Imp( ssumkm1_eq_k, sumk_eq_k )
val Trans3 = Imp( sumk_eq_ssumkm1, Trans2 )
val CongSuc2 = Imp( sumkm1_eq_km1, ssumkm1_eq_k )
/// quant. formulas
val Trans3_1 = All( z, Imp( sumk_eq_ssumkm1, Imp( ssumkm1_eq_z, sumk_eq_z ) ) )
val Trans3_2 = All( y, All( z, Imp( sumk_eq_y, Imp( y_eq_z, sumk_eq_z ) ) ) )
val CongSuc2_1 = All( y, Imp( sumkm1_eq_y, ssumkm1_eq_sy ) )
/// proof
// transitivity (using aux_proof)
val p1 = Axiom( ssumkm1_eq_k::Nil, ssumkm1_eq_k::Nil )
val p2 = Axiom( sumk_eq_k::Nil, sumk_eq_k::Nil )
val p3 = ImpLeftRule( p1, p2, ssumkm1_eq_k, sumk_eq_k)
val p4 = aux_proof( k-1 )
val p5 = ImpLeftRule( p4, p3, sumk_eq_ssumkm1, Trans2 )
val p6 = ForallLeftRule( p5, Trans3, Trans3_1, Utils.numeral( k ) )
val p7 = ForallLeftRule( p6, Trans3_1, Trans3_2, FOLFunction( s, sum( k-1 )::Nil ) )
val p8 = ForallLeftRule( p7, Trans3_2, Trans, sum( k ) )
val p9 = ContractionLeftRule( p8, Trans )
// congruence successor (using IH)
val p10 = proof( k-1 )
val p11 = ImpLeftRule( p10, p9, sumkm1_eq_km1, ssumkm1_eq_k )
val p12 = ContractionLeftRule( p11, Trans )
val p13 = ContractionLeftRule( p12, CongSuc )
val p14 = ContractionLeftRule( p13, ASuc )
val p15 = ContractionLeftRule( p14, ABase )
val p16 = ForallLeftRule( p15, CongSuc2, CongSuc2_1, Utils.numeral( k-1 ) )
val p17 = ForallLeftRule( p16, CongSuc2_1, CongSuc, sum( k-1 ) )
ContractionLeftRule( p17, CongSuc )
}
}
// constructs proof of: Trans, CongSuc, ASuc, ABase :- sum( k + 1 ) = s( sum( k ) )
// TODO should be private - but scala shell does not allow access modifiers when :loading a file
def aux_proof( k: Int ) : LKProof = {
/// atoms
val ssumkp0_eq_ssumk = Eq( FOLFunction( s, FOLFunction( p, sum( k )::Utils.numeral( 0 )::Nil )::Nil ), FOLFunction( s, sum( k )::Nil ) )
val sumkp1_eq_ssumk = Eq( sum( k+1 ), FOLFunction( s, sum( k )::Nil ) )
val sumkp1_eq_ssumkp0 = Eq( sum( k+1 ), FOLFunction( s, FOLFunction( p, sum( k )::Utils.numeral( 0 )::Nil )::Nil ) )
val ssumkp0_eq_z = Eq( FOLFunction( s, FOLFunction( p, sum( k )::Utils.numeral( 0 )::Nil )::Nil ), z )
val sumkp1_eq_z = Eq( sum( k+1 ), z )
val sumkp1_eq_y = Eq( sum( k+1 ), y )
val y_eq_z = Eq( y, z )
val sumkp0_eq_sumk = Eq( FOLFunction( p, sum( k )::Utils.numeral( 0 )::Nil ), sum( k ) )
val sumkp0_eq_y = Eq( FOLFunction( p, sum( k )::Utils.numeral( 0 )::Nil ), y )
val ssumkp0_eq_sy = Eq( FOLFunction( s, FOLFunction( p, sum( k )::Utils.numeral( 0 )::Nil )::Nil ), FOLFunction( s, y::Nil ) )
val sumkpsy_eq_ssumkpy = Eq( FOLFunction( p, sum( k )::FOLFunction( s, y::Nil)::Nil ), FOLFunction( s, FOLFunction( p, sum( k )::y::Nil )::Nil ) )
/// prop. formulas
val Trans2 = Imp( ssumkp0_eq_ssumk, sumkp1_eq_ssumk )
val Trans3 = Imp( sumkp1_eq_ssumkp0, Trans2 )
val Cong2 = Imp( sumkp0_eq_sumk, ssumkp0_eq_ssumk )
/// quant. formulas
val Trans3_1 = All( z, Imp( sumkp1_eq_ssumkp0, Imp( ssumkp0_eq_z, sumkp1_eq_z ) ) )
val Trans3_2 = All( y, All( z, Imp( sumkp1_eq_y, Imp( y_eq_z, sumkp1_eq_z ) ) ) )
val Cong2_1 = All( y, Imp( sumkp0_eq_y, ssumkp0_eq_sy ) )
val ASuc_1 = All( y, sumkpsy_eq_ssumkpy )
/// proof
// transitivity
val p1 = Axiom( ssumkp0_eq_ssumk::Nil, ssumkp0_eq_ssumk::Nil )
val p2 = Axiom( sumkp1_eq_ssumk::Nil, sumkp1_eq_ssumk::Nil )
val p3 = ImpLeftRule( p1, p2, ssumkp0_eq_ssumk, sumkp1_eq_ssumk )
val p4 = Axiom( sumkp1_eq_ssumkp0::Nil, sumkp1_eq_ssumkp0::Nil )
val p5 = ImpLeftRule( p4, p3, sumkp1_eq_ssumkp0, Trans2 )
val p6 = ForallLeftRule( p5, Trans3, Trans3_1, FOLFunction( s, sum( k )::Nil ) )
val p7 = ForallLeftRule( p6, Trans3_1, Trans3_2, FOLFunction( s, FOLFunction( p, sum( k )::Utils.numeral( 0 )::Nil )::Nil ) )
val p8 = ForallLeftRule( p7, Trans3_2, Trans, sum( k+1 ) )
// congruence successor
val p9 = Axiom( sumkp0_eq_sumk::Nil, sumkp0_eq_sumk::Nil )
val p10 = ImpLeftRule( p9, p8, sumkp0_eq_sumk, ssumkp0_eq_ssumk )
val p11 = ForallLeftRule( p10, Cong2, Cong2_1, sum( k ) )
val p12 = ForallLeftRule( p11, Cong2_1, CongSuc, FOLFunction( p, sum( k )::Utils.numeral( 0 )::Nil ) )
// addition successor case
val p13 = ForallLeftRule( p12, sumkp1_eq_ssumkp0, ASuc_1, Utils.numeral( 0 ) )
val p14 = ForallLeftRule( p13, ASuc_1, ASuc, sum( k ) )
// addition base case
ForallLeftRule( p14, sumkp0_eq_sumk, ABase, sum( k ) )
}
// the term (...((1 + 1) + 1) + ... + 1), k must be at least 1
// TODO should be private - but scala shell does not allow access modifiers when :loading a file
def sum( k: Int ) : FOLTerm = {
if ( k == 1 ) Utils.numeral( 1 )
else FOLFunction( p, sum( k-1 )::Utils.numeral( 1 )::Nil )
}
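// For illustration: sum(1) = s(0) and sum(3) is the left-nested term
// p(p(numeral(1), numeral(1)), numeral(1)), i.e. ((1 + 1) + 1).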
}
/**
* Auxiliary structure to deal with axioms of the schema:
* Forall variables cond1 -> cond2 -> ... -> condn -> consequence |- ...
*/
class AllQuantifiedConditionalAxiomHelper(variables: List[FOLVar], conditions: List[FOLFormula], consequence: FOLFormula) {
/**
* Returns the full axiom
*/
def get_axiom(): FOLFormula = {
// TODO: refactor apply_conditional_equality, combine duplicate code
var impl_chain = consequence
for (elem <- conditions.reverse) {
impl_chain = Imp(elem, impl_chain)
}
def quantify(variables: List[FOLVar], body: FOLFormula): FOLFormula = {
variables match {
case Nil => body
case head :: tail => All(head, quantify(tail, body))
}
}
quantify(variables, impl_chain)
}
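// Sketch of what get_axiom yields (concrete instance chosen for illustration): for
//   new AllQuantifiedConditionalAxiomHelper(x::y::Nil, Eq(x, y)::Nil, Eq(y, x))
// it returns All(x, All(y, Imp(Eq(x, y), Eq(y, x)))), i.e. Forall xy.(x=y -> y=x).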
/**
* Uses the axiom with the given expressions in a proof.
* The consequence of the axiom must appear in the current proof.
* The instantiated conditions remain in the antecedent of the returned proof.
*/
def apply(expressions: List[FOLTerm], p: LKProof): LKProof = {
assert(expressions.length == variables.length, "Number of expressions doesn't equal number of variables")
// construct implication with instantiated conditions and consequence
var instantiated_conditions = conditions
var instantiated_consequence = consequence
for (i <- 0 to variables.length - 1) {
val substitute = (x: FOLFormula) => FOLSubstitution(variables(i), expressions(i))(x)
instantiated_conditions = instantiated_conditions.map(substitute)
instantiated_consequence = substitute(instantiated_consequence)
}
val p1 = apply_conditional_equality(instantiated_conditions, instantiated_consequence, p)
// iteratively instantiate all-quantified variables with expression
def instantiate_axiom(expressions: List[FOLTerm], axiom: FOLFormula, p: LKProof): LKProof = {
expressions match {
case Nil => p
case head :: tail => {
val new_axiom = instantiate(axiom, head)
val new_p = instantiate_axiom(tail, new_axiom, p)
ForallLeftRule(new_p, new_axiom, axiom, head)
}
}
}
val ax = get_axiom()
val p2 = instantiate_axiom(expressions, ax, p1)
ContractionLeftRule(p2, ax)
}
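// Typical use (as in the proofs below): with
//   val trans_axiom = new AllQuantifiedConditionalAxiomHelper(x::y::z::Nil, Eq(x, y)::Eq(y, z)::Nil, Eq(x, z))
// the call trans_axiom(List(a, b, c), p) instantiates x:=a, y:=b, z:=c, discharges the
// consequence a=c in p, and leaves the conditions a=b and b=c in the antecedent.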
private def apply_conditional_equality(equalities: List[FOLFormula], result: FOLFormula, p: LKProof) : LKProof = {
equalities match {
case Nil => {
p // no conditions at all
}
case head :: Nil => {
val ax = Axiom(head::Nil, head::Nil)
ImpLeftRule(ax, p, head, result)
}
case head :: tail => {
val ax = Axiom(head::Nil, head::Nil)
var impl_chain = result
for (elem <- tail.reverse) {
impl_chain = Imp(elem, impl_chain)
}
val s2 = apply_conditional_equality(tail, result, p)
ImpLeftRule(ax, s2, head, impl_chain)
}
}
}
}
object UniformAssociativity3ExampleProof {
val s = "s"
val p = "+"
val x = FOLVar( "x")
val y = FOLVar( "y")
val z = FOLVar( "z")
val x1 = FOLVar( "x_1")
val x2 = FOLVar( "x_2")
val y1 = FOLVar( "y_1")
val y2 = FOLVar( "y_2")
def f1( sym: String, arg: FOLTerm ) = FOLFunction(sym, arg::Nil)
def f2( sym: String, arg1: FOLTerm, arg2: FOLTerm ) : FOLTerm = FOLFunction(sym, arg1::arg2::Nil)
def f2( arg1: FOLTerm, sym: String, arg2: FOLTerm ) : FOLTerm = f2(sym, arg1, arg2)
// Axioms
// Trans as from TransRule, possibly unify or generalise
val Trans = All(x, All(y, All(z, Imp(And(Eq( x, y) , Eq( y, z) ), Eq( x, z)))))
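// Note: despite its name, Symm below is the reflexivity axiom (Forall x. x = x);
// it is only used to close goals of the form t = t.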
val Symm = All(x, Eq( x, x))
val Cs = All(x, All(y, Imp(Eq( x, y), Eq( FOLFunction(s, x::Nil), FOLFunction(s, y::Nil)))))
// TODO: port these axioms to new format using AllQuantifiedConditionalAxiomHelper
def refl_ax(): FOLFormula = All(x, refl_ax(x))
def refl_ax(x: FOLTerm): FOLFormula = All(y, refl_ax(x, y))
def refl_ax(x: FOLTerm, y: FOLTerm): FOLFormula = Imp(Eq( x, y), Eq( y, x))
// x=y -> s(x) = s(y)
def cs_ax(): FOLFormula = All(x, cs_ax(x))
def cs_ax(x: FOLTerm): FOLFormula = All(y, cs_ax(x, y))
def cs_ax(x: FOLTerm, y: FOLTerm): FOLFormula = Imp(Eq( x, y), Eq( FOLFunction(s, x::Nil), FOLFunction(s, y::Nil)))
// x1 = x2 -> y1 = y2 -> x1 + y1 = x2 + y2
val cp = new AllQuantifiedConditionalAxiomHelper(x1::x2::y1::y2::Nil, Eq( x1, x2)::Eq(y1, y2)::Nil, Eq(FOLFunction(p, x1::y1::Nil), FOLFunction(p, x2::y2::Nil)))
// Arithmetic axioms
val Ax1 = All(x, Eq(FOLFunction(p, x::Utils.numeral(0)::Nil), x))
// Forall x, y: s(x+y) = x+s(y)
def ax2_ax(): FOLFormula = All(x, All(y, ax2_ax(x, y) ))
def ax2_ax(x: FOLTerm): FOLFormula = All(y, ax2_ax(x, y) )
def ax2_ax(x: FOLTerm, y: FOLTerm): FOLFormula = Eq( f1(s, f2(x, p, y)), f2( x, p, f1(s, y)) )
def apply(n: Int): LKProof = {
assert (n>=1, "n must be >= 1")
val p = gen_proof_step(0, n)
induction_start(n, p)
}
/**
* Close off proof ending in (n + n) + 0 = n + (n + 0) |- ...
*/
def induction_start(n: Int, p0: LKProof): LKProof = {
// show both sides equal to n + n
val n_num = Utils.numeral(n)
val zero = Utils.numeral(0)
val c1 = f2(f2(n_num, p, n_num), p, zero)
val d1 = f2(n_num, p, n_num)
val e1 = f2(n_num, p, f2(n_num, p, zero))
val p1 = TransRule(c1, d1, e1, p0)
// show (n + n) + 0 = (n + n) directly via ax1
val p2 = ForallLeftRule(p1, Eq( c1, d1), Ax1, d1)
val p3 = ContractionLeftRule(p2, Ax1)
// show (n + n) = n + (n + 0)
val p4 = cp(n_num::n_num::n_num::f2(n_num, p, zero)::Nil, p3)
// show n = n and n = n + 0
val p5 = ForallLeftRule(p4, Eq( n_num, n_num), Symm, n_num)
val p6 = ContractionLeftRule(p5, Symm)
val p7 = reflect(f2(n_num, p, zero), n_num, p6)
val p8 = ForallLeftRule(p7, Eq( f2(n_num, p, zero), n_num), Ax1, n_num)
ContractionLeftRule(p8, Ax1)
}
/**
* Returns proof Pi (currently including line above and below Pi), with numerals n, i, i+1:
* (n + n) + i+1 = n + (n + i+1), Ax |- ...
* Pi
* (n + n) + i = n + (n + i), Ax |- ...
*/
def gen_proof_step(i: Int, n: Int) : LKProof = {
val n_num = Utils.numeral(n)
val i_num = Utils.numeral(i)
val ip1_num = Utils.numeral(i+1)
val a1 = f2( f2( n_num, p, n_num ), p, ip1_num )
val a2 = f2( n_num, p, f2( n_num, p, ip1_num ) )
val b1 = f2( n_num, p, f1(s, f2( p, n_num, i_num ) ) )
val p0 =
if (i+1 >= n) {
// start, add axioms
val all_axioms = Trans::cp.get_axiom()::Symm::ax2_ax()::cs_ax()::refl_ax()::Ax1::Nil
def add_ax(start: LKProof, axioms : List[FOLFormula]): LKProof = {
axioms match {
case Nil => start
case head::tail => WeakeningLeftRule(add_ax(start, tail), head)
}
}
val final_expression = Eq( a1, a2 )
val top = Axiom( final_expression::Nil, final_expression::Nil )
add_ax(top, all_axioms)
} else {
gen_proof_step(i+1, n)
}
val p1 = TransRule(a1, b1, a2, p0)
// the left side now contains
// (n + n) + s(i) = n + s(n + i) as well as
// n + s(n + i) = n + (n + s(i))
// use Cp to reduce the latter to s(n + i) = (n + s(i))
val x1_1 = n_num
val x2_1 = n_num
val y1_1 = f1(s, f2( n_num, p, i_num ))
val y2_1 = f2(n_num, p, f1(s, i_num))
val p8 = cp(x1_1::x2_1::y1_1::y2_1::Nil, p1)
// show x1 = x2 by symmetry
val p9 = ForallLeftRule(p8, Eq( x1_1, x2_1 ), Symm, n_num)
val p10 = ContractionLeftRule(p9, Symm)
// show y1 = y2 by Ax2 (i.e. s(n + i) = (n + s(i)) )
val p13 = show_by_ax2(n_num, i_num, p10)
// now we only have (n + n) + s(i) = n + s(n + i) left (c=e); reduce the right-hand side by Ax2 and Trans to s(n + (n + i)) (c=d)
val c1 = f2(f2(n_num, p, n_num), p, f1(s, i_num))
val d1 = f1(s, f2(n_num, p, f2(n_num, p, i_num)))
val e1 = f2(n_num, p, f1(s, f2(n_num, p, i_num)))
val p14 = TransRule(c1, d1, e1, p13)
// show d=e by Ax2
val p15 = show_by_ax2(n_num, f2(n_num, p, i_num), p14)
// next goal: reduce (n + n) + s(i) = s(n + (n + i)) to (n + n) + s(i) = s((n + n) + i) using the IH, Trans and Cs
val c2 = f2(f2(n_num, p, n_num), p, f1(s, i_num))
val d2 = f1(s, f2(f2(n_num, p, n_num), p, i_num))
val e2 = d1
val p16 = TransRule(c2, d2, e2, p15)
val p17 = show_by_cs(f2(f2(n_num, p, n_num), p, i_num), f2(n_num, p, f2(n_num, p, i_num)), p16)
// now we have:
// (n + n) + s(i) = s( (n + n) + i)
// as well as
// (n + n) + i = n + (n + i)
// -> use Ax2
// use reflection to match definition of ax2 afterwards
val p18 = reflect( d2, c2, p17 )
val p19 = show_by_ax2( f2(n_num, p, n_num), i_num, p18)
p19
// we end up with the IH (n + n) + i = n + (n + i)
}
def show_by_ax2(x: FOLTerm, y: FOLTerm, p: LKProof): LKProof = {
val p1 = ForallLeftRule(p, ax2_ax(x, y), ax2_ax(x), y)
val p2 = ForallLeftRule(p1, ax2_ax(x), ax2_ax(), x)
ContractionLeftRule(p2, ax2_ax())
}
def show_by_cs(x: FOLTerm, y: FOLTerm, p: LKProof): LKProof = {
val p1 = apply_conditional_equality( Eq( x, y)::Nil, Eq( FOLFunction(s, x::Nil), FOLFunction(s, y::Nil)), p)
val p2 = ForallLeftRule(p1, cs_ax(x, y), cs_ax(x), y)
val p3 = ForallLeftRule(p2, cs_ax(x), cs_ax(), x)
ContractionLeftRule(p3, cs_ax())
}
/**
* Takes a proof s2 with end-sequent of the form
* (x=y), ... |- ...
* and return one with end-sequent of the form
* (y=x), ... |- ...
*/
def reflect(x: FOLTerm, y: FOLTerm, p: LKProof): LKProof = {
val p1 = apply_conditional_equality( Eq(x,y)::Nil, Eq(y,x), p)
val p2 = ForallLeftRule(p1, refl_ax(x, y), refl_ax(x), y)
val p3 = ForallLeftRule(p2, refl_ax(x), refl_ax(), x)
ContractionLeftRule(p3, refl_ax())
}
def apply_conditional_equality(equalities: List[FOLFormula], result: FOLFormula, p: LKProof) : LKProof = {
equalities match {
case Nil => p
case head :: Nil => {
val ax = Axiom(head::Nil, head::Nil)
ImpLeftRule(ax, p, head, result)
}
case head :: tail => {
val ax = Axiom(head::Nil, head::Nil)
var impl_chain = result
for (elem <- tail.reverse) {
impl_chain = Imp(elem, impl_chain)
}
val s2 = apply_conditional_equality(tail, result, p)
ImpLeftRule(ax, s2, head, impl_chain)
}
}
}
}
/**
* Proof of f(n) = g(n, 1), where f is the head-recursive and g the tail-recursive formulation of the factorial function
*/
object FactorialFunctionEqualityExampleProof {
val p = "+"
val m = "*"
val s = "s"
val f = "f"
val g = "g"
val x = FOLVar("x")
val y = FOLVar("y")
val z = FOLVar("z")
def f1( sym: String, arg: FOLTerm ) = FOLFunction(sym, arg::Nil)
def f2( sym: String, arg1: FOLTerm, arg2: FOLTerm ) : FOLTerm = FOLFunction(sym, arg1::arg2::Nil)
def f2( arg1: FOLTerm, sym: String, arg2: FOLTerm ) : FOLTerm = f2(sym, arg1, arg2)
val f_ax_1 = Eq( f1(f, Utils.numeral(0)), f1(s, Utils.numeral(0)))
val f_ax_2 = parse.fol("Forall x =(f(s(x)), *(s(x) , f(x)))")
val g_ax_1 = new AllQuantifiedConditionalAxiomHelper(y::Nil, Nil, Eq( y, f2(g, Utils.numeral(0), y)))
val g_ax_2 = parse.fol("Forall x Forall y =( g(s(x), y), g(x, *(y , s(x))) )")
val g_compat_2 = new AllQuantifiedConditionalAxiomHelper(x::y::z::Nil, Eq( y, z)::Nil, Eq( f2(g, x, y), f2(g, x, z)))
val trans_axiom = new AllQuantifiedConditionalAxiomHelper(x::y::z::Nil, Eq( x, y)::Eq( y, z)::Nil, Eq( x, z))
val symm_axiom = All(x, Eq( x, x))
val refl_axiom = new AllQuantifiedConditionalAxiomHelper(x::y::Nil, Eq( x, y)::Nil, Eq( y, x))
val compat_mul_axiom = new AllQuantifiedConditionalAxiomHelper(x::y::z::Nil, Eq( x, y)::Nil, Eq( f2(z, m, x), f2(z, m, y)))
val assoc_mul_axiom = new AllQuantifiedConditionalAxiomHelper(x::y::z::Nil, Nil, Eq( f2(x, m, f2(y, m, z)), f2(f2(x, m, y), m, z)))
val mul_neutral_axiom = new AllQuantifiedConditionalAxiomHelper(x::Nil, Nil, Eq( f2(x, m, Utils.numeral(1)), x))
// this second axiom saves us from adding commutativity of multiplication
val mul_neutral_axiom_2 = new AllQuantifiedConditionalAxiomHelper(x::Nil, Nil, Eq( f2(Utils.numeral(1), m, x), x))
def apply(n :Int): LKProof = induction_steps(n)
def induction_steps(n: Int): LKProof = {
val axiom_formulae = Eq( f1(f, Utils.numeral(n)), f2(g, Utils.numeral(n), Utils.numeral(1)))::Nil
val axiom : LKProof = Axiom(axiom_formulae, axiom_formulae)
// add axioms
val all_axioms = List[FOLFormula](f_ax_1, f_ax_2, g_ax_1.get_axiom(), g_ax_2, symm_axiom, refl_axiom.get_axiom(),
trans_axiom.get_axiom(), compat_mul_axiom.get_axiom(), assoc_mul_axiom.get_axiom(), g_compat_2.get_axiom(),
mul_neutral_axiom.get_axiom(), mul_neutral_axiom_2.get_axiom())
val p1 = all_axioms.foldLeft( axiom )( (proof, elem) => WeakeningLeftRule(proof, elem))
val n_num = Utils.numeral(n)
/**
* Returns (( ([start_value*]n)*(n-1) ) * ... * ) k
*/
def get_partial_factorial_term(n: Int, k: Int, start_value: Option[FOLTerm] = None): FOLTerm = {
if (n <= k) {
if (n == k) {
start_value match {
case Some(value) => f2(value, m, Utils.numeral(n))
case None => Utils.numeral(n)
}
} else throw new Exception("k larger than n in partial factorial")
} else {
f2(m, get_partial_factorial_term(n, k+1, start_value), Utils.numeral(k))
}
}
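// For illustration (hypothetical call): get_partial_factorial_term(3, 1) yields the
// left-expanded term (3 * 2) * 1; with start_value = Some(v) the innermost factor
// becomes v * 3 instead of 3.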
def induction_step_rec(p0: LKProof, k: Int): LKProof = {
val k_num = Utils.numeral(k)
val zero = Utils.numeral(0)
val one = Utils.numeral(1)
val part_fac = if (n==k) one else get_partial_factorial_term(n, k+1)
val part_fac_next = get_partial_factorial_term(n, k)
val f_k_term = if (n == k) f1(f, k_num) // f(k)
else f2(m, part_fac, f1(f, k_num)) // part_fac * f(k)
val g_k_term = f2(g, k_num, part_fac)
if (k == 0) {
// we have: n! * f(0) = g(0, ( ... (( (1 * n) * (n-1) ) * (n-2) ) * ... ) * 1 )
// use f(0) = s(0)
val p1 = trans_axiom( List( f_k_term, f2(part_fac, m, f1(s, zero)), g_k_term), p0 )
val p2 = compat_mul_axiom( List( f1(f, zero), f1(s, zero), part_fac), p1 )
val p3 = ContractionLeftRule( p2, f_ax_1 )
// use g(0, y) = y
val p4 = trans_axiom( List(f2(part_fac, m, f1(s, zero)), part_fac, g_k_term), p3)
val p5 = g_ax_1( part_fac::Nil, p4 )
// the formula actually says n! * 1 = n!, we have to get rid of the 1
val p6 = trans_axiom( List(f2(part_fac, m, one), part_fac, part_fac), p5 )
val p7 = mul_neutral_axiom( List(part_fac), p6)
val p8 = ForallLeftRule(p7, Eq( part_fac, part_fac), symm_axiom, part_fac)
val p9 = ContractionLeftRule(p8, symm_axiom)
p9
} else {
// lhs contains part_fac * f(k) = g(k, part_fac)
val km1_num = Utils.numeral(k-1) // must not be evaluated for k == 0
val f_km1_term = f2(m, part_fac_next, f1(f, km1_num)) // (part_fac * k) * f(k-1)
val g_km1_term = f2(g, km1_num, part_fac_next)
// first step: decompose f: part_fac * k * f(k-1) = g(k, 1*part_fac)
val p1 = trans_axiom( List(f_k_term, f_km1_term, g_k_term), p0)
val p3 =
if (n==k) {
// use axiom directly, part_fac is empty
val p1_0 = ForallLeftRule(p1, Eq( f_k_term, f_km1_term), f_ax_2, km1_num)
ContractionLeftRule(p1_0, f_ax_2)
} else {
// the antecedent contains something along the lines of:
// 4*f(3) = (4*3) * f(2) or
// (4*3)*f(2) = ((4*3)*2) * f(1)
// we however need the last part exposed and grouped to be able to use compat & f_ax2 to show it, like in: 4*f(3) = 4* (3*f(2))
// use Trans to expose it and show it, other part can be shown by associativity
// step in between (=yTrans):
// 4 * (3*f(2)) or
// (4*3) * (2*f(1))
val yTrans = f2(part_fac, m, f2(k_num, m, f1(f, km1_num)))
val p1_0 = trans_axiom( f_k_term::yTrans::f_km1_term::Nil, p1)
// show by compat, then f_ax_2: part_fac * f(k) = part_fac * (k * f(k-1))
val f_k = f1(f, k_num)
val k_f_km1 = f2(k_num, m, f1(f, km1_num))
val p1_1 = compat_mul_axiom( List(f_k, k_f_km1, part_fac), p1_0)
val p1_2 = ForallLeftRule(p1_1, Eq( f_k, k_f_km1), f_ax_2, km1_num)
val p1_3 = ContractionLeftRule(p1_2, f_ax_2)
// show by assoc: part_fac * (k * f(k-1)) = (part_fac * k) * f(k-1)
val p1_4 = assoc_mul_axiom( List(part_fac, k_num, f1(f, km1_num)), p1_3)
p1_4
}
// now transform k * f(k-1) = g(k, part_fac) to k * f(k-1) = g(k-1, part_fac * k)
val p4 = trans_axiom( List(f_km1_term, g_km1_term, g_k_term), p3)
// show g(k, part_fac) = g(k-1, part_fac*k) (need to use reflection to get to this form first)
val p4_2 = refl_axiom( g_k_term::g_km1_term::Nil, p4 )
val p5 =
if (n==k) {
// g is initially called with a 1 as second argument, but we want to get rid of it to make the final result equal to the one f produces
// use trans to split into g-axiom part and part where the two expressions only differ by this extra one
val g_intermed = f2(g, km1_num, f2(one, m, part_fac_next))
val p5_1 = trans_axiom( g_k_term::g_intermed::g_km1_term::Nil, p4_2 )
// show g(n, 1) = g(n-1, 1*n) by g_ax_2
val intermed = All(y, Eq( f2(g, k_num, y), f2(g, km1_num, f2(y, m, k_num))))
val p5_2 = ForallLeftRule(p5_1, Eq( g_k_term, g_intermed), intermed, one)
val p5_3 = ForallLeftRule(p5_2, intermed, g_ax_2, km1_num)
val p5_4 = ContractionLeftRule(p5_3, g_ax_2)
// show g(n-1, 1*n) = g(n-1, n) by g_compat_2
val p5_5 = g_compat_2( List(km1_num, f2(one, m, k_num), k_num), p5_4)
// show 1 * k = k
val p5_6 = mul_neutral_axiom_2( List(k_num), p5_5 )
p5_6
} else {
val intermed = All(y, Eq( f2(g, f1(s, km1_num), y), f2(g, km1_num, f2(m, y, f1(s, km1_num)))))
val p6 = ForallLeftRule(p4_2, Eq( g_k_term, g_km1_term), intermed, part_fac)
val p7 = ForallLeftRule(p6, intermed, g_ax_2, km1_num)
val p8 = ContractionLeftRule(p7, g_ax_2)
p8
}
induction_step_rec(p5, k-1)
}
}
induction_step_rec(p1, n)
}
}
| gisellemnr/gapt | examples/ProofSequences.scala | Scala | gpl-3.0 | 47,330 |
package org.scalajs.testsuite.utils
object AssertThrows {
/** Backport implementation of Assert.assertThrows to be used until JUnit 4.13 is
* released. See org.junit.Assert.scala in jUnitRuntime.
*/
private def assertThrowsBackport(expectedThrowable: Class[_ <: Throwable],
runnable: ThrowingRunnable): Unit = {
expectThrowsBackport(expectedThrowable, runnable)
}
/** Backport implementation of Assert.expectThrows to be used until JUnit 4.13 is
* released. See org.junit.Assert.scala in jUnitRuntime.
*/
private def expectThrowsBackport[T <: Throwable](expectedThrowable: Class[T],
runnable: ThrowingRunnable): T = {
try {
runnable.run()
val message =
s"expected ${expectedThrowable.getSimpleName} to be thrown," +
" but nothing was thrown"
throw new AssertionError(message)
} catch {
case actualThrown: Throwable =>
if (expectedThrowable.isInstance(actualThrown)) {
actualThrown.asInstanceOf[T]
} else {
val mismatchMessage = "unexpected exception type thrown; expected " +
expectedThrowable.getSimpleName + " but was " + actualThrown.getClass.getSimpleName
val assertionError = new AssertionError(mismatchMessage)
assertionError.initCause(actualThrown)
throw assertionError
}
}
}
/** Backport implementation of Assert.ThrowingRunnable to be used until
* JUnit 4.13 is released. See org.junit.Assert.scala in jUnitRuntime.
*/
private trait ThrowingRunnable {
def run(): Unit
}
private def throwingRunnable(code: => Unit): ThrowingRunnable = {
new ThrowingRunnable {
def run(): Unit = code
}
}
def assertThrows[T <: Throwable, U](expectedThrowable: Class[T], code: => U): Unit =
assertThrowsBackport(expectedThrowable, throwingRunnable(code.asInstanceOf[Unit]))
def expectThrows[T <: Throwable, U](expectedThrowable: Class[T], code: => U): T =
expectThrowsBackport(expectedThrowable, throwingRunnable(code.asInstanceOf[Unit]))
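// Usage sketch (illustrative only):
//   assertThrows(classOf[IllegalArgumentException], require(false))
//   val e = expectThrows(classOf[ArithmeticException], 1 / 0)
//   // e is the caught ArithmeticException and can be inspected further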
}
| scala-js/scala-js-java-logging | testSuite/shared/src/test/scala/org/scalajs/testsuite/utils/AssertThrows.scala | Scala | bsd-3-clause | 2,048 |
/*
* Copyright (c) 2014-2021 by The Monix Project Developers.
* See the project homepage at: https://monix.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package monix.eval
import monix.execution.exceptions.DummyException
import scala.util.{Failure, Success}
import scala.concurrent.duration._
object TaskAsyncSuite extends BaseTestSuite {
test("Task.never should never complete") { implicit s =>
val t = Task.never[Int]
val f = t.runToFuture
s.tick(365.days)
assertEquals(f.value, None)
}
test("Task.async should execute") { implicit s =>
val task = Task.async0[Int] { (ec, cb) =>
ec.execute { () =>
cb.onSuccess(1)
}
}
val f = task.runToFuture
assertEquals(f.value, None)
s.tick()
assertEquals(f.value, Some(Success(1)))
}
test("Task.async should signal errors in register") { implicit s =>
val ex = DummyException("dummy")
val task = Task.async0[Int]((_, _) => throw ex)
val result = task.runToFuture; s.tick()
assertEquals(result.value, Some(Failure(ex)))
assertEquals(s.state.lastReportedError, null)
}
test("Task.async should be stack safe") { implicit s =>
def signal(n: Int) = Task.async0[Int]((_, cb) => cb.onSuccess(n))
def loop(n: Int, acc: Int): Task[Int] =
signal(1).flatMap { x =>
if (n > 0) loop(n - 1, acc + x)
else Task.now(acc)
}
val f = loop(10000, 0).runToFuture; s.tick()
assertEquals(f.value, Some(Success(10000)))
}
test("Task.async works for immediate successful value") { implicit sc =>
val task = Task.async[Int](_.onSuccess(1))
assertEquals(task.runToFuture.value, Some(Success(1)))
}
test("Task.async works for immediate error") { implicit sc =>
val e = DummyException("dummy")
val task = Task.async[Int](_.onError(e))
assertEquals(task.runToFuture.value, Some(Failure(e)))
}
test("Task.async is memory safe in flatMap loops") { implicit sc =>
def signal(n: Int): Task[Int] = Task.async(_.onSuccess(n))
def loop(n: Int, acc: Int): Task[Int] =
signal(n).flatMap { n =>
if (n > 0) loop(n - 1, acc + 1)
else Task.now(acc)
}
val f = loop(10000, 0).runToFuture; sc.tick()
assertEquals(f.value, Some(Success(10000)))
}
test("Task.async0 works for immediate successful value") { implicit sc =>
val task = Task.async0[Int]((_, cb) => cb.onSuccess(1))
assertEquals(task.runToFuture.value, Some(Success(1)))
}
test("Task.async0 works for async successful value") { implicit sc =>
val f = Task
.async0[Int]((s, cb) => s.execute(() => cb.onSuccess(1)))
.runToFuture
sc.tick()
assertEquals(f.value, Some(Success(1)))
}
test("Task.async0 works for async error") { implicit sc =>
val e = DummyException("dummy")
val f = Task
.async0[Int]((s, cb) => s.execute(() => cb.onError(e)))
.runToFuture
sc.tick()
assertEquals(f.value, Some(Failure(e)))
}
test("Task.async0 is memory safe in synchronous flatMap loops") { implicit sc =>
def signal(n: Int): Task[Int] = Task.async0((_, cb) => cb.onSuccess(n))
def loop(n: Int, acc: Int): Task[Int] =
signal(n).flatMap { n =>
if (n > 0) loop(n - 1, acc + 1)
else Task.now(acc)
}
val f = loop(10000, 0).runToFuture; sc.tick()
assertEquals(f.value, Some(Success(10000)))
}
test("Task.async0 is memory safe in async flatMap loops") { implicit sc =>
def signal(n: Int): Task[Int] =
Task.async0((s, cb) => s.execute(() => cb.onSuccess(n)))
def loop(n: Int, acc: Int): Task[Int] =
signal(n).flatMap { n =>
if (n > 0) loop(n - 1, acc + 1)
else Task.now(acc)
}
val f = loop(10000, 0).runToFuture; sc.tick()
assertEquals(f.value, Some(Success(10000)))
}
}
| monifu/monifu | monix-eval/shared/src/test/scala/monix/eval/TaskAsyncSuite.scala | Scala | apache-2.0 | 4,332 |
package com.github.tminglei.slickpg
import slick.driver.PostgresDriver
import slick.jdbc.{PositionedResult, JdbcType}
/** simple inet string wrapper */
case class InetString(value: String) {
lazy val isIPv6 = value.contains(":")
lazy val address = value.split("/")(0)
lazy val masklen: Int = {
val parts = value.split("/")
if (parts.length > 1) parts(1).toInt
else if (isIPv6) 128
else 32
}
}
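// Illustration of the wrapper semantics above:
//   InetString("192.168.2.1/24").address == "192.168.2.1" and masklen == 24
//   InetString("192.168.2.1").masklen == 32 (IPv4 default)
//   InetString("2001:db8::1").masklen == 128 (isIPv6 == true, IPv6 default)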
/** simple mac addr string wrapper */
case class MacAddrString(value: String)
/**
* simple inet/macaddr support; if all you want is just getting from / saving to db, and using pg net operations/methods, it should be enough
*/
trait PgNetSupport extends net.PgNetExtensions with utils.PgCommonJdbcTypes { driver: PostgresDriver =>
import driver.api._
/// alias
trait NetImplicits extends SimpleNetImplicits
trait SimpleNetImplicits {
implicit val simpleInetTypeMapper: JdbcType[InetString] =
new GenericJdbcType[InetString]("inet",
(v) => InetString(v),
(v) => v.value,
hasLiteralForm = false
)
implicit val simpleMacAddrTypeMapper: JdbcType[MacAddrString] =
new GenericJdbcType[MacAddrString]("macaddr",
(v) => MacAddrString(v),
(v) => v.value,
hasLiteralForm = false
)
implicit def simpleInetColumnExtensionMethods(c: Rep[InetString]) = {
new InetColumnExtensionMethods[InetString, InetString](c)
}
implicit def simpleInetOptionColumnExtensionMethods(c: Rep[Option[InetString]]) = {
new InetColumnExtensionMethods[InetString, Option[InetString]](c)
}
implicit def simpleMacAddrColumnExtensionMethods(c: Rep[MacAddrString]) = {
new MacAddrColumnExtensionMethods[MacAddrString, MacAddrString](c)
}
implicit def simpleMacAddrOptionColumnExtensionMethods(c: Rep[Option[MacAddrString]]) = {
new MacAddrColumnExtensionMethods[MacAddrString, Option[MacAddrString]](c)
}
}
trait SimpleNetPlainImplicits {
import scala.reflect.classTag
import utils.PlainSQLUtils._
// to support 'nextArray[T]/nextArrayOption[T]' in PgArraySupport
{
addNextArrayConverter((r) => utils.SimpleArrayUtils.fromString(InetString.apply)(r.nextString()))
addNextArrayConverter((r) => utils.SimpleArrayUtils.fromString(MacAddrString.apply)(r.nextString()))
}
// used to support code gen
if (driver.isInstanceOf[ExPostgresDriver]) {
driver.asInstanceOf[ExPostgresDriver].bindPgTypeToScala("inet", classTag[InetString])
driver.asInstanceOf[ExPostgresDriver].bindPgTypeToScala("macaddr", classTag[MacAddrString])
}
implicit class PgNetPositionedResult(r: PositionedResult) {
def nextIPAddr() = nextIPAddrOption().orNull
def nextIPAddrOption() = r.nextStringOption().map(InetString)
def nextMacAddr() = nextMacAddrOption().orNull
def nextMacAddrOption() = r.nextStringOption().map(MacAddrString)
}
/////////////////////////////////////////////////////////////////
implicit val getIPAddr = mkGetResult(_.nextIPAddr())
implicit val getIPAddrOption = mkGetResult(_.nextIPAddrOption())
implicit val setIPAddr = mkSetParameter[InetString]("inet", _.value)
implicit val setIPAddrOption = mkOptionSetParameter[InetString]("inet", _.value)
implicit val getMacAddr = mkGetResult(_.nextMacAddr())
implicit val getMacAddrOption = mkGetResult(_.nextMacAddrOption())
implicit val setMacAddr = mkSetParameter[MacAddrString]("macaddr", _.value)
implicit val setMacAddrOption = mkOptionSetParameter[MacAddrString]("macaddr", _.value)
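// With these implicits in scope, plain SQL can read/write the wrappers directly;
// a sketch, assuming a table t(addr inet, mac macaddr):
//   sql"select addr from t".as[InetString]  // via getIPAddr
//   sqlu"insert into t values ($ip, $mac)"  // via setIPAddr / setMacAddr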
}
}
| vikraman/slick-pg | src/main/scala/com/github/tminglei/slickpg/PgNetSupport.scala | Scala | bsd-2-clause | 3,605 |
package br.com.caelum.hibernatequerydsl
import org.junit.Ignore
import scala.reflect.BeanProperty
import org.hibernate.cfg.Configuration
import br.com.caelum.hibernatequerydsl.PimpedSession._
import br.com.caelum.hibernatequerydsl.Expression._
import org.hibernate.Session
import org.hibernate.criterion.Order._
import org.junit.{ Test, Before, After }
import org.junit.Assert._
import br.com.caelum.hibernatequerydsl.TypeSafeCondition._
class PimpedClassTest {
private var session: Session = _
private val userToQuery = new User
private val addressToQuery = new Address
@Before
def setUp {
val cfg = new Configuration();
//cfg.configure().setProperty("hibernate.connection.url", "jdbc:hsqldb:mem:mydvdsDB");
session = cfg.configure().buildSessionFactory().openSession();
session.beginTransaction();
}
/**
* Undoing all changes to database
* @throws Exception
*/
@After
def tearDown {
if (session != null && session.getTransaction().isActive()) {
session.getTransaction().rollback();
}
}
private def newUser(name: String = null, age: Int = 0) = {
val user = new User
user setName name
user setAge age
session.save(user)
user
}
private def newAddress(street: String, user: User) = {
val address = new Address
address setStreet street
address setUser user
session.save(address)
address
}
@Test
def shouldListAllObjects {
newUser("alberto")
newUser("alberto")
val users = session.all[User]
assertEquals(2, users size)
}
@Test
def shouldVerifyIfExists {
newUser("alberto")
assertTrue(session.exists[User])
}
@Test
def shouldVerifyIfNotExists {
assertFalse(session.exists[User])
}
@Test
def shouldCount {
newUser("alberto")
newUser("alberto")
assertEquals(2, session.count[User])
}
@Test
def shouldGetFirstBasedOnId {
val alberto = newUser("alberto")
newUser("alberto2")
val userRetrieved = session.first[User]
assertEquals(alberto, userRetrieved)
}
@Test
def shouldGetFirstBasedOnSomeField {
val alberto = newUser("alberto")
val joao = newUser("joao")
val userRetrieved = session.from[User].orderBy(_.getName).desc.first[User]
assertEquals(joao, userRetrieved)
}
@Test
def shouldGetTheLastBasedOnId {
val alberto = newUser("alberto")
val joao = newUser("joao")
val userRetrieved = session.last[User]
assertEquals(joao, userRetrieved)
}
@Test
def shouldGetTheLastDescOrderedOnSomeField {
val alberto = newUser("alberto")
val joao = newUser("joao")
val userRetrieved = session.from[User].orderBy(_.getName).desc.last[User]
assertEquals(alberto, userRetrieved)
}
@Test
def shouldGetTheLastDescOrderedOnSomeFields {
val alberto = newUser("alberto")
val joao = newUser("joao")
val userRetrieved = session.from[User].orderBy(_.getName).desc.last[User]
assertEquals(alberto, userRetrieved)
}
@Test
def shouldGetTheLastAscOrderedOnSomeField {
val alberto = newUser("alberto")
val joao = newUser("joao")
val userRetrieved = session.from[User].orderBy(_.getName).asc.last[User]
assertEquals(joao, userRetrieved)
}
@Test
def shouldGetTheLastAscAndDescOrderedOnSomeFields {
val alberto = newUser("alberto", 10)
val alberto2 = newUser("alberto", 20)
val userRetrieved = session.from[User].orderBy(_.getName).asc.orderBy(_.getAge).desc.last[User]
assertEquals(alberto, userRetrieved)
}
@Test
def shouldDoASimpleJoin {
val alberto = newUser("alberto")
val address = newAddress("rua da casa de nao sei quem", alberto)
val address2 = newAddress("rua da casa de nao sei quem", alberto)
val list = session.from[Address].join(_.getUser).asList[Address]
assertEquals(2, list size)
}
@Test
def shouldDoASimpleJoinBasedOnSomeField1 {
val alberto = newUser("alberto")
val alberto2 = newUser("alberto2")
val address = newAddress("rua da casa de nao sei quem", alberto)
val address2 = newAddress("rua da casa de nao sei quem", alberto2)
val list = session.from[Address].join(_.getUser).where(_.getName.\==("alberto2")).asList[Address]
assertEquals(1, list size)
}
@Test
def shouldDoASimpleJoinBasedOnSomeFieldsWithAnd {
val alberto = newUser("alberto", 10)
val alberto2 = newUser("alberto2", 20)
val address = newAddress("rua da casa de nao sei quem", alberto)
val address2 = newAddress("rua da casa de nao sei quem", alberto2)
val list = session.from[Address].join(_.getUser).where(_.getName \== ("alberto2")).where(_.getAge \== alberto2.getAge).asList[Address]
assertEquals(1, list size)
}
@Test
def shouldDoASimpleQueryBasedOnSomeFields1 {
val alberto = newUser("alberto", 10)
val alberto2 = newUser("alberto2", 20)
val alberto3 = newUser("alberto3", 30)
val alberto4 = newUser("alberto4", 40)
val list = session.from[User].where(_.getAge \> alberto.getAge).asList[User]
assertEquals(3, list size)
}
@Test
def shouldDoASimpleQueryBasedOnSomeFields2 {
val alberto = newUser("alberto", 10)
val alberto2 = newUser("alberto2", 20)
val alberto3 = newUser("alberto3", 30)
val alberto4 = newUser("alberto4", 40)
var list = session.from[User].where(_.getAge \>= alberto.getAge).asList[User]
assertEquals(4, list size)
}
@Test
def shouldDoASimpleQueryBasedOnSomeFields3 {
val alberto = newUser("alberto", 10)
val alberto2 = newUser("alberto2", 20)
val alberto3 = newUser("alberto3", 30)
val alberto4 = newUser("alberto4", 40)
val list = session.from[User].where(_.getAge \< alberto2.getAge).asList[User]
assertEquals(1, list size)
}
@Test
def shouldDoASimpleQueryBasedOnSomeFields4 {
val alberto = newUser("alberto", 10)
val alberto2 = newUser("alberto2", 20)
val alberto3 = newUser("alberto3", 30)
val alberto4 = newUser("alberto4", 40)
val list = session.from[User].where(_.getAge \<= alberto2.getAge).asList[User]
assertEquals(2, list size)
}
@Test
def shouldDoASimpleQueryBasedOnSomeFields5 {
val alberto = newUser("alberto", 10)
val alberto2 = newUser("alberto2", 20)
val alberto3 = newUser("alberto3", 30)
val alberto4 = newUser("outrute", 40)
val list = session.from[User].where(_.getAge \>= alberto2.getAge).where(_.getName like "alberto").asList[User]
assertEquals(2, list size)
}
@Test
def shouldDoASimpleQueryBasedOnSomeFields6 {
val alberto = newUser(null, 10)
val alberto2 = newUser("alberto2", 20)
val alberto3 = newUser("alberto3", 30)
val alberto4 = newUser("outrute", 40)
val list = session.from[User].where(_.getName isNull).asList[User]
assertEquals(1, list size)
}
@Test
def shouldDoASimpleQueryBasedOnSomeFields7 {
val alberto = newUser(null, 10)
val alberto2 = newUser("alberto2", 20)
val alberto3 = newUser("alberto3", 30)
val alberto4 = newUser("outrute", 40)
val list = session.from[User].where(_.getName isNotNull).asList[User]
assertEquals(3, list size)
}
@Test
def shouldDoASimpleQueryBasedOnSomeFields8 {
val alberto = newUser("alberto", 10)
val alberto2 = newUser("alberto2", 20)
val alberto3 = newUser("alberto3", 30)
val alberto4 = newUser("alberto4", 40)
val list = session.from[User].where(_.getName \!= "alberto").asList[User]
assertEquals(3, list size)
}
@Test
def shouldExecuteJustASimpleHQL {
val alberto = newUser("alberto", 10)
val alberto2 = newUser("alberto2", 20)
val alberto3 = newUser("alberto3", 30)
val alberto4 = newUser("outrute", 40)
val list = session.query("from User").asList[User]
assertEquals(4, list size)
}
@Test
def shouldAssingParametersForHQL {
val alberto = newUser("alberto", 10)
val alberto2 = newUser("alberto2", 20)
val alberto3 = newUser("alberto3", 30)
val alberto4 = newUser("outrute", 40)
val list = session.query("from User where name=:name and age =:age").withParams("name" -> "alberto", "age" -> alberto.getAge).asList[User]
assertEquals(1, list size)
}
@Test
def shouldGroupUserByStreet {
val alberto = newUser("alberto", 10)
val alberto2 = newUser("alberto2", 20)
val alberto3 = newUser("alberto3", 15)
val alberto4 = newUser("alberto4", 30)
val address = newAddress("x", alberto)
val address2 = newAddress("x", alberto2)
val address3 = newAddress("y", alberto3)
val address4 = newAddress("y", alberto4)
val list = session.from[User].join(_.getAddresses).groupBy("addresses.street").asList[User]
assertEquals(2, list size)
}
@Test
def shouldGroupUserByStreetWithAvgAge {
val alberto = newUser("alberto", 10)
val alberto2 = newUser("alberto2", 20)
val alberto3 = newUser("alberto3", 15)
val alberto4 = newUser("alberto4", 30)
val address = newAddress("x", alberto)
val address2 = newAddress("x", alberto2)
val address3 = newAddress("y", alberto3)
val address4 = newAddress("y", alberto4)
val list = session.from[User].join(_.getAddresses).groupBy("addresses.street").avg[User](_.getAge).asList[Array[Object]]
assertEquals(2, list size)
assertEquals(15.0, list.head(1))
}
@Test
def shouldGroupUserByStreetWithSumAge {
val alberto = newUser("alberto", 10)
val alberto2 = newUser("alberto2", 20)
val alberto3 = newUser("alberto3", 15)
val alberto4 = newUser("alberto4", 30)
val address = newAddress("x", alberto)
val address2 = newAddress("x", alberto2)
val address3 = newAddress("y", alberto3)
val address4 = newAddress("y", alberto4)
val list = session.from[User].join(_.getAddresses).groupBy("addresses.street").sum[User](_.getAge).asList[Array[Object]]
assertEquals(2, list size)
assertEquals(30L, list.head(1))
}
@Test
def shouldGroupUserByStreetWithCountAge {
val alberto = newUser("alberto", 10)
val alberto2 = newUser("alberto2", 20)
val alberto3 = newUser("alberto3", 15)
val alberto4 = newUser("alberto4", 30)
val address = newAddress("x", alberto)
val address2 = newAddress("x", alberto2)
val address3 = newAddress("y", alberto3)
val address4 = newAddress("y", alberto4)
val list = session.from[User].join(_.getAddresses).groupBy("addresses.street").count[User](_.getAge).asList[Array[Object]]
assertEquals(2, list size)
assertEquals(2L, list.head(1))
}
@Test
def shouldListJustUsersWithAddresses {
val alberto = newUser("alberto", 10)
val alberto2 = newUser("alberto2", 20)
val alberto3 = newUser("alberto3", 15)
val alberto4 = newUser("alberto4", 30)
val address = newAddress("x", alberto)
val address2 = newAddress("x", alberto2)
val address3 = newAddress("y", alberto3)
val list = session.from[User].where.has(_.getAddresses).asList[User]
assertEquals(3, list size)
}
@Test
def shouldListJustUsersWithAddressesFilteringBySomeAttribute {
val alberto = newUser("alberto", 10)
val alberto2 = newUser("alberto2", 20)
val alberto3 = newUser("alberto3", 15)
val alberto4 = newUser("alberto4", 30)
val address = newAddress("x", alberto)
val address2 = newAddress("x", alberto2)
val address3 = newAddress("y", alberto3)
import br.com.caelum.hibernatequerydsl.TypeUnsafe._
val list = session.from[User].includes(_.getAddresses).where("addresses.street" equal "y").asList[User]
assertEquals(1, list size)
}
@Test
def shouldListJustUsersWithAddressesFilteringBySomeAttribute2 {
val alberto = newUser("alberto", 10)
val alberto2 = newUser("alberto2", 20)
val alberto3 = newUser("alberto3", 15)
val alberto4 = newUser("alberto4", 30)
val address = newAddress("x", alberto)
val address2 = newAddress("x", alberto2)
val address3 = newAddress("y", alberto3)
import br.com.caelum.hibernatequerydsl.TypeUnsafe._
val list = session.from[User].includes(_.getAddresses).where("addresses.street" equal "y").asList[User]
assertEquals(1, list size)
}
@Test
def shouldSelectByFields {
val alberto = newUser("alberto", 10)
val alberto2 = newUser("alberto2", 20)
val alberto3 = newUser("alberto3", 15)
val alberto4 = newUser("alberto4", 30)
val address = newAddress("x", alberto)
val address2 = newAddress("x", alberto2)
val address3 = newAddress("y", alberto3)
val list = session.from[User].select("name").asList[String]
assertEquals("alberto", list.head)
}
@Test
def shouldSelectDistinctedObjects {
val alberto = newUser("alberto", 10)
val alberto2 = newUser("alberto", 20)
val alberto3 = newUser("alberto", 15)
val alberto4 = newUser("alberto4", 30)
val list = session.from[User].distinct(_.getName).asList[String]
assertEquals(2, list.size)
}
@Ignore //TODO make the ResultTransformer work.
def shouldTransformArrayToMyResultTransformer {
val alberto = newUser("alberto", 10)
val alberto2 = newUser("alberto2", 20)
val alberto3 = newUser("alberto3", 15)
val alberto4 = newUser("alberto4", 30)
val address = newAddress("x", alberto)
val address2 = newAddress("x", alberto2)
val address3 = newAddress("y", alberto3)
val list = session.from[User].join(_.getAddresses).select("name").selectWithAliases("addresses.street".alias("street")).transformToBean[StreetWithName].asList
assertEquals("alberto", list.head.getName)
}
}
| asouza/hibernate-query-dsl | src/test/scala/br/com/caelum/hibernatequerydsl/PimpedSessionTest.scala | Scala | lgpl-2.1 | 13,499 |
package com.twitter.finagle.exp.mysql
import com.twitter.finagle.exp.mysql.transport.BufferWriter
import java.util.logging.Logger
import language.implicitConversions
/**
* A value of type `A` can implicitly convert to a `Parameter` if an evidence `CanBeParameter[A]` is
* available in scope. This type is not to be instantiated in any other manner.
*/
sealed trait Parameter {
type A
def value: A
def evidence: CanBeParameter[A]
final def writeTo(writer: BufferWriter): Unit = {
evidence.write(writer, value)
}
final def size: Int = evidence.sizeOf(value)
final def typeCode: Short = evidence.typeCode(value)
}
/**
* Note: There is a Java-friendly API for this object: [[com.twitter.finagle.exp.mysql.Parameters]].
*/
object Parameter {
implicit def wrap[_A](_value: _A)(implicit _evidence: CanBeParameter[_A]): Parameter = {
if (_value == null) {
NullParameter
} else {
new Parameter {
type A = _A
def value: A = _value
def evidence: CanBeParameter[A] = _evidence
override def toString = s"Parameter($value)"
}
}
}
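// With the implicit conversion above in scope, values convert transparently, e.g.:
//   val p: Parameter = "bob"           // uses CanBeParameter[String]
//   val q: Parameter = 42              // uses CanBeParameter[Int]
//   val n: Parameter = (null: String)  // becomes NullParameter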
private val log = Logger.getLogger("finagle-mysql")
/**
* This converts the compile time error we would get with `wrap` into
* a run time error. This method should only be used to ease migration
* from Any to Parameter. It maintains the previous behavior where
* we log a failure to encode and transparently write a SQL NULL to
* the wire.
*/
def unsafeWrap(value: Any): Parameter = value match {
case v: String => wrap(v)
case v: Boolean => wrap(v)
case v: Byte => wrap(v)
case v: Short => wrap(v)
case v: Int => wrap(v)
case v: Long => wrap(v)
case v: Float => wrap(v)
case v: Double => wrap(v)
case v: Array[Byte] => wrap(v)
case v: Value => wrap(v)
case v: java.sql.Timestamp => wrap(v)
case v: java.sql.Date => wrap(v)
case null => Parameter.NullParameter
case v =>
// Unsupported type. Write the error to log, and write the type as null.
// This allows us to safely skip writing the parameter without corrupting the buffer.
log.warning(s"Unknown parameter ${v.getClass.getName} will be treated as SQL NULL.")
Parameter.NullParameter
}
object NullParameter extends Parameter {
type A = Null
def value = null
def evidence = CanBeParameter.nullCanBeParameter
}
}
/**
* A Java adaptation of the [[com.twitter.finagle.exp.mysql.Parameter]] companion object.
*/
object Parameters {
def nullParameter: Parameter = Parameter.NullParameter
def unsafeWrap(value: Any): Parameter = Parameter.unsafeWrap(value)
//TODO: create an accessor to Parameter.wrap, so type errors are caught at compile time.
} | sveinnfannar/finagle | finagle-mysql/src/main/scala/com/twitter/finagle/mysql/Parameter.scala | Scala | apache-2.0 | 2,743 |
package providers
import java.time.LocalDateTime
import java.time.temporal.ChronoUnit
import java.util.TimeZone
import com.feth.play.module.pa.PlayAuthenticate
import javax.inject.{Inject, Singleton}
import play.inject.ApplicationLifecycle
import play.Environment
import com.feth.play.module.pa.providers.cookie._
import com.feth.play.module.pa.user.AuthUser
import dao.DaoContext
import services.UserService
import java.time._
import play.i18n.Lang
@Singleton
class MyCookieAuthProvider @Inject()(implicit
env: Environment,
auth: PlayAuthenticate,
lifecycle: ApplicationLifecycle,
val userService: UserService,
daoContext: DaoContext) extends CookieAuthProvider(auth, lifecycle, env) {
import helpers.AwaitHelpers._
import CookieAuthProvider._
//-------------------------------------------------------------------
// public
//-------------------------------------------------------------------
override def save(cookieAuthUser: CookieAuthUser, loginUser: AuthUser): Unit = {
auth.getUserService.link(loginUser, cookieAuthUser)
val userRow = userService.findByAuthUser(loginUser).get
daoContext.cookieTokenSeriesDao.create(userRow, cookieAuthUser.getSeries, cookieAuthUser.getToken)
}
//-------------------------------------------------------------------
override def deleteSeries(authUser: AuthUser, series: String): Unit = {
val linkedAccountRow = daoContext.linkedAccountDao.findByProvider(getKey(), series).get
getAuth.getUserService.unlink(authUser)
daoContext.cookieTokenSeriesDao.deleteBySeries(linkedAccountRow.userId, series)
}
//-------------------------------------------------------------------
override def check(cookieAuthUser: CookieAuthUser): CookieAuthProvider.CheckResult = {
if (cookieAuthUser.getSeries == null) {
return CheckResult.MISSING_SERIES
}
val linkedAccount = daoContext.linkedAccountDao.findByProvider(PROVIDER_KEY, cookieAuthUser.getSeries).
getOrElse(return CheckResult.ERROR)
val cookieSeries = daoContext.cookieTokenSeriesDao.findBySeries(linkedAccount.userId, linkedAccount.providerUserId).
getOrElse(return CheckResult.MISSING_SERIES)
if (!(cookieSeries.token == cookieAuthUser.getToken)) return CheckResult.INVALID_TOKEN
val timeCreated = LocalDateTime.ofInstant(Instant.ofEpochMilli(cookieSeries.created.get.getTime),
TimeZone.getDefault().toZoneId())
val timeUpdated = LocalDateTime.ofInstant(Instant.ofEpochMilli(cookieSeries.modified.get.getTime),
TimeZone.getDefault().toZoneId())
val daysSinceCreated = ChronoUnit.DAYS.between(timeCreated, LocalDateTime.now)
val daysSinceUpdated = ChronoUnit.DAYS.between(timeUpdated, LocalDateTime.now)
val timeoutDaysSinceCreated = auth.getConfiguration.getLong("cookie.timeoutDays.sinceFirstLogin")
val timeoutDaysSinceUpdated = auth.getConfiguration.getLong("cookie.timeoutDays.sinceLastLogin")
if (daysSinceCreated > timeoutDaysSinceCreated) {
return CheckResult.EXPIRED
}
if (daysSinceUpdated > timeoutDaysSinceUpdated) {
return CheckResult.EXPIRED
}
return CheckResult.SUCCESS
}
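// The two timeouts above are read from configuration; expected keys (values illustrative):
//   cookie.timeoutDays.sinceFirstLogin = 30
//   cookie.timeoutDays.sinceLastLogin = 14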
//-------------------------------------------------------------------
override def renew(cookieAuthUser: CookieAuthUser, newToken: String): Unit = {
val linkedAccountRow = daoContext.linkedAccountDao.findByProvider(PROVIDER_KEY, cookieAuthUser.getSeries).get
val cookieSeriesRow = daoContext.cookieTokenSeriesDao.findBySeries(linkedAccountRow.userId, linkedAccountRow.providerUserId).get
daoContext.cookieTokenSeriesDao.updateToken(cookieSeriesRow, newToken)
}
} | bravegag/play-authenticate-usage-scala | app/providers/MyCookieAuthProvider.scala | Scala | apache-2.0 | 3,805 |
package org.ergoplatform
import sigmastate.SCollection.SByteArray
import sigmastate.Values._
import sigmastate.eval.IRContext
import sigmastate.interpreter.{Interpreter, PrecompiledScriptProcessor}
import sigmastate.utxo._
/** Base class of a verifying interpreter which expects ErgoLikeContext as input
* of its verify method.
* It implements deserialization of the registers of the SELF box.
*/
class ErgoLikeInterpreter(implicit val IR: IRContext) extends Interpreter {
override type CTX <: ErgoLikeContext
override val precompiledScriptProcessor: PrecompiledScriptProcessor = PrecompiledScriptProcessor.Default
override def substDeserialize(context: CTX, updateContext: CTX => Unit, node: SValue): Option[SValue] = node match {
case d: DeserializeRegister[_] =>
context.boxesToSpend(context.selfIndex).get(d.reg).flatMap { v =>
v match {
case eba: EvaluatedValue[SByteArray]@unchecked =>
val (ctx1, outVal) = deserializeMeasured(context, eba.value.toArray)
updateContext(ctx1)
if (outVal.tpe != d.tpe)
sys.error(s"Failed deserialization of $d: expected deserialized value to have type ${d.tpe}; got ${outVal.tpe}")
else
Some(outVal)
case _ =>
// TODO HF (1h): this case is not possible because `ErgoBox.get`
// returns lookups values from `additionalRegisters` Map with values
// of type EvaluatedValue, which are always Constant nodes in practice.
// Also, this branch is never executed so can be safely removed
// (better as part of the HF)
None
}
}.orElse(d.default)
case _ => super.substDeserialize(context, updateContext, node)
}
} | ScorexFoundation/sigmastate-interpreter | sigmastate/src/main/scala/org/ergoplatform/ErgoLikeInterpreter.scala | Scala | mit | 1,797 |
/*
* Copyright 2022 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package controllers
import java.util.UUID
import connectors.launchpadgateway.exchangeobjects.in._
import connectors.launchpadgateway.exchangeobjects.in.reviewed._
import org.joda.time.{ DateTime, LocalDate }
import org.mockito.ArgumentMatchers.{ eq => eqTo, _ }
import org.mockito.Mockito._
import play.api.mvc.RequestHeader
import play.api.test.FakeRequest
import play.api.test.Helpers._
import services.onlinetesting.phase3.{ Phase3TestCallbackService, Phase3TestService }
import testkit.UnitWithAppSpec
import uk.gov.hmrc.http.HeaderCarrier
import scala.concurrent.Future
class LaunchpadTestsControllerSpec extends UnitWithAppSpec {
trait TestFixture {
implicit val hc = HeaderCarrier()
implicit val rh: RequestHeader = FakeRequest("GET", "some/path")
val mockPhase3TestService = mock[Phase3TestService]
val mockPhase3TestCallbackService = mock[Phase3TestCallbackService]
val sampleCandidateId = UUID.randomUUID().toString
val sampleCustomCandidateId = "FSCND-456"
val sampleInviteId = "FSINV-123"
val sampleInterviewId = 123
val sampleDeadline = LocalDate.now.plusDays(7)
when(mockPhase3TestCallbackService.recordCallback(any[QuestionCallbackRequest]())).thenReturn(Future.successful(()))
when(mockPhase3TestCallbackService.recordCallback(any[FinishedCallbackRequest]())
(any[HeaderCarrier](), any[RequestHeader])).thenReturn(Future.successful(()))
when(mockPhase3TestCallbackService.recordCallback(any[FinalCallbackRequest]())
(any[HeaderCarrier](), any[RequestHeader])).thenReturn(Future.successful(()))
when(mockPhase3TestCallbackService.recordCallback(any[ViewPracticeQuestionCallbackRequest]())).thenReturn(Future.successful(()))
when(mockPhase3TestCallbackService.recordCallback(any[SetupProcessCallbackRequest]())
(any[HeaderCarrier](), any[RequestHeader]())).thenReturn(Future.successful(()))
when(mockPhase3TestCallbackService.recordCallback(any[ViewBrandedVideoCallbackRequest]())).thenReturn(Future.successful(()))
when(mockPhase3TestCallbackService.recordCallback(any[ReviewedCallbackRequest]())
(any[HeaderCarrier](), any[RequestHeader]())).thenReturn(Future.successful(()))
def controllerUnderTest = new LaunchpadTestsController(
stubControllerComponents(playBodyParsers = stubPlayBodyParsers(materializer)),
mockPhase3TestService,
mockPhase3TestCallbackService
)
val sampleSetupProcessCallback = SetupProcessCallbackRequest(
DateTime.now(),
sampleCandidateId,
sampleCustomCandidateId,
sampleInterviewId,
None,
sampleInviteId,
sampleDeadline
)
val sampleViewPracticeQuestionCallback = ViewPracticeQuestionCallbackRequest(
DateTime.now(),
sampleCandidateId,
sampleCustomCandidateId,
sampleInterviewId,
None,
sampleInviteId,
sampleDeadline
)
val sampleQuestionCallback = QuestionCallbackRequest(
DateTime.now(),
sampleCandidateId,
sampleCustomCandidateId,
sampleInterviewId,
None,
sampleInviteId,
sampleDeadline,
"1"
)
val sampleFinalCallback = FinalCallbackRequest(
DateTime.now(),
sampleCandidateId,
sampleCustomCandidateId,
sampleInterviewId,
None,
sampleInviteId,
sampleDeadline
)
val sampleFinishedCallback = FinishedCallbackRequest(
DateTime.now(),
sampleCandidateId,
sampleCustomCandidateId,
sampleInterviewId,
None,
sampleInviteId,
sampleDeadline
)
private def generateReviewedQuestion(i: Int, score1: Option[Double], score2: Option[Double]) = {
ReviewSectionQuestionRequest(
i,
ReviewSectionCriteriaRequest(
"numeric",
score1
),
ReviewSectionCriteriaRequest(
"numeric",
score2
)
)
}
val sampleReviewedCallback = ReviewedCallbackRequest(
DateTime.now(),
sampleCandidateId,
sampleCustomCandidateId,
sampleInterviewId,
None,
sampleInviteId,
sampleDeadline,
ReviewSectionRequest(
ReviewSectionTotalAverageRequest(
"video_interview",
"46%",
46.0
),
ReviewSectionReviewersRequest(
ReviewSectionReviewerRequest(
"John Smith",
"[email protected]",
Some("This is a comment"),
generateReviewedQuestion(1, None, None),
generateReviewedQuestion(2, Some(1.0), Some(2.0)),
generateReviewedQuestion(3, Some(3.0), Some(2.0)),
generateReviewedQuestion(4, Some(4.0), Some(2.5)),
generateReviewedQuestion(5, Some(5.0), Some(2.5)),
generateReviewedQuestion(6, Some(4.5), Some(1.0)),
generateReviewedQuestion(7, Some(3.5), Some(5.0)),
generateReviewedQuestion(8, Some(2.5), Some(2.5))
), None, None
)
)
)
}
"setup-process callback" should {
"respond ok" in new TestFixture {
val response = controllerUnderTest.setupProcessCallback(sampleInviteId)(fakeRequest(sampleSetupProcessCallback))
status(response) mustBe OK
verify(mockPhase3TestCallbackService, times(1)).recordCallback(any[SetupProcessCallbackRequest]()
)(any[HeaderCarrier](), any[RequestHeader]())
}
}
"view-practice-question callback" should {
"respond ok" in new TestFixture {
val response = controllerUnderTest.viewPracticeQuestionCallback(sampleInviteId)(fakeRequest(sampleViewPracticeQuestionCallback))
status(response) mustBe OK
verify(mockPhase3TestCallbackService, times(1)).recordCallback(any[ViewPracticeQuestionCallbackRequest]())
}
}
"question callback" should {
"respond ok" in new TestFixture {
val response = controllerUnderTest.questionCallback(sampleInviteId)(fakeRequest(sampleQuestionCallback))
status(response) mustBe OK
verify(mockPhase3TestCallbackService, times(1)).recordCallback(any[QuestionCallbackRequest]())
}
}
"final callback" should {
"respond ok" in new TestFixture {
val response = controllerUnderTest.finalCallback(sampleInviteId)(fakeRequest(sampleFinalCallback))
status(response) mustBe OK
verify(mockPhase3TestCallbackService, times(1)).recordCallback(eqTo(sampleFinalCallback))(any[HeaderCarrier](), any[RequestHeader]())
}
}
"finished callback" should {
"respond ok" in new TestFixture {
val response = controllerUnderTest.finishedCallback(sampleInviteId)(fakeRequest(sampleFinishedCallback))
status(response) mustBe OK
verify(mockPhase3TestCallbackService, times(1)).recordCallback(eqTo(sampleFinishedCallback))(any[HeaderCarrier](), any[RequestHeader]())
}
}
"reviewed callback" should {
"respond ok" in new TestFixture {
val response = controllerUnderTest.reviewedCallback(sampleInviteId)(fakeRequest(sampleReviewedCallback))
status(response) mustBe OK
verify(mockPhase3TestCallbackService, times(1)).recordCallback(eqTo(sampleReviewedCallback))(any[HeaderCarrier](), any[RequestHeader]())
}
}
}
| hmrc/fset-faststream | test/controllers/LaunchpadTestsControllerSpec.scala | Scala | apache-2.0 | 7,773 |
package com.easyforger.creatures
import net.minecraft.entity.monster.EntityZombie
import net.minecraft.world.World
case class ZombieConfig(common: CommonEntityConfig = CommonEntityConfig()) extends CreatureConfig
class CustomZombie(world: World) extends EntityZombie(world) with CommonCustomMonster {
val zombie = VanillaCreatures.zombieConfig
val config = zombie.common
init()
}
| ThiagoGarciaAlves/easyforger | src/main/scala/com/easyforger/creatures/CustomZombie.scala | Scala | gpl-3.0 | 390 |
package com.ibm.watson.developer_cloud.language_translation.v2.model
import java.io.File
import com.ibm.watson.developer_cloud.service.GenericModel
/**
* Created by Martin Harvan ([email protected]) on 20/03/16.
*/
case class CreateModelOptions(baseModelId: String, forcedGlossary: File, monolingualCorpus: File, name: String, parallelCorpus: File) extends GenericModel
| kane77/watson-scala-wrapper | src/main/scala/com/ibm/watson/developer_cloud/language_translation/v2/model/CreateModelOptions.scala | Scala | apache-2.0 | 382 |
package org.bitcoins.core.number
/** Helper trait for caching instances of data types that represent numbers.
  * Examples are [[org.bitcoins.core.script.constant.ScriptNumber]],
  * [[UInt32]], [[UInt64]], etc.
  */
trait NumberCache[T] {
def fromNativeNumber(long: Long): T
/** The minimum number cached (inclusive) */
def minCached: Long = 0
/** The max number cached (inclusive) */
def maxCached: Long = 255
private lazy val cache: Vector[T] = {
minCached.to(maxCached).map(fromNativeNumber).toVector
}
/** Checks if the given number is cached
* if not, allocates a new object to represent the number
*/
def checkCached(long: Long): T = {
if (long <= maxCached && long >= minCached) cache(long.toInt)
else {
fromNativeNumber(long)
}
}
}
/** Number cache, but for scala [[BigInt]] */
trait NumberCacheBigInt[T] extends NumberCache[T] {
private val bigIntCache: Vector[T] = {
minCachedBigInt
.to(maxCachedBigInt)
.map(fromBigInt)
.toVector
}
def fromBigInt(bigInt: BigInt): T
/** The minimum number cached (inclusive) */
def minCachedBigInt: BigInt = BigInt(minCached)
/** The max number cached (inclusive) */
def maxCachedBigInt: BigInt = BigInt(maxCached)
  /** [[org.bitcoins.core.protocol.CompactSizeUInt]] uses a UInt64,
    * which means larger UInt64 values occur on a regular basis, so a wider range is cached
    */
override def maxCached: Long = 2048
/** Checks if the given number is cached
* if not, allocates a new object to represent the number
*/
def checkCachedBigInt(bigInt: BigInt): T = {
    if (bigInt <= maxCachedBigInt && bigInt >= minCachedBigInt)
      bigIntCache(bigInt.toInt) // note: direct indexing assumes minCachedBigInt == 0
else {
fromBigInt(bigInt)
}
}
}
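// Illustrative usage sketch (not part of the original source): a minimal
// number type backed by `NumberCache`. `TinyUInt` is a hypothetical example
// type, not a bitcoin-s class.
final class TinyUInt private (val underlying: Long)
object TinyUInt extends NumberCache[TinyUInt] {
  override def fromNativeNumber(long: Long): TinyUInt = new TinyUInt(long)
  // values in [minCached, maxCached] reuse the preallocated instances;
  // anything outside that range allocates a fresh object
  def apply(long: Long): TinyUInt = checkCached(long)
}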
| bitcoin-s/bitcoin-s | core/src/main/scala/org/bitcoins/core/number/NumberCache.scala | Scala | mit | 1,719 |
package org.jetbrains.plugins.scala
package lang
package psi
package stubs
import api.toplevel.ScEarlyDefinitions
import com.intellij.psi.stubs.StubElement
/**
* User: Alexander Podkhalyuzin
* Date: 17.06.2009
*/
trait ScEarlyDefinitionsStub extends StubElement[ScEarlyDefinitions] {
} | consulo/consulo-scala | src/org/jetbrains/plugins/scala/lang/psi/stubs/ScEarlyDefinitionsStub.scala | Scala | apache-2.0 | 293 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sparklinedata.druid.metadata
import org.apache.spark.Logging
import org.apache.spark.sql.{SQLContext, DataFrame}
import org.apache.spark.sql.types._
import org.sparklinedata.druid.client.DruidClient
import scala.collection.mutable.{Map => MMap}
case class DruidClientInfo(host : String, port : Int)
case class DruidRelationInfo(val druidClientInfo : DruidClientInfo,
val sourceDFName : String,
val timeDimensionCol : String,
val druidDS : DruidDataSource,
val sourceToDruidMapping : Map[String, DruidColumn],
val fd : FunctionalDependencies,
val maxCardinality : Long,
val cardinalityPerDruidQuery : Long,
val allowCountDistinct : Boolean) {
def sourceDF(sqlContext : SQLContext) = sqlContext.table(sourceDFName)
}
object DruidRelationInfo {
// scalastyle:off parameter.number
def apply(sourceDFName : String,
sourceDF : DataFrame,
dsName : String,
timeDimensionCol : String,
druidHost : String,
druidPort : Int,
columnMapping : Map[String, String],
functionalDeps : List[FunctionalDependency],
maxCardinality : Long,
cardinalityPerDruidQuery : Long,
allowCountDistinct : Boolean = true) : DruidRelationInfo = {
val client = new DruidClient(druidHost, druidPort)
val druidDS = client.metadata(dsName)
val sourceToDruidMapping =
MappingBuilder.buildMapping(columnMapping, sourceDF, timeDimensionCol, druidDS)
val fd = new FunctionalDependencies(druidDS, functionalDeps,
DependencyGraph(druidDS, functionalDeps))
DruidRelationInfo(DruidClientInfo(druidHost, druidPort),
sourceDFName,
timeDimensionCol,
druidDS,
sourceToDruidMapping,
fd,
maxCardinality,
cardinalityPerDruidQuery,
allowCountDistinct)
}
}
private object MappingBuilder extends Logging {
  /**
   * Only top level Numeric and String types are mapped.
   * @param dT the Spark SQL data type to check
   * @return true if the type can be mapped to a Druid column
   */
def supportedDataType(dT : DataType) : Boolean = dT match {
case t if t.isInstanceOf[NumericType] => true
case StringType => true
case _ => false
}
def buildMapping( nameMapping : Map[String, String],
sourceDF : DataFrame,
timeDimensionCol : String,
druidDS : DruidDataSource) : Map[String, DruidColumn] = {
val m = MMap[String, DruidColumn]()
sourceDF.schema.iterator.foreach { f =>
if ( supportedDataType(f.dataType)) {
val dCol = druidDS.columns.get(nameMapping.getOrElse(f.name, f.name))
if ( dCol.isDefined) {
m += (f.name -> dCol.get)
} else if (f.name == timeDimensionCol) {
m += (f.name -> druidDS.timeDimension.get)
}
} else {
logDebug(s"${f.name} not mapped to Druid dataSource, unsupported dataType")
}
}
m.toMap
}
}
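// Illustrative sketch (not part of the original source): wiring up a
// relation, assuming a registered source DataFrame and a reachable Druid
// broker. All literal values below are hypothetical.
//
//   val info = DruidRelationInfo(
//     sourceDFName = "orders",
//     sourceDF = sqlContext.table("orders"),
//     dsName = "orders_druid",
//     timeDimensionCol = "ts",
//     druidHost = "localhost",
//     druidPort = 8082,
//     columnMapping = Map("ts" -> "__time"),
//     functionalDeps = Nil,
//     maxCardinality = 1000000L,
//     cardinalityPerDruidQuery = 100000L)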
| benschaff/spark-druid-olap | src/main/scala/org/sparklinedata/druid/metadata/DruidRelationInfo.scala | Scala | apache-2.0 | 3,901 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.codegen.calls
import org.apache.flink.table.codegen.GenerateUtils.generateCallIfArgsNotNull
import org.apache.flink.table.codegen.{CodeGenUtils, CodeGeneratorContext, GeneratedExpression}
import org.apache.flink.table.types.logical.LogicalType
class HashCodeCallGen extends CallGenerator {
override def generate(
ctx: CodeGeneratorContext,
operands: Seq[GeneratedExpression],
returnType: LogicalType): GeneratedExpression = {
val inputTerm = operands.head.resultTerm
val inputType = operands.head.resultType
val code = CodeGenUtils.hashCodeForType(ctx, inputType, inputTerm)
generateCallIfArgsNotNull(ctx, returnType, operands) { _ => code }
}
}
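// Illustrative note (not part of the original source): for an input
// expression term `a`, the generated code evaluates the type-specific hash
// produced by CodeGenUtils.hashCodeForType only when `a` is non-null;
// generateCallIfArgsNotNull contributes the surrounding null handling and
// the result-term bookkeeping.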
| shaoxuan-wang/flink | flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/codegen/calls/HashCodeCallGen.scala | Scala | apache-2.0 | 1,525 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.utils.tf.loaders
import com.intel.analytics.bigdl.tensor.Tensor
import com.intel.analytics.bigdl.utils.tf.Tensorflow.{booleanAttr, intAttr, typeAttr}
import com.intel.analytics.bigdl.utils.tf.TensorflowSpecHelper
import org.tensorflow.framework.{DataType, NodeDef}
class TopKV2Spec extends TensorflowSpecHelper {
"TopKV2" should "be correct for float tensor" in {
compare[Float](
NodeDef.newBuilder()
.setName("topk_test")
.putAttr("T", typeAttr(DataType.DT_FLOAT))
.setOp("TopKV2"),
Seq(Tensor[Float](5, 5, 6).rand(), Tensor.scalar[Int](2)),
0
)
compare[Float](
NodeDef.newBuilder()
.setName("topk_test")
.putAttr("T", typeAttr(DataType.DT_FLOAT))
.setOp("TopKV2"),
Seq(Tensor[Float](5, 5, 6).rand(), Tensor.scalar[Int](2)),
1
)
}
"TopKV2" should "be correct for 1D float tensor" in {
compare[Float](
NodeDef.newBuilder()
.setName("topk_test")
.putAttr("T", typeAttr(DataType.DT_FLOAT))
.setOp("TopKV2"),
Seq(Tensor[Float](5).rand(), Tensor.scalar[Int](2)),
0
)
compare[Float](
NodeDef.newBuilder()
.setName("topk_test")
.putAttr("T", typeAttr(DataType.DT_FLOAT))
.setOp("TopKV2"),
Seq(Tensor[Float](5).rand(), Tensor.scalar[Int](2)),
1
)
}
"TopKV2" should "be correct for float tensor when sorted is false" in {
compare[Float](
NodeDef.newBuilder()
.setName("topk_test")
.putAttr("T", typeAttr(DataType.DT_FLOAT))
.putAttr("sorted", booleanAttr(false))
.setOp("TopKV2"),
Seq(Tensor[Float](5, 5, 6).rand(), Tensor.scalar[Int](2)),
0
)
compare[Float](
NodeDef.newBuilder()
.setName("topk_test")
.putAttr("T", typeAttr(DataType.DT_FLOAT))
.putAttr("sorted", booleanAttr(false))
.setOp("TopKV2"),
Seq(Tensor[Float](5, 5, 6).rand(), Tensor.scalar[Int](2)),
1
)
}
}
| luchy0120/BigDL | spark/dl/src/test/scala/com/intel/analytics/bigdl/utils/tf/loaders/TopKV2Spec.scala | Scala | apache-2.0 | 2,634 |
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package controllers.businessdetails
import cats.data.OptionT
import cats.implicits._
import com.google.inject.Inject
import connectors.{BusinessMatchingConnector, DataCacheConnector}
import controllers.{AmlsBaseController, CommonPlayDependencies}
import models.businessdetails.{BusinessDetails, CorporationTaxRegistered, CorporationTaxRegisteredYes}
import models.businessmatching.BusinessMatching
import models.businessmatching.BusinessType.{LPrLLP, LimitedCompany}
import play.api.mvc.{MessagesControllerComponents, Request, Result}
import uk.gov.hmrc.http.HeaderCarrier
import uk.gov.hmrc.http.cache.client.CacheMap
import utils.AuthAction
import utils.ControllerHelper
import scala.concurrent.Future
// This controller no longer has a view or POST method. The UTR is acquired in BM and should be copied
// to Business Details only once, pre-submission. API5 then populates this field from ETMP. The user
// should never need to update it.
class CorporationTaxRegisteredController @Inject () (val dataCacheConnector: DataCacheConnector,
val businessMatchingConnector: BusinessMatchingConnector,
val authAction: AuthAction,
val ds: CommonPlayDependencies,
val cc: MessagesControllerComponents,
implicit val error: views.html.error) extends AmlsBaseController(ds, cc) {
val failedResult = InternalServerError("Failed to update the business corporation tax number")
def get() = authAction.async {
implicit request =>
filterByBusinessType ( request.credId, cache =>
cache.getEntry[BusinessDetails](BusinessDetails.key) match {
case _ =>
(for {
bm <- OptionT.fromOption[Future](cache.getEntry[BusinessMatching](BusinessMatching.key))
details <- OptionT.fromOption[Future](bm.reviewDetails)
// Only update Business Details from Business Matching where it exists; after that its maintained by API5
_ <- if (details.utr.isDefined) {
updateCache(request.credId, cache, CorporationTaxRegisteredYes(details.utr.getOrElse(
throw new Exception("[CorporationTaxRegisteredController][get]: Could not retrieve UTR from Business Matching")
)))
} else {
OptionT.fromOption[Future](Some(cache))
}
} yield Redirect(routes.ConfirmRegisteredOfficeController.get())) getOrElse
InternalServerError("[CorporationTaxRegisteredController][get]: Could not route from CorporationTaxRegisteredController")
}
)
}
private def filterByBusinessType(cacheId: String, fn: CacheMap => Future[Result])(implicit hc:HeaderCarrier, request: Request[_]): Future[Result] = {
OptionT(dataCacheConnector.fetchAll(cacheId)) flatMap { cache =>
ControllerHelper.getBusinessType(cache.getEntry[BusinessMatching](BusinessMatching.key)) match {
case Some((LPrLLP | LimitedCompany)) => OptionT.liftF(fn(cache))
case _ => OptionT.pure[Future, Result](NotFound(notFoundView))
}
} getOrElse InternalServerError("Could not retrieve business type")
}
private def updateCache(cacheId: String, cache: CacheMap, data: CorporationTaxRegistered)(implicit hc: HeaderCarrier) = for {
businessDetails <- OptionT.fromOption[Future](cache.getEntry[BusinessDetails](BusinessDetails.key))
cacheMap <- OptionT.liftF(dataCacheConnector.save[BusinessDetails](cacheId, BusinessDetails.key, businessDetails.corporationTaxRegistered(data)))
} yield cacheMap
}
| hmrc/amls-frontend | app/controllers/businessdetails/CorporationTaxRegisteredController.scala | Scala | apache-2.0 | 4,360 |
/*
* Copyright (C) 2017 The Proteus Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.proteus.peach.client
import java.util.concurrent.TimeUnit
import com.proteus.peach.server.PeachServer
import org.junit.AfterClass
import org.junit.BeforeClass
import scala.concurrent.duration.Duration
object PeachAkkaClientIT {
/**
* Server instance.
*/
lazy val Server: PeachServer = new PeachServer()
/**
* Init cache server.
*/
@BeforeClass
def beforeAll(): Unit = {
Server.init()
Server.run()
}
/**
* Stop cache server.
*/
@AfterClass
def afterAll(): Unit = {
Server.shutdown()
}
}
class PeachAkkaClientIT extends PeachClientValidator {
/**
* Client cache to test.
*/
override lazy val clientCache: PeachClient = PeachAkkaClient()
}
| aagea/peach | peach-client/src/test/scala/com/proteus/peach/client/PeachAkkaClientIT.scala | Scala | apache-2.0 | 1,325 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.graphx.lib
import org.apache.spark.SparkFunSuite
import org.apache.spark.graphx._
class StronglyConnectedComponentsSuite extends SparkFunSuite with LocalSparkContext {
test("Island Strongly Connected Components") {
withSpark { sc =>
val vertices = sc.parallelize((1L to 5L).map(x => (x, -1)))
val edges = sc.parallelize(Seq.empty[Edge[Int]])
val graph = Graph(vertices, edges)
val sccGraph = graph.stronglyConnectedComponents(5)
for ((id, scc) <- sccGraph.vertices.collect()) {
assert(id === scc)
}
}
}
test("Cycle Strongly Connected Components") {
withSpark { sc =>
val rawEdges = sc.parallelize((0L to 6L).map(x => (x, (x + 1) % 7)))
val graph = Graph.fromEdgeTuples(rawEdges, -1)
val sccGraph = graph.stronglyConnectedComponents(20)
for ((id, scc) <- sccGraph.vertices.collect()) {
assert(0L === scc)
}
}
}
test("2 Cycle Strongly Connected Components") {
withSpark { sc =>
val edges =
Array(0L -> 1L, 1L -> 2L, 2L -> 0L) ++
Array(3L -> 4L, 4L -> 5L, 5L -> 3L) ++
Array(6L -> 0L, 5L -> 7L)
val rawEdges = sc.parallelize(edges)
val graph = Graph.fromEdgeTuples(rawEdges, -1)
val sccGraph = graph.stronglyConnectedComponents(20)
for ((id, scc) <- sccGraph.vertices.collect()) {
if (id < 3) {
assert(0L === scc)
} else if (id < 6) {
assert(3L === scc)
} else {
assert(id === scc)
}
}
}
}
}
| mike0sv/spark | graphx/src/test/scala/org/apache/spark/graphx/lib/StronglyConnectedComponentsSuite.scala | Scala | apache-2.0 | 2,363 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.github.mandar2812.dynaml.prototype
import breeze.linalg.DenseVector
import org.apache.spark.mllib.linalg.Vectors
import org.apache.spark.mllib.regression.LabeledPoint
import org.apache.spark.rdd.RDD
import io.github.mandar2812.dynaml.kernels.DensityKernel
/**
* Implements the quadratic Renyi Entropy
*/
class QuadraticRenyiEntropy(dist: DensityKernel)
extends EntropyMeasure
with Serializable {
val log_e = scala.math.log _
val sqrt = scala.math.sqrt _
override protected val density: DensityKernel = dist
  /**
   * Calculate the quadratic Renyi entropy,
   * up to a distribution-specific
   * proportionality constant. This can
   * be used to compare the entropy values of
   * different data sets drawn from the same
   * distribution.
   *
   * @param data The data set whose entropy is
   * required.
   * @return The entropy of the data set, assuming
   * it is distributed as given by the
   * parameter 'density'.
   */
override def entropy(data: List[DenseVector[Double]]): Double = {
val dim = data.head.length
val root_two: breeze.linalg.Vector[Double] = DenseVector.fill(dim, sqrt(2))
val product = for(i <- data.view; j <- data.view) yield (i, j)
-1*log_e(product.map((couple) =>
density.eval((couple._1 - couple._2) :/ root_two)).sum)
}
override def entropy[K](data: RDD[(K, LabeledPoint)]): Double = {
val dim = data.first()._2.features.size
-1*log_e(data.cartesian(data).map((couple) =>{
val point1: DenseVector[Double] = DenseVector(couple._1._2.features.toArray) / sqrt(2.0)
val point2: DenseVector[Double] = DenseVector(couple._2._2.features.toArray) / sqrt(2.0)
density.eval(point1 - point2)
}).reduce((a,b) => a + b))
}
def entropyDifference(entropy: Double,
data: List[DenseVector[Double]],
add: DenseVector[Double],
remove: DenseVector[Double]): Double = {
val dim = data.head.length
val expEntropy = math.exp(-1.0*entropy)
val root_two: breeze.linalg.Vector[Double] = DenseVector.fill(dim, sqrt(2))
val product1 = for(i <- data.view) yield (remove, i)
val subtractEnt = product1.map((couple) =>
density.eval((couple._1 - couple._2) :/ root_two)).sum
val product2 = for(i <- data.view) yield (add, i)
val addEnt = product2.map((couple) =>
density.eval((couple._1 - couple._2) :/ root_two)).sum -
density.eval((add - remove) :/ root_two)
-1.0*log_e(expEntropy + addEnt - subtractEnt) - entropy
}
}
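// Illustrative note (not part of the original source). For a kernel k, the
// estimator in `entropy` computes, up to the distribution-specific constant
// mentioned in the scaladoc,
//
//   H2(X) = -log( sum_i sum_j k((x_i - x_j) / sqrt(2)) )
//
// i.e. the negative log of the summed pairwise kernel evaluations. With the
// 1/N^2 normalisation folded back into the constant, this is the standard
// plug-in estimate of the quadratic Renyi entropy -log( integral p(x)^2 dx ).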
| twitwi/DynaML | src/main/scala/io/github/mandar2812/dynaml/prototype/QuadraticRenyiEntropy.scala | Scala | apache-2.0 | 3,372 |
/**
*
* This file is part of Fixbugs.
*
* Fixbugs is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Fixbugs is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Fixbugs. If not, see <http://www.gnu.org/licenses/>.
*
**/
package fixbugs.core.ir
import org.scalacheck._
import Arbitrary._
import Gen._
/**
* nc := E Path
* | A Path
* | nu|mu X. nc
* | nc ^ nc
* | nc \\/ nc
* | ¬ nc
* | NodePred
* | True
* | False
*/
sealed abstract class NodeCondition {}
case class All(path:Path) extends NodeCondition
case class Exists(path:Path) extends NodeCondition
case class Mu(varName:String,phi:NodeCondition) extends NodeCondition
case class Nu(varName:String,phi:NodeCondition) extends NodeCondition
case class And(left:NodeCondition,right:NodeCondition) extends NodeCondition
case class Or(left:NodeCondition,right:NodeCondition) extends NodeCondition
case class Not(phi:NodeCondition) extends NodeCondition
//case class LineNumber(number:Int) extends NodeCondition
case class NodePred(node:String) extends NodeCondition
case class True() extends NodeCondition
case class False() extends NodeCondition
case class StmtPred(stmt:Statement) extends NodeCondition
/*
object NodeCondition {
  // IR generator boilerplate
// TODO: refactor common code
val genLeaf = oneOf(value(True),value(False),for(e <- Arbitrary.arbitrary[String]) yield NodePred(e))
def genUnary(sz:Int) = for { phi <- genTree(sz-1) } yield Not(phi)
def genAnd(sz:Int) = for {
phi <- genTree(sz/2)
psi <- genTree(sz/2)
} yield And(phi,psi)
def genOr(sz:Int) = for {
phi <- genTree(sz/2)
psi <- genTree(sz/2)
  } yield Or(phi,psi)
def genPath(sz:Int) = oneOf(genFuture(sz),genGlobal(sz))
def genFuture(sz:Int) = for { phi <- genTree(sz-1) } yield Future(phi)
def genGlobal(sz:Int) = for { phi <- genTree(sz-1) } yield Global(phi)
def genAll(sz:Int) = for { p <- genPath(sz-1) } yield All(p)
def genExists(sz:Int) = for { p <- genPath(sz-1) } yield Exists(p)
def genTree(sz:Int): Gen[NodeCondition] =
if(sz <= 0) genLeaf
else if (sz <= 1) oneOf(genUnary(sz),genLeaf)
else if(sz <= 2) oneOf(genAnd(sz),genOr(sz),genUnary(sz),genLeaf)
else oneOf(genExists(sz),genAll(sz))
implicit val arbFoo: Arbitrary[NodeCondition] = Arbitrary { Gen.sized(sz => genTree(sz)) }
}
*/
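// Illustrative sketch (not part of the original source): building formulas
// with the IR above. `Future` and `Global` are assumed Path constructors,
// as hinted at by the commented-out generator:
//
//   // "some path eventually reaches node n"
//   val eventuallyN: NodeCondition = Exists(Future(NodePred("n")))
//
//   // "on every path, phi holds globally"
//   def alwaysGlobally(phi: NodeCondition): NodeCondition = All(Global(phi))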
| FauxFaux/fixbugs | src/main/java/fixbugs/core/ir/NodeCondition.scala | Scala | lgpl-3.0 | 2,858 |
package org.qirx.cms.machinery
import scala.language.higherKinds
import scala.language.implicitConversions
import scala.annotation.implicitNotFound
object Coproduct {
def apply[Head[_], Tail[_], x](value: Either[Head[x], Tail[x]])(
implicit ev: Coproduct.NotAtLeft[Head]) = Co[Head, Tail].Product(value)
@implicitNotFound("There can be no coproducts on the left, import Coproduct.proof._ if this is wrong, found: ${F}")
type NotAtLeft[F[_]] = IsNotCoproduct[F]
trait IsCoproduct[F[_]]
object IsCoproduct {
implicit def coproduct[Head[_], Tail[_]]: IsCoproduct[Co[Head, Tail]#Product] = null
}
trait IsNotCoproduct[F[_]]
object IsNotCoproduct {
    /*
     For types that are a coproduct we provide an ambiguous IsNot
     value; this makes the real one unusable
    */
implicit def isCoproduct[F[_]](implicit ev: IsCoproduct[F]): IsNotCoproduct[F] = null
implicit def isNotCoproduct[F[_]]: IsNotCoproduct[F] = null
implicit def nothingIsNotCoproduct: IsNotCoproduct[Nothing] = null
}
private type ::[Head[_], Tail[_]] = Co[Head, Tail]
trait IsIdentity[F[_]]
object IsIdentity {
type Id[x] = x
implicit def identity[F[_]](implicit ev: F[_] =:= Id[_]): IsIdentity[F] = null
}
trait IsNotIdentity[F[_]]
object IsNotIdentity {
implicit def identity[F[_]](implicit ev: IsIdentity[F]): IsNotIdentity[F] = null
implicit def notIdentity[F[_]]: IsNotIdentity[F] = null
}
trait LowerPriorityTransformations {
implicit def none[Elem[_]](
implicit ev: IsNotIdentity[Elem]) =
new (Elem ~> Elem) {
def transform[x] = identity
}
implicit def atHead[Elem[_], Tail[_]](
implicit ev: IsNotCoproduct[Elem]) =
new (Elem ~> (Elem :: Tail)#T) {
def transform[x] = elem =>
new (Elem :: Tail).Product(Left(elem))
}
implicit def inTail[Elem[_], Head[_], Tail[_]](
implicit ev1: IsNotCoproduct[Elem],
ev2: IsNotCoproduct[Head],
transformTail: Elem ~> Tail) =
new (Elem ~> (Head :: Tail)#T) {
def transform[x] = elem =>
new (Head :: Tail).Product(Right(transformTail(elem)))
}
}
trait Transformations extends LowerPriorityTransformations {
implicit def isCoProduct[Head[_], Tail[_], Target[_]](
implicit ev: IsNotCoproduct[Head],
transformHead: Head ~> Target,
transformTail: Tail ~> Target) =
new ((Head :: Tail)#T ~> Target) {
def transform[x] =
_.value match {
case Left(head) => transformHead(head)
case Right(tail) => transformTail(tail)
}
}
implicit def transformSource[F[_], Target[_], G[_]](fToTarget: F ~> Target)(
implicit gToF: G ~> F): G ~> Target = gToF andThen fToTarget
}
} | EECOLOR/play-cms | cms/src/main/scala/org/qirx/cms/machinery/Coproduct.scala | Scala | mit | 2,778 |
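// Illustrative sketch (not part of the original source): how the
// Transformations instances inject a single effect into a coproduct.
// `Store` and `Log` are hypothetical effect types; `Co` and `~>` come from
// this package.
//
//   type Program[x] = Co[Store, Log]#T[x]
//   val injectStore: Store ~> Program = implicitly  // resolved via atHead
//   val injectLog: Log ~> Program = implicitly      // resolved via inTail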
/*
* Copyright 2014–2018 SlamData Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package quasar
import slamdata.Predef._
import quasar.RenderTree.ops._
import quasar.fp._
import matryoshka._
import org.specs2.matcher._
import scalaz._, Scalaz._
trait TreeMatchers {
// TODO remove in favor of `beTreeEqual`
// uses `==`
def beTree[A: RenderTree](expected: A): Matcher[A] = new Matcher[A] {
def apply[S <: A](ex: Expectable[S]) = {
val actual: A = ex.value
val diff: String = (RenderTree[A].render(actual) diff expected.render).shows
result(actual == expected, s"trees match:\\n$diff", s"trees do not match:\\n$diff", ex)
}
}
// uses `scalaz.Equal`
def beTreeEqual[A: Equal: RenderTree](expected: A): Matcher[A] = new Matcher[A] {
def apply[S <: A](s: Expectable[S]) = {
val v: A = s.value
// TODO: these are unintuitively reversed b/c of the `diff` implementation, should be fixed
val diff = (RenderTree[A].render(v) diff expected.render).shows
result(v ≟ expected, s"trees match:\\n$diff", s"trees do not match:\\n$diff", s)
}
}
}
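// Illustrative usage sketch (not part of the original source): in a specs2
// spec that mixes in TreeMatchers, given values with RenderTree (and, for
// beTreeEqual, scalaz.Equal) instances in scope:
//
//   actualPlan must beTree(expectedPlan)       // compares with ==
//   actualPlan must beTreeEqual(expectedPlan)  // compares with scalaz.Equal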
| jedesah/Quasar | foundation/src/test/scala/quasar/matchers.scala | Scala | apache-2.0 | 1,630 |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest
import org.scalactic.{Equality, Every, One, Many, Prettifier}
import org.scalactic.StringNormalizations._
import SharedHelpers._
import FailureMessages.decorateToStringValue
import Matchers._
import exceptions.TestFailedException
class EveryShouldContainOnlyLogicalAndSpec extends FunSpec {
private val prettifier = Prettifier.default
val invertedListOfStringEquality =
new Equality[Every[String]] {
def areEqual(a: Every[String], b: Any): Boolean = a != b
}
val upperCaseStringEquality =
new Equality[String] {
def areEqual(a: String, b: Any): Boolean = upperCase(a) == upperCase(b)
}
private def upperCase(value: Any): Any =
value match {
case l: Every[_] => l.map(upperCase(_))
case s: String => s.toUpperCase
case c: Char => c.toString.toUpperCase.charAt(0)
case (s1: String, s2: String) => (s1.toUpperCase, s2.toUpperCase)
case e: java.util.Map.Entry[_, _] =>
(e.getKey, e.getValue) match {
case (k: String, v: String) => Entry(k.toUpperCase, v.toUpperCase)
case _ => value
}
case _ => value
}
val upperCaseListOfStringEquality =
new Equality[Every[String]] {
def areEqual(a: Every[String], b: Any): Boolean = upperCase(a) == upperCase(b)
}
//ADDITIONAL//
val fileName: String = "EveryShouldContainOnlyLogicalAndSpec.scala"
describe("an Every") {
val fumList: Every[String] = Every("fum", "foe", "fie", "fee")
val toList: Every[String] = Every("you", "to", "birthday", "happy")
describe("when used with (contain only (..) and contain only (..))") {
it("should do nothing if valid, else throw a TFE with an appropriate error message") {
fumList should (contain only ("fee", "fie", "foe", "fum") and contain only ("fie", "fee", "fum", "foe"))
val e1 = intercept[TestFailedException] {
fumList should (contain only ("happy", "birthday", "to", "you") and contain only ("fee", "fie", "foe", "fum"))
}
checkMessageStackDepth(e1, Resources.didNotContainOnlyElements(decorateToStringValue(prettifier, fumList), "\\"happy\\", \\"birthday\\", \\"to\\", \\"you\\""), fileName, thisLineNumber - 2)
val e2 = intercept[TestFailedException] {
fumList should (contain only ("fee", "fie", "foe", "fum") and contain only ("happy", "birthday", "to", "you"))
}
checkMessageStackDepth(e2, Resources.containedOnlyElements(decorateToStringValue(prettifier, fumList), "\\"fee\\", \\"fie\\", \\"foe\\", \\"fum\\"") + ", but " + Resources.didNotContainOnlyElements(decorateToStringValue(prettifier, fumList), "\\"happy\\", \\"birthday\\", \\"to\\", \\"you\\""), fileName, thisLineNumber - 2)
}
it("should use the implicit Equality in scope") {
implicit val ise = upperCaseStringEquality
fumList should (contain only ("FEE", "FIE", "FOE", "FUM") and contain only ("FEE", "FIE", "FUM", "FOE"))
val e1 = intercept[TestFailedException] {
fumList should (contain only ("FEE", "FIE", "FOE", "FAM") and contain only ("FEE", "FIE", "FUM", "FOE"))
}
checkMessageStackDepth(e1, Resources.didNotContainOnlyElements(decorateToStringValue(prettifier, fumList), "\\"FEE\\", \\"FIE\\", \\"FOE\\", \\"FAM\\""), fileName, thisLineNumber - 2)
val e2 = intercept[TestFailedException] {
fumList should (contain only ("FEE", "FIE", "FOE", "FUM") and (contain only ("FEE", "FIE", "FAM", "FOE")))
}
checkMessageStackDepth(e2, Resources.containedOnlyElements(decorateToStringValue(prettifier, fumList), "\\"FEE\\", \\"FIE\\", \\"FOE\\", \\"FUM\\"") + ", but " + Resources.didNotContainOnlyElements(decorateToStringValue(prettifier, fumList), "\\"FEE\\", \\"FIE\\", \\"FAM\\", \\"FOE\\""), fileName, thisLineNumber - 2)
}
it("should use an explicitly provided Equality") {
(fumList should (contain only ("FEE", "FIE", "FOE", "FUM") and contain only ("FEE", "FIE", "FUM", "FOE"))) (decided by upperCaseStringEquality, decided by upperCaseStringEquality)
val e1 = intercept[TestFailedException] {
(fumList should (contain only ("FEE", "FIE", "FOE", "FAM") and contain only ("FEE", "FIE", "FUM", "FOE"))) (decided by upperCaseStringEquality, decided by upperCaseStringEquality)
}
checkMessageStackDepth(e1, Resources.didNotContainOnlyElements(decorateToStringValue(prettifier, fumList), "\\"FEE\\", \\"FIE\\", \\"FOE\\", \\"FAM\\""), fileName, thisLineNumber - 2)
val e2 = intercept[TestFailedException] {
(fumList should (contain only ("FEE", "FIE", "FOE", "FUM") and contain only ("FEE", "FIE", "FAM", "FOE"))) (decided by upperCaseStringEquality, decided by upperCaseStringEquality)
}
checkMessageStackDepth(e2, Resources.containedOnlyElements(decorateToStringValue(prettifier, fumList), "\\"FEE\\", \\"FIE\\", \\"FOE\\", \\"FUM\\"") + ", but " + Resources.didNotContainOnlyElements(decorateToStringValue(prettifier, fumList), "\\"FEE\\", \\"FIE\\", \\"FAM\\", \\"FOE\\""), fileName, thisLineNumber - 2)
(fumList should (contain only (" FEE ", " FIE ", " FOE ", " FUM ") and contain only (" FEE ", " FIE ", " FOE ", " FUM "))) (after being lowerCased and trimmed, after being lowerCased and trimmed)
}
it("should throw NotAllowedException with correct stack depth and message when RHS is empty") {
if (ScalaTestVersions.BuiltForScalaVersion != "2.13") { // For 2.13, the compiler will pass in args with single argument ().
val e1 = intercept[exceptions.NotAllowedException] {
fumList should (contain only() and contain only("fie", "fee", "fum", "foe"))
}
e1.failedCodeFileName.get should be(fileName)
e1.failedCodeLineNumber.get should be(thisLineNumber - 3)
e1.message should be(Some(Resources.onlyEmpty))
val e2 = intercept[exceptions.NotAllowedException] {
fumList should (contain only("fie", "fee", "fum", "foe") and contain only())
}
e2.failedCodeFileName.get should be(fileName)
e2.failedCodeLineNumber.get should be(thisLineNumber - 3)
e2.message should be(Some(Resources.onlyEmpty))
} else {
val e1 = intercept[exceptions.NotAllowedException] {
fumList should (contain.only() and contain only("fie", "fee", "fum", "foe"))
}
e1.failedCodeFileName.get should be(fileName)
e1.failedCodeLineNumber.get should be(thisLineNumber - 3)
e1.message should be(Some(Resources.onlyEmpty))
val e2 = intercept[exceptions.NotAllowedException] {
fumList should (contain only("fie", "fee", "fum", "foe") and contain.only())
}
e2.failedCodeFileName.get should be(fileName)
e2.failedCodeLineNumber.get should be(thisLineNumber - 3)
e2.message should be(Some(Resources.onlyEmpty))
}
}
it("should throw NotAllowedException with correct stack depth and message when RHS contain duplicated value") {
val e1 = intercept[exceptions.NotAllowedException] {
fumList should (contain only ("fee", "fie", "foe", "fie", "fum") and contain only ("fie", "fee", "fum", "foe"))
}
e1.failedCodeFileName.get should be (fileName)
e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
e1.message should be (Some(Resources.onlyDuplicate))
val e2 = intercept[exceptions.NotAllowedException] {
fumList should (contain only ("fie", "fee", "fum", "foe") and contain only ("fee", "fie", "foe", "fie", "fum"))
}
e2.failedCodeFileName.get should be (fileName)
e2.failedCodeLineNumber.get should be (thisLineNumber - 3)
e2.message should be (Some(Resources.onlyDuplicate))
}
it("should throw TFE with friendly reminder when single GenTraversable argument is passed and failed") {
val e1 = intercept[TestFailedException] {
fumList should (contain only Many("happy", "birthday", "to", "you") and contain only ("fee", "fie", "foe", "fum"))
}
checkMessageStackDepth(e1, Resources.didNotContainOnlyElementsWithFriendlyReminder(decorateToStringValue(prettifier, fumList), decorateToStringValue(prettifier, Many("happy", "birthday", "to", "you"))), fileName, thisLineNumber - 2)
val e2 = intercept[TestFailedException] {
fumList should (contain only ("fee", "fie", "foe", "fum") and contain only Many("happy", "birthday", "to", "you"))
}
checkMessageStackDepth(e2, Resources.containedOnlyElements(decorateToStringValue(prettifier, fumList), "\\"fee\\", \\"fie\\", \\"foe\\", \\"fum\\"") + ", but " + Resources.didNotContainOnlyElementsWithFriendlyReminder(decorateToStringValue(prettifier, fumList), decorateToStringValue(prettifier, Many("happy", "birthday", "to", "you"))), fileName, thisLineNumber - 2)
}
}
describe("when used with (equal (..) and contain only (..))") {
it("should do nothing if valid, else throw a TFE with an appropriate error message") {
fumList should (equal (fumList) and contain only ("fie", "fee", "fum", "foe"))
val e1 = intercept[TestFailedException] {
fumList should (equal (toList) and contain only ("fee", "fie", "foe", "fum"))
}
checkMessageStackDepth(e1, Resources.didNotEqual(decorateToStringValue(prettifier, fumList), decorateToStringValue(prettifier, toList)), fileName, thisLineNumber - 2)
val e2 = intercept[TestFailedException] {
fumList should (equal (fumList) and contain only ("happy", "birthday", "to", "you"))
}
checkMessageStackDepth(e2, Resources.equaled(decorateToStringValue(prettifier, fumList), decorateToStringValue(prettifier, fumList)) + ", but " + Resources.didNotContainOnlyElements(decorateToStringValue(prettifier, fumList), "\\"happy\\", \\"birthday\\", \\"to\\", \\"you\\""), fileName, thisLineNumber - 2)
}
it("should use the implicit Equality in scope") {
implicit val ise = upperCaseStringEquality
fumList should (equal (fumList) and contain only ("FEE", "FIE", "FOE", "FUM"))
val e1 = intercept[TestFailedException] {
fumList should (equal (toList) and contain only ("FEE", "FIE", "FOE", "FUM"))
}
checkMessageStackDepth(e1, Resources.didNotEqual(decorateToStringValue(prettifier, fumList), decorateToStringValue(prettifier, toList)), fileName, thisLineNumber - 2)
val e2 = intercept[TestFailedException] {
fumList should (equal (fumList) and (contain only ("FEE", "FIE", "FOE", "FAM")))
}
checkMessageStackDepth(e2, Resources.equaled(decorateToStringValue(prettifier, fumList), decorateToStringValue(prettifier, fumList)) + ", but " + Resources.didNotContainOnlyElements(decorateToStringValue(prettifier, fumList), "\\"FEE\\", \\"FIE\\", \\"FOE\\", \\"FAM\\""), fileName, thisLineNumber - 2)
}
it("should use an explicitly provided Equality") {
(fumList should (equal (toList) and contain only ("FEE", "FIE", "FOE", "FUM"))) (decided by invertedListOfStringEquality, decided by upperCaseStringEquality)
val e1 = intercept[TestFailedException] {
(fumList should (equal (toList) and contain only ("FEE", "FIE", "FOE", "FAM"))) (decided by invertedListOfStringEquality, decided by upperCaseStringEquality)
}
checkMessageStackDepth(e1, Resources.equaled(decorateToStringValue(prettifier, fumList), decorateToStringValue(prettifier, toList)) + ", but " + Resources.didNotContainOnlyElements(decorateToStringValue(prettifier, fumList), "\\"FEE\\", \\"FIE\\", \\"FOE\\", \\"FAM\\""), fileName, thisLineNumber - 2)
val e2 = intercept[TestFailedException] {
(fumList should (equal (fumList) and contain only ("FEE", "FIE", "FOE", "FUM"))) (decided by invertedListOfStringEquality, decided by upperCaseStringEquality)
}
checkMessageStackDepth(e2, Resources.didNotEqual(decorateToStringValue(prettifier, fumList), decorateToStringValue(prettifier, fumList)), fileName, thisLineNumber - 2)
(fumList should (equal (toList) and contain only (" FEE ", " FIE ", " FOE ", " FUM "))) (decided by invertedListOfStringEquality, after being lowerCased and trimmed)
}
it("should throw NotAllowedException with correct stack depth and message when RHS is empty") {
if (ScalaTestVersions.BuiltForScalaVersion != "2.13") { // For 2.13, the compiler will pass in args with single argument ().
val e1 = intercept[exceptions.NotAllowedException] {
fumList should (equal(fumList) and contain only())
}
e1.failedCodeFileName.get should be(fileName)
e1.failedCodeLineNumber.get should be(thisLineNumber - 3)
e1.message should be(Some(Resources.onlyEmpty))
} else {
val e1 = intercept[exceptions.NotAllowedException] {
fumList should (equal(fumList) and contain.only())
}
e1.failedCodeFileName.get should be(fileName)
e1.failedCodeLineNumber.get should be(thisLineNumber - 3)
e1.message should be(Some(Resources.onlyEmpty))
}
}
it("should throw NotAllowedException with correct stack depth and message when RHS contain duplicated value") {
val e1 = intercept[exceptions.NotAllowedException] {
fumList should (equal (fumList) and contain only ("fee", "fie", "foe", "fie", "fum"))
}
e1.failedCodeFileName.get should be (fileName)
e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
e1.message should be (Some(Resources.onlyDuplicate))
}
it("should throw TFE with friendly reminder when single GenTraversable argument is passed and failed") {
val e1 = intercept[TestFailedException] {
fumList should (equal (fumList) and contain only Many("happy", "birthday", "to", "you"))
}
checkMessageStackDepth(e1, Resources.equaled(decorateToStringValue(prettifier, fumList), decorateToStringValue(prettifier, fumList)) + ", but " + Resources.didNotContainOnlyElementsWithFriendlyReminder(decorateToStringValue(prettifier, fumList), decorateToStringValue(prettifier, Many("happy", "birthday", "to", "you"))), fileName, thisLineNumber - 2)
}
}
describe("when used with (be (..) and contain only (..))") {
it("should do nothing if valid, else throw a TFE with an appropriate error message") {
fumList should (be (fumList) and contain only ("fie", "fee", "fum", "foe"))
val e1 = intercept[TestFailedException] {
fumList should (be (toList) and contain only ("fee", "fie", "foe", "fum"))
}
checkMessageStackDepth(e1, Resources.wasNotEqualTo(decorateToStringValue(prettifier, fumList), decorateToStringValue(prettifier, toList)), fileName, thisLineNumber - 2)
val e2 = intercept[TestFailedException] {
fumList should (be (fumList) and contain only ("happy", "birthday", "to", "you"))
}
checkMessageStackDepth(e2, Resources.wasEqualTo(decorateToStringValue(prettifier, fumList), decorateToStringValue(prettifier, fumList)) + ", but " + Resources.didNotContainOnlyElements(decorateToStringValue(prettifier, fumList), "\\"happy\\", \\"birthday\\", \\"to\\", \\"you\\""), fileName, thisLineNumber - 2)
}
it("should use the implicit Equality in scope") {
implicit val ise = upperCaseStringEquality
fumList should (be (fumList) and contain only ("FEE", "FIE", "FOE", "FUM"))
val e1 = intercept[TestFailedException] {
fumList should (be (toList) and contain only ("FEE", "FIE", "FOE", "FUM"))
}
checkMessageStackDepth(e1, Resources.wasNotEqualTo(decorateToStringValue(prettifier, fumList), decorateToStringValue(prettifier, toList)), fileName, thisLineNumber - 2)
val e2 = intercept[TestFailedException] {
fumList should (be (fumList) and (contain only ("happy", "birthday", "to", "you")))
}
checkMessageStackDepth(e2, Resources.wasEqualTo(decorateToStringValue(prettifier, fumList), decorateToStringValue(prettifier, fumList)) + ", but " + Resources.didNotContainOnlyElements(decorateToStringValue(prettifier, fumList), "\\"happy\\", \\"birthday\\", \\"to\\", \\"you\\""), fileName, thisLineNumber - 2)
}
it("should use an explicitly provided Equality") {
(fumList should (be (fumList) and contain only ("FEE", "FIE", "FOE", "FUM"))) (decided by upperCaseStringEquality)
val e1 = intercept[TestFailedException] {
(fumList should (be (fumList) and contain only ("happy", "birthday", "to", "you"))) (decided by upperCaseStringEquality)
}
checkMessageStackDepth(e1, Resources.wasEqualTo(decorateToStringValue(prettifier, fumList), decorateToStringValue(prettifier, fumList)) + ", but " + Resources.didNotContainOnlyElements(decorateToStringValue(prettifier, fumList), "\\"happy\\", \\"birthday\\", \\"to\\", \\"you\\""), fileName, thisLineNumber - 2)
val e2 = intercept[TestFailedException] {
(fumList should (be (toList) and contain only ("FEE", "FIE", "FOE", "FUM"))) (decided by upperCaseStringEquality)
}
checkMessageStackDepth(e2, Resources.wasNotEqualTo(decorateToStringValue(prettifier, fumList), decorateToStringValue(prettifier, toList)), fileName, thisLineNumber - 2)
(fumList should (be (fumList) and contain only (" FEE ", " FIE ", " FOE ", " FUM "))) (after being lowerCased and trimmed)
}
it("should throw NotAllowedException with correct stack depth and message when RHS is empty") {
if (ScalaTestVersions.BuiltForScalaVersion != "2.13") { // For 2.13, the compiler will pass in args with single argument ().
val e1 = intercept[exceptions.NotAllowedException] {
fumList should (be(fumList) and contain only())
}
e1.failedCodeFileName.get should be(fileName)
e1.failedCodeLineNumber.get should be(thisLineNumber - 3)
e1.message should be(Some(Resources.onlyEmpty))
} else {
val e1 = intercept[exceptions.NotAllowedException] {
fumList should (be(fumList) and contain.only())
}
e1.failedCodeFileName.get should be(fileName)
e1.failedCodeLineNumber.get should be(thisLineNumber - 3)
e1.message should be(Some(Resources.onlyEmpty))
}
}
it("should throw NotAllowedException with correct stack depth and message when RHS contain duplicated value") {
val e1 = intercept[exceptions.NotAllowedException] {
fumList should (be (fumList) and contain only ("fee", "fie", "foe", "fie", "fum"))
}
e1.failedCodeFileName.get should be (fileName)
e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
e1.message should be (Some(Resources.onlyDuplicate))
}
it("should throw TFE with friendly reminder when single GenTraversable argument is passed and failed") {
val e2 = intercept[TestFailedException] {
fumList should (be (fumList) and contain only Many("happy", "birthday", "to", "you"))
}
checkMessageStackDepth(e2, Resources.wasEqualTo(decorateToStringValue(prettifier, fumList), decorateToStringValue(prettifier, fumList)) + ", but " + Resources.didNotContainOnlyElementsWithFriendlyReminder(decorateToStringValue(prettifier, fumList), decorateToStringValue(prettifier, Many("happy", "birthday", "to", "you"))), fileName, thisLineNumber - 2)
}
}
describe("when used with (contain only (..) and be (..))") {
it("should do nothing if valid, else throw a TFE with an appropriate error message") {
fumList should (contain only ("fie", "fee", "fum", "foe") and be (fumList))
val e1 = intercept[TestFailedException] {
fumList should (contain only ("fee", "fie", "foe", "fum") and be (toList))
}
checkMessageStackDepth(e1, Resources.containedOnlyElements(decorateToStringValue(prettifier, fumList), "\\"fee\\", \\"fie\\", \\"foe\\", \\"fum\\"") + ", but " + Resources.wasNotEqualTo(decorateToStringValue(prettifier, fumList), decorateToStringValue(prettifier, toList)), fileName, thisLineNumber - 2)
val e2 = intercept[TestFailedException] {
fumList should (contain only ("happy", "birthday", "to", "you") and be (fumList))
}
checkMessageStackDepth(e2, Resources.didNotContainOnlyElements(decorateToStringValue(prettifier, fumList), "\\"happy\\", \\"birthday\\", \\"to\\", \\"you\\""), fileName, thisLineNumber - 2)
}
it("should use the implicit Equality in scope") {
implicit val ise = upperCaseStringEquality
fumList should (contain only ("FIE", "FEE", "FUM", "FOE") and be (fumList))
val e1 = intercept[TestFailedException] {
fumList should (contain only ("FIE", "FEE", "FAM", "FOE") and be (toList))
}
checkMessageStackDepth(e1, Resources.didNotContainOnlyElements(decorateToStringValue(prettifier, fumList), "\\"FIE\\", \\"FEE\\", \\"FAM\\", \\"FOE\\""), fileName, thisLineNumber - 2)
val e2 = intercept[TestFailedException] {
fumList should (contain only ("HAPPY", "BIRTHDAY", "TO", "YOU") and (be (fumList)))
}
checkMessageStackDepth(e2, Resources.didNotContainOnlyElements(decorateToStringValue(prettifier, fumList), "\\"HAPPY\\", \\"BIRTHDAY\\", \\"TO\\", \\"YOU\\""), fileName, thisLineNumber - 2)
}
it("should use an explicitly provided Equality") {
(fumList should (contain only ("FIE", "FEE", "FUM", "FOE") and be (fumList))) (decided by upperCaseStringEquality)
val e1 = intercept[TestFailedException] {
(fumList should (contain only ("FIE", "FEE", "FAM", "FOE") and be (fumList))) (decided by upperCaseStringEquality)
}
checkMessageStackDepth(e1, Resources.didNotContainOnlyElements(decorateToStringValue(prettifier, fumList), "\\"FIE\\", \\"FEE\\", \\"FAM\\", \\"FOE\\""), fileName, thisLineNumber - 2)
val e2 = intercept[TestFailedException] {
(fumList should (contain only ("FIE", "FEE", "FUM", "FOE") and be (toList))) (decided by upperCaseStringEquality)
}
checkMessageStackDepth(e2, Resources.containedOnlyElements(decorateToStringValue(prettifier, fumList), "\\"FIE\\", \\"FEE\\", \\"FUM\\", \\"FOE\\"") + ", but " + Resources.wasNotEqualTo(decorateToStringValue(prettifier, fumList), decorateToStringValue(prettifier, toList)), fileName, thisLineNumber - 2)
(fumList should (contain only (" FEE ", " FIE ", " FOE ", " FUM ") and be (fumList))) (after being lowerCased and trimmed)
}
it("should throw NotAllowedException with correct stack depth and message when RHS is empty") {
if (ScalaTestVersions.BuiltForScalaVersion != "2.13") { // For 2.13, the compiler will pass in args with single argument ().
val e1 = intercept[exceptions.NotAllowedException] {
fumList should (contain only() and be(fumList))
}
e1.failedCodeFileName.get should be(fileName)
e1.failedCodeLineNumber.get should be(thisLineNumber - 3)
e1.message should be(Some(Resources.onlyEmpty))
} else {
val e1 = intercept[exceptions.NotAllowedException] {
fumList should (contain.only() and be(fumList))
}
e1.failedCodeFileName.get should be(fileName)
e1.failedCodeLineNumber.get should be(thisLineNumber - 3)
e1.message should be(Some(Resources.onlyEmpty))
}
}
it("should throw NotAllowedException with correct stack depth and message when RHS contain duplicated value") {
val e1 = intercept[exceptions.NotAllowedException] {
fumList should (contain only ("fee", "fie", "foe", "fie", "fum") and be (fumList))
}
e1.failedCodeFileName.get should be (fileName)
e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
e1.message should be (Some(Resources.onlyDuplicate))
}
it("should throw TFE with friendly reminder when single GenTraversable argument is passed and failed") {
val e1 = intercept[TestFailedException] {
fumList should (contain only Many("happy", "birthday", "to", "you") and be (fumList))
}
checkMessageStackDepth(e1, Resources.didNotContainOnlyElementsWithFriendlyReminder(decorateToStringValue(prettifier, fumList), decorateToStringValue(prettifier, Many("happy", "birthday", "to", "you"))), fileName, thisLineNumber - 2)
}
}
describe("when used with (not contain only xx and not contain only xx)") {
it("should do nothing if valid, else throw a TFE with an appropriate error message") {
fumList should (not contain only ("fee", "fie", "foe", "fuu") and not contain only ("fie", "fee", "fuu", "foe"))
val e1 = intercept[TestFailedException] {
fumList should (not contain only ("fee", "fie", "foe", "fum") and not contain only ("happy", "birthday", "to", "you"))
}
checkMessageStackDepth(e1, Resources.containedOnlyElements(decorateToStringValue(prettifier, fumList), "\\"fee\\", \\"fie\\", \\"foe\\", \\"fum\\""), fileName, thisLineNumber - 2)
val e2 = intercept[TestFailedException] {
fumList should (not contain only ("happy", "birthday", "to", "you") and not contain only ("fee", "fie", "foe", "fum"))
}
checkMessageStackDepth(e2, Resources.didNotContainOnlyElements(decorateToStringValue(prettifier, fumList), "\\"happy\\", \\"birthday\\", \\"to\\", \\"you\\"") + ", but " + Resources.containedOnlyElements(decorateToStringValue(prettifier, fumList), "\\"fee\\", \\"fie\\", \\"foe\\", \\"fum\\""), fileName, thisLineNumber - 2)
}
it("should use the implicit Equality in scope") {
implicit val ise = upperCaseStringEquality
fumList should (not contain only ("FIE", "FEE", "FAM", "FOE") and not contain only ("FIE", "FEE", "FOE", "FAM"))
val e1 = intercept[TestFailedException] {
fumList should (not contain only ("FIE", "FEE", "FUM", "FOE") and not contain only ("FIE", "FEE", "FOE", "FAM"))
}
checkMessageStackDepth(e1, Resources.containedOnlyElements(decorateToStringValue(prettifier, fumList), "\\"FIE\\", \\"FEE\\", \\"FUM\\", \\"FOE\\""), fileName, thisLineNumber - 2)
val e2 = intercept[TestFailedException] {
fumList should (not contain only ("FIE", "FEE", "FAM", "FOE") and (not contain only ("FIE", "FEE", "FOE", "FUM")))
}
        checkMessageStackDepth(e2, Resources.didNotContainOnlyElements(decorateToStringValue(prettifier, fumList), "\"FIE\", \"FEE\", \"FAM\", \"FOE\"") + ", but " + Resources.containedOnlyElements(decorateToStringValue(prettifier, fumList), "\"FIE\", \"FEE\", \"FOE\", \"FUM\""), fileName, thisLineNumber - 2)
}
it("should use an explicitly provided Equality") {
(fumList should (not contain only ("FIE", "FEE", "FAM", "FOE") and not contain only ("FIE", "FEE", "FOE", "FAM"))) (decided by upperCaseStringEquality, decided by upperCaseStringEquality)
val e1 = intercept[TestFailedException] {
(fumList should (not contain only ("FIE", "FEE", "FAM", "FOE") and not contain only ("FIE", "FEE", "FOE", "FUM"))) (decided by upperCaseStringEquality, decided by upperCaseStringEquality)
}
        checkMessageStackDepth(e1, Resources.didNotContainOnlyElements(decorateToStringValue(prettifier, fumList), "\"FIE\", \"FEE\", \"FAM\", \"FOE\"") + ", but " + Resources.containedOnlyElements(decorateToStringValue(prettifier, fumList), "\"FIE\", \"FEE\", \"FOE\", \"FUM\""), fileName, thisLineNumber - 2)
val e2 = intercept[TestFailedException] {
(fumList should (not contain only ("FIE", "FEE", "FUM", "FOE") and not contain only ("FIE", "FEE", "FOE", "FAM"))) (decided by upperCaseStringEquality, decided by upperCaseStringEquality)
}
        checkMessageStackDepth(e2, Resources.containedOnlyElements(decorateToStringValue(prettifier, fumList), "\"FIE\", \"FEE\", \"FUM\", \"FOE\""), fileName, thisLineNumber - 2)
(fumList should (contain only (" FEE ", " FIE ", " FOE ", " FUM ") and contain only (" FEE ", " FIE ", " FOE ", " FUM "))) (after being lowerCased and trimmed, after being lowerCased and trimmed)
}
it("should throw NotAllowedException with correct stack depth and message when RHS is empty") {
val e1 = intercept[exceptions.NotAllowedException] {
fumList should (not contain only () and not contain only ("fie", "fee", "fuu", "foe"))
}
e1.failedCodeFileName.get should be (fileName)
e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
e1.message should be (Some(Resources.onlyEmpty))
val e2 = intercept[exceptions.NotAllowedException] {
fumList should (not contain only ("fie", "fee", "fuu", "foe") and not contain only ())
}
e2.failedCodeFileName.get should be (fileName)
e2.failedCodeLineNumber.get should be (thisLineNumber - 3)
e2.message should be (Some(Resources.onlyEmpty))
}
it("should throw NotAllowedException with correct stack depth and message when RHS contain duplicated value") {
val e1 = intercept[exceptions.NotAllowedException] {
fumList should (not contain only ("fee", "fie", "foe", "fie", "fum") and not contain only ("fie", "fee", "fuu", "foe"))
}
e1.failedCodeFileName.get should be (fileName)
e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
e1.message should be (Some(Resources.onlyDuplicate))
val e2 = intercept[exceptions.NotAllowedException] {
fumList should (not contain only ("fie", "fee", "fuu", "foe") and not contain only ("fee", "fie", "foe", "fie", "fum"))
}
e2.failedCodeFileName.get should be (fileName)
e2.failedCodeLineNumber.get should be (thisLineNumber - 3)
e2.message should be (Some(Resources.onlyDuplicate))
}
it("should throw TFE with friendly reminder when single GenTraversable argument is passed and failed") {
val e1 = intercept[TestFailedException] {
One(Many("fee", "fie", "foe", "fum")) should (not contain only (Many("fee", "fie", "foe", "fum")) and not contain only ("happy", "birthday", "to", "you"))
}
checkMessageStackDepth(e1, Resources.containedOnlyElementsWithFriendlyReminder(decorateToStringValue(prettifier, One(Many("fee", "fie", "foe", "fum"))), decorateToStringValue(prettifier, Many("fee", "fie", "foe", "fum"))), fileName, thisLineNumber - 2)
val e2 = intercept[TestFailedException] {
One(Many("fee", "fie", "foe", "fum")) should (not contain only (Many("happy", "birthday", "to", "you")) and not contain only (Many("fee", "fie", "foe", "fum")))
}
checkMessageStackDepth(e2, Resources.didNotContainOnlyElementsWithFriendlyReminder(decorateToStringValue(prettifier, One(Many("fee", "fie", "foe", "fum"))), decorateToStringValue(prettifier, Many("happy", "birthday", "to", "you"))) + ", but " + Resources.containedOnlyElementsWithFriendlyReminder(decorateToStringValue(prettifier, One(Many("fee", "fie", "foe", "fum"))), decorateToStringValue(prettifier, Many("fee", "fie", "foe", "fum"))), fileName, thisLineNumber - 2)
}
}
describe("when used with (not equal (..) and not contain only (..))") {
it("should do nothing if valid, else throw a TFE with an appropriate error message") {
fumList should (not equal (toList) and not contain only ("fie", "fee", "fuu", "foe"))
val e1 = intercept[TestFailedException] {
fumList should (not equal (fumList) and not contain only ("happy", "birthday", "to", "you"))
}
checkMessageStackDepth(e1, Resources.equaled(decorateToStringValue(prettifier, fumList), decorateToStringValue(prettifier, fumList)), fileName, thisLineNumber - 2)
val e2 = intercept[TestFailedException] {
fumList should (not equal (toList) and not contain only ("fee", "fie", "foe", "fum"))
}
        checkMessageStackDepth(e2, Resources.didNotEqual(decorateToStringValue(prettifier, fumList), decorateToStringValue(prettifier, toList)) + ", but " + Resources.containedOnlyElements(decorateToStringValue(prettifier, fumList), "\"fee\", \"fie\", \"foe\", \"fum\""), fileName, thisLineNumber - 2)
}
it("should use the implicit Equality in scope") {
implicit val ise = upperCaseStringEquality
fumList should (not equal (toList) and not contain only ("FIE", "FEE", "FAM", "FOE"))
val e1 = intercept[TestFailedException] {
fumList should (not equal (fumList) and not contain only ("FIE", "FEE", "FAM", "FOE"))
}
checkMessageStackDepth(e1, Resources.equaled(decorateToStringValue(prettifier, fumList), decorateToStringValue(prettifier, fumList)), fileName, thisLineNumber - 2)
val e2 = intercept[TestFailedException] {
fumList should (not equal (toList) and (not contain only ("FIE", "FEE", "FUM", "FOE")))
}
        checkMessageStackDepth(e2, Resources.didNotEqual(decorateToStringValue(prettifier, fumList), decorateToStringValue(prettifier, toList)) + ", but " + Resources.containedOnlyElements(decorateToStringValue(prettifier, fumList), "\"FIE\", \"FEE\", \"FUM\", \"FOE\""), fileName, thisLineNumber - 2)
}
it("should use an explicitly provided Equality") {
(fumList should (not equal (fumList) and not contain only ("FIE", "FEE", "FAM", "FOE"))) (decided by invertedListOfStringEquality, decided by upperCaseStringEquality)
val e1 = intercept[TestFailedException] {
(fumList should (not equal (fumList) and not contain only ("FIE", "FEE", "FUM", "FOE"))) (decided by invertedListOfStringEquality, decided by upperCaseStringEquality)
}
        checkMessageStackDepth(e1, Resources.didNotEqual(decorateToStringValue(prettifier, fumList), decorateToStringValue(prettifier, fumList)) + ", but " + Resources.containedOnlyElements(decorateToStringValue(prettifier, fumList), "\"FIE\", \"FEE\", \"FUM\", \"FOE\""), fileName, thisLineNumber - 2)
val e2 = intercept[TestFailedException] {
(fumList should (not equal (toList) and not contain only ("FIE", "FEE", "FAM", "FOE"))) (decided by invertedListOfStringEquality, decided by upperCaseStringEquality)
}
checkMessageStackDepth(e2, Resources.equaled(decorateToStringValue(prettifier, fumList), decorateToStringValue(prettifier, toList)), fileName, thisLineNumber - 2)
(fumList should (not contain only (" FEE ", " FIE ", " FOE ", " FUU ") and not contain only (" FEE ", " FIE ", " FOE ", " FUU "))) (after being lowerCased and trimmed, after being lowerCased and trimmed)
}
it("should throw NotAllowedException with correct stack depth and message when RHS is empty") {
val e1 = intercept[exceptions.NotAllowedException] {
fumList should (not equal (toList) and not contain only ())
}
e1.failedCodeFileName.get should be (fileName)
e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
e1.message should be (Some(Resources.onlyEmpty))
}
it("should throw NotAllowedException with correct stack depth and message when RHS contain duplicated value") {
val e1 = intercept[exceptions.NotAllowedException] {
fumList should (not equal (toList) and not contain only ("fee", "fie", "foe", "fie", "fum"))
}
e1.failedCodeFileName.get should be (fileName)
e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
e1.message should be (Some(Resources.onlyDuplicate))
}
it("should throw TFE with friendly reminder when single GenTraversable argument is passed and failed") {
val e1 = intercept[TestFailedException] {
One(Many("fee", "fie", "foe", "fum")) should (not equal (toList) and not contain only (Many("fee", "fie", "foe", "fum")))
}
checkMessageStackDepth(e1, Resources.didNotEqual(decorateToStringValue(prettifier, One(Many("fee", "fie", "foe", "fum"))), decorateToStringValue(prettifier, toList)) + ", but " + Resources.containedOnlyElementsWithFriendlyReminder(decorateToStringValue(prettifier, One(Many("fee", "fie", "foe", "fum"))), decorateToStringValue(prettifier, Many("fee", "fie", "foe", "fum"))), fileName, thisLineNumber - 2)
}
}
describe("when used with (not be (..) and not contain only (..))") {
it("should do nothing if valid, else throw a TFE with an appropriate error message") {
fumList should (not be (toList) and not contain only ("fie", "fee", "fuu", "foe"))
val e1 = intercept[TestFailedException] {
fumList should (not be (fumList) and not contain only ("happy", "birthday", "to", "you"))
}
checkMessageStackDepth(e1, Resources.wasEqualTo(decorateToStringValue(prettifier, fumList), decorateToStringValue(prettifier, fumList)), fileName, thisLineNumber - 2)
val e2 = intercept[TestFailedException] {
fumList should (not be (toList) and not contain only ("fee", "fie", "foe", "fum"))
}
        checkMessageStackDepth(e2, Resources.wasNotEqualTo(decorateToStringValue(prettifier, fumList), decorateToStringValue(prettifier, toList)) + ", but " + Resources.containedOnlyElements(decorateToStringValue(prettifier, fumList), "\"fee\", \"fie\", \"foe\", \"fum\""), fileName, thisLineNumber - 2)
}
it("should use the implicit Equality in scope") {
implicit val ise = upperCaseStringEquality
fumList should (not be (toList) and not contain only ("FIE", "FEE", "FAM", "FOE"))
val e1 = intercept[TestFailedException] {
fumList should (not be (fumList) and not contain only ("FIE", "FEE", "FAM", "FOE"))
}
checkMessageStackDepth(e1, Resources.wasEqualTo(decorateToStringValue(prettifier, fumList), decorateToStringValue(prettifier, fumList)), fileName, thisLineNumber - 2)
val e2 = intercept[TestFailedException] {
fumList should (not be (toList) and (not contain only ("FIE", "FEE", "FUM", "FOE")))
}
        checkMessageStackDepth(e2, Resources.wasNotEqualTo(decorateToStringValue(prettifier, fumList), decorateToStringValue(prettifier, toList)) + ", but " + Resources.containedOnlyElements(decorateToStringValue(prettifier, fumList), "\"FIE\", \"FEE\", \"FUM\", \"FOE\""), fileName, thisLineNumber - 2)
}
it("should use an explicitly provided Equality") {
(fumList should (not be (toList) and not contain only ("FIE", "FEE", "FAM", "FOE"))) (decided by upperCaseStringEquality)
val e1 = intercept[TestFailedException] {
(fumList should (not be (toList) and not contain only ("FIE", "FEE", "FUM", "FOE"))) (decided by upperCaseStringEquality)
}
        checkMessageStackDepth(e1, Resources.wasNotEqualTo(decorateToStringValue(prettifier, fumList), decorateToStringValue(prettifier, toList)) + ", but " + Resources.containedOnlyElements(decorateToStringValue(prettifier, fumList), "\"FIE\", \"FEE\", \"FUM\", \"FOE\""), fileName, thisLineNumber - 2)
val e2 = intercept[TestFailedException] {
(fumList should (not be (fumList) and not contain only ("FIE", "FEE", "FAM", "FOE"))) (decided by upperCaseStringEquality)
}
checkMessageStackDepth(e2, Resources.wasEqualTo(decorateToStringValue(prettifier, fumList), decorateToStringValue(prettifier, fumList)), fileName, thisLineNumber - 2)
(fumList should (not contain only (" FEE ", " FIE ", " FOE ", " FUU ") and not contain only (" FEE ", " FIE ", " FOE ", " FUU "))) (after being lowerCased and trimmed, after being lowerCased and trimmed)
}
it("should throw NotAllowedException with correct stack depth and message when RHS is empty") {
val e1 = intercept[exceptions.NotAllowedException] {
fumList should (not be (toList) and not contain only ())
}
e1.failedCodeFileName.get should be (fileName)
e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
e1.message should be (Some(Resources.onlyEmpty))
}
it("should throw NotAllowedException with correct stack depth and message when RHS contain duplicated value") {
val e1 = intercept[exceptions.NotAllowedException] {
fumList should (not be (toList) and not contain only ("fee", "fie", "foe", "fie", "fum"))
}
e1.failedCodeFileName.get should be (fileName)
e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
e1.message should be (Some(Resources.onlyDuplicate))
}
it("should throw TFE with friendly reminder when single GenTraversable argument is passed and failed") {
val e1 = intercept[TestFailedException] {
One(Many("fee", "fie", "foe", "fum")) should (not be (toList) and not contain only (Many("fee", "fie", "foe", "fum")))
}
checkMessageStackDepth(e1, Resources.wasNotEqualTo(decorateToStringValue(prettifier, One(Many("fee", "fie", "foe", "fum"))), decorateToStringValue(prettifier, toList)) + ", but " + Resources.containedOnlyElementsWithFriendlyReminder(decorateToStringValue(prettifier, One(Many("fee", "fie", "foe", "fum"))), decorateToStringValue(prettifier, Many("fee", "fie", "foe", "fum"))), fileName, thisLineNumber - 2)
}
}
}
describe("every of Everys") {
val list1s: Every[Every[Int]] = Every(Every(3, 2, 1), Every(3, 2, 1), Every(3, 2, 1))
val lists: Every[Every[Int]] = Every(Every(3, 2, 1), Every(3, 2, 1), Every(4, 3, 2))
val hiLists: Every[Every[String]] = Every(Every("hi", "hello"), Every("hi", "hello"), Every("hi", "hello"))
def allErrMsg(index: Int, message: String, lineNumber: Int, left: Any): String =
"'all' inspection failed, because: \\n" +
" at index " + index + ", " + message + " (" + fileName + ":" + (lineNumber) + ") \\n" +
"in " + decorateToStringValue(prettifier, left)
describe("used with contain only xx and contain only xx") {
it("should do nothing if valid, else throw a TFE with an appropriate error message") {
all (list1s) should (contain only (3, 2, 1) and contain only (1, 3, 2))
atLeast (2, lists) should (contain only (3, 1, 2) and contain only (2, 3, 1))
atMost (2, lists) should (contain only (3, 1, 2) and contain only (2, 3, 1))
no (lists) should (contain only (3, 6, 9) and contain only (3, 4, 5))
val e1 = intercept[TestFailedException] {
all (lists) should (contain only (1, 2, 3) and contain only (1, 3, 2))
}
checkMessageStackDepth(e1, allErrMsg(2, decorateToStringValue(prettifier, Many(4, 3, 2)) + " did not contain only " + "(1, 2, 3)", thisLineNumber - 2, lists), fileName, thisLineNumber - 2)
val e2 = intercept[TestFailedException] {
all (list1s) should (contain only (1, 2, 3) and contain only (1, 3, 4))
}
checkMessageStackDepth(e2, allErrMsg(0, decorateToStringValue(prettifier, Many(3, 2, 1)) + " contained only " + "(1, 2, 3)" + ", but " + decorateToStringValue(prettifier, Many(3, 2, 1)) + " did not contain only " + "(1, 3, 4)", thisLineNumber - 2, list1s), fileName, thisLineNumber - 2)
val e4 = intercept[TestFailedException] {
all (hiLists) should (contain only ("hi", "hello") and contain only ("ho", "hey", "howdy"))
}
        checkMessageStackDepth(e4, allErrMsg(0, decorateToStringValue(prettifier, Many("hi", "hello")) + " contained only " + "(\"hi\", \"hello\")" + ", but " + decorateToStringValue(prettifier, Many("hi", "hello")) + " did not contain only " + "(\"ho\", \"hey\", \"howdy\")", thisLineNumber - 2, hiLists), fileName, thisLineNumber - 2)
}
it("should use the implicit Equality in scope") {
implicit val ise = upperCaseStringEquality
all (hiLists) should (contain only ("HELLO", "HI") and contain only ("HI", "HELLO"))
val e1 = intercept[TestFailedException] {
all (hiLists) should (contain only ("HO", "HELLO") and contain only ("HI", "HELLO"))
}
        checkMessageStackDepth(e1, allErrMsg(0, decorateToStringValue(prettifier, Many("hi", "hello")) + " did not contain only " + "(\"HO\", \"HELLO\")", thisLineNumber - 2, hiLists), fileName, thisLineNumber - 2)
val e2 = intercept[TestFailedException] {
all (hiLists) should (contain only ("HELLO", "HI") and contain only ("HO", "HELLO"))
}
        checkMessageStackDepth(e2, allErrMsg(0, decorateToStringValue(prettifier, Many("hi", "hello")) + " contained only " + "(\"HELLO\", \"HI\")" + ", but " + decorateToStringValue(prettifier, Many("hi", "hello")) + " did not contain only " + "(\"HO\", \"HELLO\")", thisLineNumber - 2, hiLists), fileName, thisLineNumber - 2)
}
it("should use an explicitly provided Equality") {
(all (hiLists) should (contain only ("HELLO", "HI") and contain only ("HI", "HELLO"))) (decided by upperCaseStringEquality, decided by upperCaseStringEquality)
val e1 = intercept[TestFailedException] {
(all (hiLists) should (contain only ("HO", "HELLO") and contain only ("HI", "HELLO"))) (decided by upperCaseStringEquality, decided by upperCaseStringEquality)
}
        checkMessageStackDepth(e1, allErrMsg(0, decorateToStringValue(prettifier, Many("hi", "hello")) + " did not contain only " + "(\"HO\", \"HELLO\")", thisLineNumber - 2, hiLists), fileName, thisLineNumber - 2)
val e2 = intercept[TestFailedException] {
(all (hiLists) should (contain only ("HELLO", "HI") and contain only ("HO", "HELLO"))) (decided by upperCaseStringEquality, decided by upperCaseStringEquality)
}
        checkMessageStackDepth(e2, allErrMsg(0, decorateToStringValue(prettifier, Many("hi", "hello")) + " contained only " + "(\"HELLO\", \"HI\")" + ", but " + decorateToStringValue(prettifier, Many("hi", "hello")) + " did not contain only " + "(\"HO\", \"HELLO\")", thisLineNumber - 2, hiLists), fileName, thisLineNumber - 2)
}
it("should throw NotAllowedException with correct stack depth and message when RHS is empty") {
if (ScalaTestVersions.BuiltForScalaVersion != "2.13") { // For 2.13, the compiler will pass in args with single argument ().
val e1 = intercept[exceptions.NotAllowedException] {
all(list1s) should (contain only() and contain only(1, 3, 2))
}
e1.failedCodeFileName.get should be(fileName)
e1.failedCodeLineNumber.get should be(thisLineNumber - 3)
e1.message should be(Some(Resources.onlyEmpty))
val e2 = intercept[exceptions.NotAllowedException] {
all(list1s) should (contain only(1, 3, 2) and contain only())
}
e2.failedCodeFileName.get should be(fileName)
e2.failedCodeLineNumber.get should be(thisLineNumber - 3)
e2.message should be(Some(Resources.onlyEmpty))
} else {
val e1 = intercept[exceptions.NotAllowedException] {
all(list1s) should (contain.only() and contain only(1, 3, 2))
}
e1.failedCodeFileName.get should be(fileName)
e1.failedCodeLineNumber.get should be(thisLineNumber - 3)
e1.message should be(Some(Resources.onlyEmpty))
val e2 = intercept[exceptions.NotAllowedException] {
all(list1s) should (contain only(1, 3, 2) and contain.only())
}
e2.failedCodeFileName.get should be(fileName)
e2.failedCodeLineNumber.get should be(thisLineNumber - 3)
e2.message should be(Some(Resources.onlyEmpty))
}
}
it("should throw NotAllowedException with correct stack depth and message when RHS contain duplicated value") {
val e1 = intercept[exceptions.NotAllowedException] {
all (list1s) should (contain only (3, 2, 2, 1) and contain only (1, 3, 2))
}
e1.failedCodeFileName.get should be (fileName)
e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
e1.message should be (Some(Resources.onlyDuplicate))
val e2 = intercept[exceptions.NotAllowedException] {
all (list1s) should (contain only (1, 3, 2) and contain only (3, 2, 2, 1))
}
e2.failedCodeFileName.get should be (fileName)
e2.failedCodeLineNumber.get should be (thisLineNumber - 3)
e2.message should be (Some(Resources.onlyDuplicate))
}
it("should throw TFE with friendly reminder when single GenTraversable argument is passed and failed") {
val e1 = intercept[TestFailedException] {
all (One(One(Many(3, 2, 1), Many(3, 2, 1), Many(4, 3, 2)))) should (contain only Many(1, 2, 3) and contain only (1, 3, 2))
}
checkMessageStackDepth(e1, allErrMsg(0, decorateToStringValue(prettifier, One(Many(3, 2, 1), Many(3, 2, 1), Many(4, 3, 2))) + " did not contain only " + "(" + decorateToStringValue(prettifier, Many(1, 2, 3)) + "), did you forget to say : _*", thisLineNumber - 2, One(One(Many(3, 2, 1), Many(3, 2, 1), Many(4, 3, 2)))), fileName, thisLineNumber - 2)
val e2 = intercept[TestFailedException] {
all (One(One(Many(1, 2, 3)))) should (contain only Many(1, 2, 3) and contain only Many(1, 3, 4))
}
checkMessageStackDepth(e2, allErrMsg(0, decorateToStringValue(prettifier, One(Many(1, 2, 3))) + " contained only (" + decorateToStringValue(prettifier, Many(1, 2, 3)) + "), did you forget to say : _*, but " + decorateToStringValue(prettifier, One(Many(1, 2, 3))) + " did not contain only " + "(" + decorateToStringValue(prettifier, Many(1, 3, 4)) + "), did you forget to say : _*", thisLineNumber - 2, One(One(Many(1, 2, 3)))), fileName, thisLineNumber - 2)
val e3 = intercept[TestFailedException] {
all (One(One(Many("hi", "hello")))) should (contain only Many("hi", "hello") and contain only Many("ho", "hey", "howdy"))
}
checkMessageStackDepth(e3, allErrMsg(0, decorateToStringValue(prettifier, One(Many("hi", "hello"))) + " contained only (" + decorateToStringValue(prettifier, Many("hi", "hello")) + "), did you forget to say : _*, but " + decorateToStringValue(prettifier, One(Many("hi", "hello"))) + " did not contain only " + "(" + decorateToStringValue(prettifier, Many("ho", "hey", "howdy")) + "), did you forget to say : _*", thisLineNumber - 2, One(One(Many("hi", "hello")))), fileName, thisLineNumber - 2)
}
}
describe("when used with (be (..) and contain only (..))") {
it("should do nothing if valid, else throw a TFE with an appropriate error message") {
all (list1s) should (be (Many(3, 2, 1)) and contain only (1, 3, 2))
atLeast (2, lists) should (be (Many(3, 2, 1)) and contain only (1, 3, 2))
atMost (2, lists) should (be (Many(3, 2, 1)) and contain only (2, 3, 1))
no (lists) should (be (Many(3, 6, 9)) and contain only (3, 4, 5))
val e1 = intercept[TestFailedException] {
all (lists) should (be (Many(3, 2, 1)) and contain only (1, 3, 2))
}
checkMessageStackDepth(e1, allErrMsg(2, decorateToStringValue(prettifier, Many(4, 3, 2)) + " was not equal to " + decorateToStringValue(prettifier, Many(3, 2, 1)), thisLineNumber - 2, lists), fileName, thisLineNumber - 2)
val e2 = intercept[TestFailedException] {
all (list1s) should (be (Many(3, 2, 1)) and contain only (2, 3, 8))
}
checkMessageStackDepth(e2, allErrMsg(0, decorateToStringValue(prettifier, Many(3, 2, 1)) + " was equal to " + decorateToStringValue(prettifier, Many(3, 2, 1)) + ", but " + decorateToStringValue(prettifier, Many(3, 2, 1)) + " did not contain only " + "(2, 3, 8)", thisLineNumber - 2, list1s), fileName, thisLineNumber - 2)
val e3 = intercept[TestFailedException] {
all (hiLists) should (be (Many("hi", "hello")) and contain only ("ho", "hey", "howdy"))
}
        checkMessageStackDepth(e3, allErrMsg(0, decorateToStringValue(prettifier, Many("hi", "hello")) + " was equal to " + decorateToStringValue(prettifier, Many("hi", "hello")) + ", but " + decorateToStringValue(prettifier, Many("hi", "hello")) + " did not contain only " + "(\"ho\", \"hey\", \"howdy\")", thisLineNumber - 2, hiLists), fileName, thisLineNumber - 2)
val e4 = intercept[TestFailedException] {
all (list1s) should (be (Many(3, 2, 1)) and contain only (2, 3, 8))
}
checkMessageStackDepth(e4, allErrMsg(0, decorateToStringValue(prettifier, Many(3, 2, 1)) + " was equal to " + decorateToStringValue(prettifier, Many(3, 2, 1)) + ", but " + decorateToStringValue(prettifier, Many(3, 2, 1)) + " did not contain only " + "(2, 3, 8)", thisLineNumber - 2, list1s), fileName, thisLineNumber - 2)
}
it("should use the implicit Equality in scope") {
implicit val ise = upperCaseStringEquality
all (hiLists) should (be (Many("hi", "hello")) and contain only ("HELLO", "HI"))
val e1 = intercept[TestFailedException] {
all (hiLists) should (be (Many("HI", "HELLO")) and contain only ("HELLO", "HI"))
}
checkMessageStackDepth(e1, allErrMsg(0, decorateToStringValue(prettifier, Many("hi", "hello")) + " was not equal to " + decorateToStringValue(prettifier, Many("HI", "HELLO")), thisLineNumber - 2, hiLists), fileName, thisLineNumber - 2)
val e2 = intercept[TestFailedException] {
all (hiLists) should (be (Many("hi", "hello")) and contain only ("HO", "HELLO"))
}
        checkMessageStackDepth(e2, allErrMsg(0, decorateToStringValue(prettifier, Many("hi", "hello")) + " was equal to " + decorateToStringValue(prettifier, Many("hi", "hello")) + ", but " + decorateToStringValue(prettifier, Many("hi", "hello")) + " did not contain only " + "(\"HO\", \"HELLO\")", thisLineNumber - 2, hiLists), fileName, thisLineNumber - 2)
}
it("should use an explicitly provided Equality") {
(all (hiLists) should (be (Many("hi", "hello")) and contain only ("HELLO", "HI"))) (decided by upperCaseStringEquality)
val e1 = intercept[TestFailedException] {
(all (hiLists) should (be (Many("HI", "HELLO")) and contain only ("HELLO", "HI"))) (decided by upperCaseStringEquality)
}
checkMessageStackDepth(e1, allErrMsg(0, decorateToStringValue(prettifier, Many("hi", "hello")) + " was not equal to " + decorateToStringValue(prettifier, Many("HI", "HELLO")), thisLineNumber - 2, hiLists), fileName, thisLineNumber - 2)
val e2 = intercept[TestFailedException] {
(all (hiLists) should (be (Many("hi", "hello")) and contain only ("HO", "HELLO"))) (decided by upperCaseStringEquality)
}
        checkMessageStackDepth(e2, allErrMsg(0, decorateToStringValue(prettifier, Many("hi", "hello")) + " was equal to " + decorateToStringValue(prettifier, Many("hi", "hello")) + ", but " + decorateToStringValue(prettifier, Many("hi", "hello")) + " did not contain only " + "(\"HO\", \"HELLO\")", thisLineNumber - 2, hiLists), fileName, thisLineNumber - 2)
}
it("should throw NotAllowedException with correct stack depth and message when RHS is empty") {
if (ScalaTestVersions.BuiltForScalaVersion != "2.13") { // For 2.13, the compiler will pass in args with single argument ().
val e1 = intercept[exceptions.NotAllowedException] {
all(list1s) should (be(Many(3, 2, 1)) and contain only())
}
e1.failedCodeFileName.get should be(fileName)
e1.failedCodeLineNumber.get should be(thisLineNumber - 3)
e1.message should be(Some(Resources.onlyEmpty))
} else {
val e1 = intercept[exceptions.NotAllowedException] {
all(list1s) should (be(Many(3, 2, 1)) and contain.only())
}
e1.failedCodeFileName.get should be(fileName)
e1.failedCodeLineNumber.get should be(thisLineNumber - 3)
e1.message should be(Some(Resources.onlyEmpty))
}
}
it("should throw NotAllowedException with correct stack depth and message when RHS contain duplicated value") {
val e1 = intercept[exceptions.NotAllowedException] {
all (list1s) should (be (Many(3, 2, 1)) and contain only (3, 2, 2, 1))
}
e1.failedCodeFileName.get should be (fileName)
e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
e1.message should be (Some(Resources.onlyDuplicate))
}
it("should throw TFE with friendly reminder when single GenTraversable argument is passed and failed") {
val e1 = intercept[TestFailedException] {
all (One(One(Many(3, 2, 1)))) should (be (One(Many(3, 2, 1))) and contain only Many(2, 3, 8))
}
checkMessageStackDepth(e1, allErrMsg(0, decorateToStringValue(prettifier, One(Many(3, 2, 1))) + " was equal to " + decorateToStringValue(prettifier, One(Many(3, 2, 1))) + ", but " + decorateToStringValue(prettifier, One(Many(3, 2, 1))) + " did not contain only (" + decorateToStringValue(prettifier, Many(2, 3, 8)) + "), did you forget to say : _*", thisLineNumber - 2, One(One(Many(3, 2, 1)))), fileName, thisLineNumber - 2)
val e2 = intercept[TestFailedException] {
all (One(One(Many("hi", "hello")))) should (be (One(Many("hi", "hello"))) and contain only Many("ho", "hey", "howdy"))
}
checkMessageStackDepth(e2, allErrMsg(0, decorateToStringValue(prettifier, One(Many("hi", "hello"))) + " was equal to " + decorateToStringValue(prettifier, One(Many("hi", "hello"))) + ", but " + decorateToStringValue(prettifier, One(Many("hi", "hello"))) + " did not contain only (" + decorateToStringValue(prettifier, Many("ho", "hey", "howdy")) + "), did you forget to say : _*", thisLineNumber - 2, One(One(Many("hi", "hello")))), fileName, thisLineNumber - 2)
val e3 = intercept[TestFailedException] {
all (One(One(Many(3, 2, 1)))) should (be (One(Many(3, 2, 1))) and contain only Many(2, 3, 8))
}
checkMessageStackDepth(e3, allErrMsg(0, decorateToStringValue(prettifier, One(Many(3, 2, 1))) + " was equal to " + decorateToStringValue(prettifier, One(Many(3, 2, 1))) + ", but " + decorateToStringValue(prettifier, One(Many(3, 2, 1))) + " did not contain only (" + decorateToStringValue(prettifier, Many(2, 3, 8)) + "), did you forget to say : _*", thisLineNumber - 2, One(One(Many(3, 2, 1)))), fileName, thisLineNumber - 2)
}
}
describe("when used with (not contain only xx and not contain only xx)") {
it("should do nothing if valid, else throw a TFE with an appropriate error message") {
all (list1s) should (not contain only (3, 2, 8) and not contain only (8, 3, 4))
atLeast (2, lists) should (not contain only (3, 8, 5) and not contain only (8, 3, 4))
atMost (2, lists) should (not contain only (2, 4, 3) and contain only (4, 3, 2))
no (list1s) should (not contain only (1, 2, 3) and not contain only (1, 3, 2))
val e1 = intercept[TestFailedException] {
all (lists) should (not contain only (2, 3, 4) and not contain only (8, 3, 4))
}
checkMessageStackDepth(e1, allErrMsg(2, decorateToStringValue(prettifier, Many(4, 3, 2)) + " contained only " + "(2, 3, 4)", thisLineNumber - 2, lists), fileName, thisLineNumber - 2)
val e2 = intercept[TestFailedException] {
all (lists) should (not contain only (3, 6, 8) and not contain only (2, 3, 4))
}
checkMessageStackDepth(e2, allErrMsg(2, decorateToStringValue(prettifier, Many(4, 3, 2)) + " did not contain only " + "(3, 6, 8)" + ", but " + decorateToStringValue(prettifier, Many(4, 3, 2)) + " contained only " + "(2, 3, 4)", thisLineNumber - 2, lists), fileName, thisLineNumber - 2)
val e3 = intercept[TestFailedException] {
all (hiLists) should (not contain only ("hello", "hi") and not contain only ("ho", "hey", "howdy"))
}
        checkMessageStackDepth(e3, allErrMsg(0, decorateToStringValue(prettifier, Many("hi", "hello")) + " contained only " + "(\"hello\", \"hi\")", thisLineNumber - 2, hiLists), fileName, thisLineNumber - 2)
val e4 = intercept[TestFailedException] {
all (hiLists) should (not contain only ("ho", "hey", "howdy") and not contain only ("hello", "hi"))
}
        checkMessageStackDepth(e4, allErrMsg(0, decorateToStringValue(prettifier, Many("hi", "hello")) + " did not contain only " + "(\"ho\", \"hey\", \"howdy\")" + ", but " + decorateToStringValue(prettifier, Many("hi", "hello")) + " contained only " + "(\"hello\", \"hi\")", thisLineNumber - 2, hiLists), fileName, thisLineNumber - 2)
}
it("should use the implicit Equality in scope") {
implicit val ise = upperCaseStringEquality
all (hiLists) should (not contain only ("HI") and not contain only ("HO"))
val e1 = intercept[TestFailedException] {
all (hiLists) should (not contain only ("HELLO", "HI") and not contain only ("HO"))
}
        checkMessageStackDepth(e1, allErrMsg(0, decorateToStringValue(prettifier, Many("hi", "hello")) + " contained only " + "(\"HELLO\", \"HI\")", thisLineNumber - 2, hiLists), fileName, thisLineNumber - 2)
val e2 = intercept[TestFailedException] {
all (hiLists) should (not contain only ("HI") and not contain only ("HELLO", "HI"))
}
        checkMessageStackDepth(e2, allErrMsg(0, decorateToStringValue(prettifier, Many("hi", "hello")) + " did not contain only " + "(\"HI\")" + ", but " + decorateToStringValue(prettifier, Many("hi", "hello")) + " contained only " + "(\"HELLO\", \"HI\")", thisLineNumber - 2, hiLists), fileName, thisLineNumber - 2)
}
it("should use an explicitly provided Equality") {
(all (hiLists) should (not contain only ("HI") and not contain only ("HO"))) (decided by upperCaseStringEquality, decided by upperCaseStringEquality)
val e1 = intercept[TestFailedException] {
(all (hiLists) should (not contain only ("HELLO", "HI") and not contain only ("HO"))) (decided by upperCaseStringEquality, decided by upperCaseStringEquality)
}
        checkMessageStackDepth(e1, allErrMsg(0, decorateToStringValue(prettifier, Many("hi", "hello")) + " contained only " + "(\"HELLO\", \"HI\")", thisLineNumber - 2, hiLists), fileName, thisLineNumber - 2)
val e2 = intercept[TestFailedException] {
(all (hiLists) should (not contain only ("HI") and not contain only ("HELLO", "HI"))) (decided by upperCaseStringEquality, decided by upperCaseStringEquality)
}
        checkMessageStackDepth(e2, allErrMsg(0, decorateToStringValue(prettifier, Many("hi", "hello")) + " did not contain only " + "(\"HI\")" + ", but " + decorateToStringValue(prettifier, Many("hi", "hello")) + " contained only " + "(\"HELLO\", \"HI\")", thisLineNumber - 2, hiLists), fileName, thisLineNumber - 2)
}
it("should throw NotAllowedException with correct stack depth and message when RHS is empty") {
val e1 = intercept[exceptions.NotAllowedException] {
all (list1s) should (not contain only () and not contain only (8, 3, 4))
}
e1.failedCodeFileName.get should be (fileName)
e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
e1.message should be (Some(Resources.onlyEmpty))
val e2 = intercept[exceptions.NotAllowedException] {
all (list1s) should (not contain only (8, 3, 4) and not contain only ())
}
e2.failedCodeFileName.get should be (fileName)
e2.failedCodeLineNumber.get should be (thisLineNumber - 3)
e2.message should be (Some(Resources.onlyEmpty))
}
it("should throw NotAllowedException with correct stack depth and message when RHS contain duplicated value") {
val e1 = intercept[exceptions.NotAllowedException] {
all (list1s) should (not contain only (3, 2, 2, 1) and not contain only (8, 3, 4))
}
e1.failedCodeFileName.get should be (fileName)
e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
e1.message should be (Some(Resources.onlyDuplicate))
val e2 = intercept[exceptions.NotAllowedException] {
all (list1s) should (not contain only (8, 3, 4) and not contain only (3, 2, 2, 1))
}
e2.failedCodeFileName.get should be (fileName)
e2.failedCodeLineNumber.get should be (thisLineNumber - 3)
e2.message should be (Some(Resources.onlyDuplicate))
}
it("should throw TFE with friendly reminder when single GenTraversable argument is passed and failed") {
val e1 = intercept[TestFailedException] {
all (One(One(Many(3, 2, 1)))) should (not contain only (Many(3, 2, 1)) and not contain only (8, 3, 4))
}
checkMessageStackDepth(e1, allErrMsg(0, decorateToStringValue(prettifier, One(Many(3, 2, 1))) + " contained only (" + decorateToStringValue(prettifier, Many(3, 2, 1)) + "), did you forget to say : _*", thisLineNumber - 2, One(One(Many(3, 2, 1)))), fileName, thisLineNumber - 2)
val e2 = intercept[TestFailedException] {
all (One(One(Many(3, 2, 1)))) should (not contain only (Many(3, 6, 8)) and not contain only (Many(3, 2, 1)))
}
checkMessageStackDepth(e2, allErrMsg(0, decorateToStringValue(prettifier, One(Many(3, 2, 1))) + " did not contain only (" + decorateToStringValue(prettifier, Many(3, 6, 8)) + "), did you forget to say : _*" + ", but " + decorateToStringValue(prettifier, One(Many(3, 2, 1))) + " contained only (" + decorateToStringValue(prettifier, Many(3, 2, 1)) + "), did you forget to say : _*", thisLineNumber - 2, One(One(Many(3, 2, 1)))), fileName, thisLineNumber - 2)
val e3 = intercept[TestFailedException] {
all (One(One(Many("hi", "hello")))) should (not contain only (Many("hi", "hello")) and not contain only ("ho", "hey", "howdy"))
}
checkMessageStackDepth(e3, allErrMsg(0, decorateToStringValue(prettifier, One(Many("hi", "hello"))) + " contained only (" + decorateToStringValue(prettifier, Many("hi", "hello")) + "), did you forget to say : _*", thisLineNumber - 2, One(One(Many("hi", "hello")))), fileName, thisLineNumber - 2)
val e4 = intercept[TestFailedException] {
all (One(One(Many("hi", "hello")))) should (not contain only (Many("ho", "hey", "howdy")) and not contain only (Many("hi", "hello")))
}
checkMessageStackDepth(e4, allErrMsg(0, decorateToStringValue(prettifier, One(Many("hi", "hello"))) + " did not contain only (" + decorateToStringValue(prettifier, Many("ho", "hey", "howdy")) + "), did you forget to say : _*, but " + decorateToStringValue(prettifier, One(Many("hi", "hello"))) + " contained only (" + decorateToStringValue(prettifier, Many("hi", "hello")) + "), did you forget to say : _*", thisLineNumber - 2, One(One(Many("hi", "hello")))), fileName, thisLineNumber - 2)
}
}
describe("when used with (not be (..) and not contain only (..))") {
it("should do nothing if valid, else throw a TFE with an appropriate error message") {
all (list1s) should (not be (One(2)) and not contain only (8, 3, 4))
atLeast (2, lists) should (not be (One(3)) and not contain only (8, 3, 4))
atMost (2, lists) should (not be (Many(4, 3, 2)) and not contain only (3, 4, 2))
no (list1s) should (not be (Many(3, 2, 1)) and not contain only (1, 2, 3))
val e1 = intercept[TestFailedException] {
all (lists) should (not be (Many(4, 3, 2)) and not contain only (8, 3, 4))
}
checkMessageStackDepth(e1, allErrMsg(2, decorateToStringValue(prettifier, Many(4, 3, 2)) + " was equal to " + decorateToStringValue(prettifier, Many(4, 3, 2)), thisLineNumber - 2, lists), fileName, thisLineNumber - 2)
val e2 = intercept[TestFailedException] {
all (lists) should (not be (One(3)) and not contain only (2, 3, 4))
}
checkMessageStackDepth(e2, allErrMsg(2, decorateToStringValue(prettifier, Many(4, 3, 2)) + " was not equal to " + decorateToStringValue(prettifier, One(3)) + ", but " + decorateToStringValue(prettifier, Many(4, 3, 2)) + " contained only " + "(2, 3, 4)", thisLineNumber - 2, lists), fileName, thisLineNumber - 2)
val e3 = intercept[TestFailedException] {
all (hiLists) should (not be (Many("hi", "hello")) and not contain only ("ho", "hey", "howdy"))
}
checkMessageStackDepth(e3, allErrMsg(0, decorateToStringValue(prettifier, Many("hi", "hello")) + " was equal to " + decorateToStringValue(prettifier, Many("hi", "hello")), thisLineNumber - 2, hiLists), fileName, thisLineNumber - 2)
val e4 = intercept[TestFailedException] {
all (hiLists) should (not be (One("ho")) and not contain only ("hello", "hi"))
}
        checkMessageStackDepth(e4, allErrMsg(0, decorateToStringValue(prettifier, Many("hi", "hello")) + " was not equal to " + decorateToStringValue(prettifier, One("ho")) + ", but " + decorateToStringValue(prettifier, Many("hi", "hello")) + " contained only " + "(\"hello\", \"hi\")", thisLineNumber - 2, hiLists), fileName, thisLineNumber - 2)
}
it("should use the implicit Equality in scope") {
implicit val ise = upperCaseStringEquality
all (hiLists) should (not be (One("ho")) and not contain only ("HO", "HELLO"))
val e1 = intercept[TestFailedException] {
all (hiLists) should (not be (Many("hi", "hello")) and not contain only ("HELLO", "HI"))
}
checkMessageStackDepth(e1, allErrMsg(0, decorateToStringValue(prettifier, Many("hi", "hello")) + " was equal to " + decorateToStringValue(prettifier, Many("hi", "hello")), thisLineNumber - 2, hiLists), fileName, thisLineNumber - 2)
val e2 = intercept[TestFailedException] {
all (hiLists) should (not be (One("ho")) and not contain only ("HI", "HELLO"))
}
        checkMessageStackDepth(e2, allErrMsg(0, decorateToStringValue(prettifier, Many("hi", "hello")) + " was not equal to " + decorateToStringValue(prettifier, One("ho")) + ", but " + decorateToStringValue(prettifier, Many("hi", "hello")) + " contained only " + "(\"HI\", \"HELLO\")", thisLineNumber - 2, hiLists), fileName, thisLineNumber - 2)
}
it("should use an explicitly provided Equality") {
(all (hiLists) should (not be (One("ho")) and not contain only ("HO", "HELLO"))) (decided by upperCaseStringEquality)
val e1 = intercept[TestFailedException] {
(all (hiLists) should (not be (Many("hi", "hello")) and not contain only ("HELLO", "HI"))) (decided by upperCaseStringEquality)
}
checkMessageStackDepth(e1, allErrMsg(0, decorateToStringValue(prettifier, Many("hi", "hello")) + " was equal to " + decorateToStringValue(prettifier, Many("hi", "hello")), thisLineNumber - 2, hiLists), fileName, thisLineNumber - 2)
val e2 = intercept[TestFailedException] {
(all (hiLists) should (not be (One("ho")) and not contain only ("HI", "HELLO"))) (decided by upperCaseStringEquality)
}
        checkMessageStackDepth(e2, allErrMsg(0, decorateToStringValue(prettifier, Many("hi", "hello")) + " was not equal to " + decorateToStringValue(prettifier, One("ho")) + ", but " + decorateToStringValue(prettifier, Many("hi", "hello")) + " contained only " + "(\"HI\", \"HELLO\")", thisLineNumber - 2, hiLists), fileName, thisLineNumber - 2)
}
it("should throw NotAllowedException with correct stack depth and message when RHS is empty") {
val e1 = intercept[exceptions.NotAllowedException] {
all (list1s) should (not be (One(2)) and not contain only ())
}
e1.failedCodeFileName.get should be (fileName)
e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
e1.message should be (Some(Resources.onlyEmpty))
}
it("should throw NotAllowedException with correct stack depth and message when RHS contain duplicated value") {
val e1 = intercept[exceptions.NotAllowedException] {
all (list1s) should (not be (One(2)) and not contain only (3, 2, 2, 1))
}
e1.failedCodeFileName.get should be (fileName)
e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
e1.message should be (Some(Resources.onlyDuplicate))
}
it("should throw TFE with friendly reminder when single GenTraversable argument is passed and failed") {
val e1 = intercept[TestFailedException] {
all (One(One(Many(3, 2, 1)))) should (not be (One(3)) and not contain only (Many(3, 2, 1)))
}
checkMessageStackDepth(e1, allErrMsg(0, decorateToStringValue(prettifier, One(Many(3, 2, 1))) + " was not equal to " + decorateToStringValue(prettifier, One(3)) + ", but " + decorateToStringValue(prettifier, One(Many(3, 2, 1))) + " contained only (" + decorateToStringValue(prettifier, Many(3, 2, 1)) + "), did you forget to say : _*", thisLineNumber - 2, One(One(Many(3, 2, 1)))), fileName, thisLineNumber - 2)
val e2 = intercept[TestFailedException] {
all (One(One(Many("hi", "hello")))) should (not be (One("ho")) and not contain only (Many("hi", "hello")))
}
checkMessageStackDepth(e2, allErrMsg(0, decorateToStringValue(prettifier, One(Many("hi", "hello"))) + " was not equal to " + decorateToStringValue(prettifier, One("ho")) + ", but " + decorateToStringValue(prettifier, One(Many("hi", "hello"))) + " contained only (" + decorateToStringValue(prettifier, Many("hi", "hello")) + "), did you forget to say : _*", thisLineNumber - 2, One(One(Many("hi", "hello")))), fileName, thisLineNumber - 2)
}
}
}
}
| dotty-staging/scalatest | scalatest-test/src/test/scala/org/scalatest/EveryShouldContainOnlyLogicalAndSpec.scala | Scala | apache-2.0 | 74,268 |
/*
* Tranquility.
* Copyright 2013, 2014, 2015 Metamarkets Group, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.metamx.tranquility.storm.common
import backtype.storm.LocalCluster
import backtype.storm.generated.KillOptions
import com.metamx.common.scala.Logging
import com.metamx.common.scala.Predef._
import com.metamx.common.scala.control._
import org.scala_tools.time.Imports._
import scala.collection.JavaConverters._
trait StormRequiringSuite extends Logging
{
def withLocalStorm[A](f: (LocalCluster => A)): A = {
val storm = StormRequiringSuite.sharedCluster
def killTopology(name: String) {
retryOnErrors(ifException[Exception] untilPeriod 60.seconds) {
log.info("Killing topology: %s", name)
storm.killTopologyWithOpts(name, new KillOptions() withEffect (_.set_wait_secs(0)))
}
}
def getTopologies() = {
retryOnErrors(ifException[Exception] untilPeriod 60.seconds) {
Option(storm.getClusterInfo) map (_.get_topologies().asScala) getOrElse {
throw new IllegalStateException("getClusterInfo returned null!")
}
}
}
def killAllTopologies() = {
for (topology <- getTopologies()) {
killTopology(topology.get_name())
}
val start = System.currentTimeMillis()
while (getTopologies().nonEmpty && System.currentTimeMillis() < start + 60000L) {
log.info("Waiting for topologies to die...")
Thread.sleep(2000)
}
val topologies = getTopologies()
if (topologies.nonEmpty) {
throw new IllegalStateException("Topologies remain: %s" format topologies.map(_.get_name()))
}
}
try {
f(storm)
}
finally {
killAllTopologies()
}
}
}
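// Editorial sketch (hypothetical usage; `conf` and `builder` are assumed to be set up
// by the enclosing test and are not defined in this file):
//
//   class MyTopologySuite extends SomeTestSuite with StormRequiringSuite {
//     def testSubmit() {
//       withLocalStorm { storm =>
//         storm.submitTopology("my-topology", conf, builder.createTopology())
//         // ... assertions against the running topology ...
//       } // every remaining topology is killed on exit, even if the body throws
//     }
//   }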
object StormRequiringSuite
{
private lazy val sharedCluster: LocalCluster = new LocalCluster()
}
| deepikakhera/spark-tranquility | storm/src/main/scala/com/metamx/tranquility/storm/common/StormRequiringSuite.scala | Scala | apache-2.0 | 2,363 |
package org.bitcoins.crypto
import scodec.bits.ByteVector
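/** Editorial note (description inferred from the constructor arguments, not original
  * scaladoc): an element of the curve's base field -- an integer modulo
  * CryptoParams.getCurvePrime -- serialized as 32 bytes, suitable for use as an
  * affine x or y coordinate on the curve.
  */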
case class CurveCoordinate(bytes: ByteVector)
extends FiniteFieldMember[CurveCoordinate](CryptoParams.getCurvePrime, 32) {
override def fieldObj: FiniteFieldObject[CurveCoordinate] = CurveCoordinate
}
object CurveCoordinate
extends FiniteFieldObject[CurveCoordinate](CryptoParams.getCurvePrime, 32) {
override def fieldMemberConstructor(bytes: ByteVector): CurveCoordinate = {
new CurveCoordinate(bytes)
}
}
| bitcoin-s/bitcoin-s | crypto/src/main/scala/org/bitcoins/crypto/CurveCoordinate.scala | Scala | mit | 489 |
/*
* (c) Copyright 2016 Hewlett Packard Enterprise Development LP
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package cogx.compiler.optimizer
import cogx.compiler.codegenerator.KernelCircuit
import cogx.compiler.codegenerator.opencl.cpukernels.RecurrentFieldKernel
import cogx.platform.opencl.OpenCLKernelCodeGenParams
import cogx.platform.types.VirtualFieldRegister
import cogx.runtime.execution.Profiler
/** Interface for a component that optimizes a kernel circuit. Subclasses will
* implement the "optimize" method that does an in-place optimization of the
* kernel circuit passed to it, returning true if any improvements were made.
*
* @author Greg Snider
*/
private[cogx]
trait Optimizer {
/** Compute and return an optimized DAG.
*
* @param dag KernelCircuit possibly containing recurrent field kernels that is to be optimized.
* @param codeGenParams A bundle of device parameters that affect kernel code generation and optimization.
* @param profiler The profiler to use to pick the best variant
* @param report True if verbosity is desired.
* @return The number of optimizations made.
*/
def optimize(dag: KernelCircuit, codeGenParams: OpenCLKernelCodeGenParams, profiler: Profiler, report: Boolean = true) : Int
/** Fix up recurrences, post merge; MUST BE CALLED BY ALL OPTIMIZERS AT
* THE COMPLETION OF THEIR OPTIMIZATION!
*
* Feedback paths from one kernel to another are not represented directly
* in a KernelCircuit to prevent cycles (KernelCircuit must be acyclic). So
* feedback is represented by an "out-of-band" pointer. The kernel
* receiving the feedback is a RecurrentFieldKernel, which contains a
* field that represents the kernel which is supplying the feedback. Since
* the HyperKernel merger can't see that field, it must be patched up here.
* The `mergeMap` contains the information we need. Every time a kernel is
* removed by merging, a new kernel takes over its outputs. This maintains
* the map of old to new so that we can fix things up.
*
* Similarly, Actuators maintain an "out-of-band" pointer to the kernel that
* sources the actuator data stream, and this must be maintained as
* optimizers remove the original kernel pointed to.
*
* @param dag KernelCircuit possibly containing recurrent field kernels that
* need to have their recurrent inputs fixed up.
*/
protected def fixRecurrences(dag: KernelCircuit) {
dag.traversePreorder {
_ match {
case rKernel: RecurrentFieldKernel =>
rKernel.recurrence =
dag.findStolenOutput(rKernel.recurrence).asInstanceOf[VirtualFieldRegister]
case _ =>
}
}
}
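  // Editorial sketch (hypothetical, not part of the original API): a minimal no-op
  // optimizer illustrating the contract above -- every optimizer must end by calling
  // fixRecurrences, even when it changes nothing.
  //
  //   private[cogx] object NoOpOptimizer extends Optimizer {
  //     def optimize(dag: KernelCircuit, codeGenParams: OpenCLKernelCodeGenParams,
  //                  profiler: Profiler, report: Boolean = true): Int = {
  //       fixRecurrences(dag)  // mandatory post-optimization fix-up
  //       0                    // no optimizations performed
  //     }
  //   }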
}
| hpe-cct/cct-core | src/main/scala/cogx/compiler/optimizer/Optimizer.scala | Scala | apache-2.0 | 3,244 |
class Tokens { abstract class Token }
trait TokenParsers { val lexical: Tokens }
class MyTokenParsers extends TokenParsers {
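  // Editorial note (inferred from the tests/untried/neg location): this is a negative
  // test -- `import lexical.*` refers to `lexical`, which is defined only on the next
  // line, so the compiler is expected to reject the forward/cyclic reference.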
import lexical.*
val lexical = new Tokens
}
}
| lampepfl/dotty | tests/untried/neg/t1845.scala | Scala | apache-2.0 | 177 |
package edu.arizona.sista.twitter4food
import org.clulab.struct.Counter
import java.util.regex.Pattern
import scala.collection.mutable.ListBuffer
import java.io.{FileWriter, PrintWriter}
/**
*
* User: mihais
* Date: 10/25/13
*/
class GeoTagger(val statePatterns: Map[String, List[Pattern]]) {
def this(stateFilename: String) =
this(GeoTag.loadStatePatterns(stateFilename))
def this(source: io.Source = io.Source.fromURL(GeoTagger.stateResource)) =
this(GeoTag.loadStatePatterns(source))
def normalizeLocation(loc:String, timeZone:String):Option[String] = GeoTag.normalizeLocation(loc, timeZone,
statePatterns)
}
object GeoTagger {
val stateResource = getClass.getResource("states.txt")
}
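// Editorial sketch (hypothetical REPL usage; the inputs and results are illustrative,
// not values from the original, and depend on the contents of states.txt):
//
//   val tagger = new GeoTagger()
//   tagger.normalizeLocation("los angeles, ca", "pacific")  // => Some("CA")
//   tagger.normalizeLocation("la", "central")               // => Some("LA"), disambiguated by time zone
//   tagger.normalizeLocation("somewhere else", "eastern")   // => None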
object GeoTag {
def main(args:Array[String]) {
val stateFile = args(0)
val in_file = args(1)
val out_file = args(2)
val states = loadStatePatterns(stateFile)
//println(normalizeLocation("los angeles, ca", states))
//println(normalizeLocation("new york, ny", states))
val os = new PrintWriter(new FileWriter(out_file))
geoTagTweets(states, in_file, os)
os.close()
}
def geoTagTweets(states:Map[String, List[Pattern]], file:String, os:PrintWriter) {
var lineCount = 0
var tweetsWithLoc = 0
var totalTweets = 0
var tweetsNormed = 0
val locStats = new Counter[String]()
val stateStats = new Counter[String]()
val unnormLocs = new Counter[String]()
var metaLine:String = null
for (line <- Utils.loadFile(file).getLines) {
lineCount += 1
if(lineCount % 3 == 1) {
metaLine = line
} else if(lineCount % 3 == 0) {
totalTweets += 1
val bits = metaLine.split('\t')
val loc = bits(3).toLowerCase()
val timeZone = bits(6).toLowerCase()
locStats.incrementCount(loc)
if(loc != "nil") {
tweetsWithLoc += 1
normalizeLocation(loc, timeZone, states) match {
case Some(state) => {
tweetsNormed += 1
stateStats.incrementCount(state)
// os.println(state + "\t" + line.replaceAll("\\s+", " "))
}
case None => unnormLocs.incrementCount(loc)
}
}
}
}
val showLocs = false
if(showLocs) {
for(t <- locStats.sorted) {
if(t._2 > 1) {
println(t._1 + "\t" + t._2)
}
}
}
val showStates = false
if(showStates) {
for(t <- stateStats.sorted) {
println(t._1 + "\t" + t._2)
}
}
val showUnnorm = false
if(showUnnorm) {
for(t <- unnormLocs.sorted) {
if(t._2 > 10) {
println(t._1 + "\t" + t._2)
}
}
}
/*
println("Total tweets: " + totalTweets)
println("Tweets with location info: " + tweetsWithLoc)
println("Tweets that could be normalized: " + tweetsNormed)
*/
}
val USA_SUFFIX = Pattern.compile("([\\s*,]?\\s*USA?\\s*$)|([\\s*,]?\\s*united\\s*states\\s*(of\\s*america)?\\s*$)", Pattern.CASE_INSENSITIVE)
def normalizeLocation(loc:String, timeZone:String, states:Map[String, List[Pattern]]):Option[String] = {
// remove any potential USA suffixes
val m = USA_SUFFIX.matcher(loc)
var l = loc
if(m.matches()) {
l = loc.substring(0, m.start())
//println(s"STRIPPED USA: [$loc] to [$l]")
}
if(l == "") return None
// special case: la
if(loc == "la") {
if(timeZone == null) {
// can't normalize this one
return None
} else if(timeZone.contains("pacific")) {
//println("LA NORMALIZED TO CA")
return Some("CA")
} else if(timeZone.contains("central")) {
//println("LA NORMALIZED TO LA")
return Some("LA")
} else {
return None
}
}
// return the first state that matches the location
for(st <- states.keys) {
for(p <- states(st)) {
if(p.matcher(l).find()) {
//println(s"[$l] normalized to [$st]")
return Some(st)
}
}
}
None
}
def loadStatePatterns(source: io.Source) = {
val states = new collection.mutable.HashMap[String, List[Pattern]]
var count = 0
for (line <- source.getLines) {
count += 1
val bits = line.split('\t')
val st = bits(0).trim
val pb = new ListBuffer[Pattern]
for(i <- 1 until bits.length) {
val core = bits(i).trim
pb += Pattern.compile("\\s+" + core + "\\s*$", Pattern. CASE_INSENSITIVE)
pb += Pattern.compile("^" + core + "\\s*" + "((,\\s*)?" + st + ")?" + "$", Pattern.CASE_INSENSITIVE)
}
states += st -> pb.toList
// println("PATTERNS FOR " + st + ": " + pb.toList)
}
// println(s"Loaded $count states.")
states.toMap
}
def loadStatePatterns(file:String): Map[String, List[Pattern]] = {
loadStatePatterns(io.Source.fromFile(file))
}
}
| clulab/twitter4food | src/main/scala/edu/arizona/sista/twitter4food/GeoTag.scala | Scala | apache-2.0 | 4,885 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.util.collection
import scala.reflect.ClassTag
import scala.util.Random
import org.scalatest.FunSuite
import org.apache.spark.util.SizeEstimator
class SizeTrackerSuite extends FunSuite {
val NORMAL_ERROR = 0.20
val HIGH_ERROR = 0.30
import SizeTrackerSuite._
test("vector fixed size insertions") {
testVector[Long](10000, i => i.toLong)
testVector[(Long, Long)](10000, i => (i.toLong, i.toLong))
testVector[LargeDummyClass](10000, i => new LargeDummyClass)
}
test("vector variable size insertions") {
val rand = new Random(123456789)
def randString(minLen: Int, maxLen: Int): String = {
"a" * (rand.nextInt(maxLen - minLen) + minLen)
}
testVector[String](10000, i => randString(0, 10))
testVector[String](10000, i => randString(0, 100))
testVector[String](10000, i => randString(90, 100))
}
test("map fixed size insertions") {
testMap[Int, Long](10000, i => (i, i.toLong))
testMap[Int, (Long, Long)](10000, i => (i, (i.toLong, i.toLong)))
testMap[Int, LargeDummyClass](10000, i => (i, new LargeDummyClass))
}
test("map variable size insertions") {
val rand = new Random(123456789)
def randString(minLen: Int, maxLen: Int): String = {
"a" * (rand.nextInt(maxLen - minLen) + minLen)
}
testMap[Int, String](10000, i => (i, randString(0, 10)))
testMap[Int, String](10000, i => (i, randString(0, 100)))
testMap[Int, String](10000, i => (i, randString(90, 100)))
}
test("map updates") {
val rand = new Random(123456789)
def randString(minLen: Int, maxLen: Int): String = {
"a" * (rand.nextInt(maxLen - minLen) + minLen)
}
testMap[String, Int](10000, i => (randString(0, 10000), i))
}
def testVector[T: ClassTag](numElements: Int, makeElement: Int => T) {
val vector = new SizeTrackingVector[T]
for (i <- 0 until numElements) {
val item = makeElement(i)
vector += item
expectWithinError(vector, vector.estimateSize(), if (i < 32) HIGH_ERROR else NORMAL_ERROR)
}
}
def testMap[K, V](numElements: Int, makeElement: (Int) => (K, V)) {
val map = new SizeTrackingAppendOnlyMap[K, V]
for (i <- 0 until numElements) {
val (k, v) = makeElement(i)
map(k) = v
expectWithinError(map, map.estimateSize(), if (i < 32) HIGH_ERROR else NORMAL_ERROR)
}
}
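  // Editorial note (rationale inferred, not stated in the original): the bounds below
  // are asymmetric -- the sampled estimate may overshoot the SizeEstimator baseline by
  // up to 2 * error but undershoot by at most error, since under-reporting size is the
  // riskier failure mode for memory tracking.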
def expectWithinError(obj: AnyRef, estimatedSize: Long, error: Double) {
val betterEstimatedSize = SizeEstimator.estimate(obj)
assert(betterEstimatedSize * (1 - error) < estimatedSize,
s"Estimated size $estimatedSize was less than expected size $betterEstimatedSize")
assert(betterEstimatedSize * (1 + 2 * error) > estimatedSize,
s"Estimated size $estimatedSize was greater than expected size $betterEstimatedSize")
}
}
private object SizeTrackerSuite {
/**
* Run speed tests for size tracking collections.
*/
def main(args: Array[String]): Unit = {
if (args.size < 1) {
println("Usage: SizeTrackerSuite [num elements]")
System.exit(1)
}
val numElements = args(0).toInt
vectorSpeedTest(numElements)
mapSpeedTest(numElements)
}
/**
* Speed test for SizeTrackingVector.
*
* Results for 100000 elements (possibly non-deterministic):
* PrimitiveVector 15 ms
* SizeTracker 51 ms
* SizeEstimator 2000 ms
*/
def vectorSpeedTest(numElements: Int): Unit = {
val baseTimes = for (i <- 0 until 10) yield time {
val vector = new PrimitiveVector[LargeDummyClass]
for (i <- 0 until numElements) {
vector += new LargeDummyClass
}
}
val sampledTimes = for (i <- 0 until 10) yield time {
val vector = new SizeTrackingVector[LargeDummyClass]
for (i <- 0 until numElements) {
vector += new LargeDummyClass
vector.estimateSize()
}
}
val unsampledTimes = for (i <- 0 until 3) yield time {
val vector = new PrimitiveVector[LargeDummyClass]
for (i <- 0 until numElements) {
vector += new LargeDummyClass
SizeEstimator.estimate(vector)
}
}
printSpeedTestResult("SizeTrackingVector", baseTimes, sampledTimes, unsampledTimes)
}
/**
* Speed test for SizeTrackingAppendOnlyMap.
*
* Results for 100000 elements (possibly non-deterministic):
* AppendOnlyMap 30 ms
* SizeTracker 41 ms
* SizeEstimator 1666 ms
*/
def mapSpeedTest(numElements: Int): Unit = {
val baseTimes = for (i <- 0 until 10) yield time {
val map = new AppendOnlyMap[Int, LargeDummyClass]
for (i <- 0 until numElements) {
map(i) = new LargeDummyClass
}
}
val sampledTimes = for (i <- 0 until 10) yield time {
val map = new SizeTrackingAppendOnlyMap[Int, LargeDummyClass]
for (i <- 0 until numElements) {
map(i) = new LargeDummyClass
map.estimateSize()
}
}
val unsampledTimes = for (i <- 0 until 3) yield time {
val map = new AppendOnlyMap[Int, LargeDummyClass]
for (i <- 0 until numElements) {
map(i) = new LargeDummyClass
SizeEstimator.estimate(map)
}
}
printSpeedTestResult("SizeTrackingAppendOnlyMap", baseTimes, sampledTimes, unsampledTimes)
}
def printSpeedTestResult(
testName: String,
baseTimes: Seq[Long],
sampledTimes: Seq[Long],
unsampledTimes: Seq[Long]): Unit = {
println(s"Average times for $testName (ms):")
println(" Base - " + averageTime(baseTimes))
println(" SizeTracker (sampled) - " + averageTime(sampledTimes))
println(" SizeEstimator (unsampled) - " + averageTime(unsampledTimes))
println()
}
def time(f: => Unit): Long = {
val start = System.currentTimeMillis()
f
System.currentTimeMillis() - start
}
def averageTime(v: Seq[Long]): Long = {
v.sum / v.size
}
private class LargeDummyClass {
val arr = new Array[Int](100)
}
}
| Dax1n/spark-core | core/src/test/scala/org/apache/spark/util/collection/SizeTrackerSuite.scala | Scala | apache-2.0 | 6,743 |
package lila.chat
import org.joda.time.DateTime
import lila.user.User
final class ChatPanic {
private var until: Option[DateTime] = none
def allowed(u: User): Boolean =
!enabled || {
(u.count.gameH > 10 && u.createdSinceDays(1)) || u.isVerified
}
def allowed(id: User.ID, fetch: User.ID => Fu[Option[User]]): Fu[Boolean] =
if (enabled) fetch(id) dmap { _ ?? allowed }
else fuTrue
def enabled =
until exists { d =>
d.isAfterNow || {
until = none
false
}
}
def get = until
def start() = {
logger.warn("Chat Panic enabled")
until = DateTime.now.plusMinutes(180).some
}
def stop() = {
logger.warn("Chat Panic disabled")
until = none
}
def set(v: Boolean) = if (v) start() else stop()
}
}
| luanlv/lila | modules/chat/src/main/ChatPanic.scala | Scala | mit | 787 |
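// Hedged usage sketch for the class above; ChatPanicDemo is hypothetical and
// assumes the enclosing lila wiring (e.g. the package-level `logger`) is
// available at runtime.
object ChatPanicDemo {
  def demo(panic: ChatPanic): Unit = {
    panic.start()            // panic mode on, auto-expires after 180 minutes
    assert(panic.enabled)
    panic.stop()             // clears the deadline immediately
    assert(!panic.enabled)
  }
}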
package io.github.reggert.reb4j.test
import io.github.reggert.reb4j.{Entity, Raw}
import org.scalacheck.{Arbitrary, Gen}
trait RawGenerators extends UtilGenerators with LiteralGenerators {
implicit val arbEntity: Arbitrary[Entity] = Arbitrary(genEntity)
implicit val arbRawCompound : Arbitrary[Raw.Compound] =
Arbitrary(Gen.sized {size => if (size < 2) Gen.fail else Gen.choose(2, size) flatMap genRawCompound })
implicit val arbEscapedLiteral: Arbitrary[Raw.EscapedLiteral] =
Arbitrary(Gen.sized {size => if (size < 1) Gen.fail else Gen.choose(1, size) flatMap genEscapedLiteral })
implicit val arbRaw: Arbitrary[Raw] =
Arbitrary(Gen.sized {size => if (size < 1) Gen.fail else Gen.choose(1, size) flatMap genRaw })
//noinspection ZeroIndexToHead
def genRawCompound(size : Int) : Gen[Raw.Compound] = {
require(size >= 2)
val sizesGen = size match
{
case 2 => Gen.const(1::1::Nil)
case _ => genSizes(size) filter {_.length >= 2}
}
for {
sizes <- sizesGen
subtreeGens = for {s <- sizes} yield genRaw(s)
subtreesGen = (Gen.const(Nil : List[Raw]) /: subtreeGens) {(ssGen, sGen) =>
for {
ss <- ssGen
s <- sGen
} yield s::ss
}
subtrees <- subtreesGen
} yield ((subtrees(0) andThen subtrees(1)) /: subtrees.drop(2)) {_ andThen _}
}
def genRaw(size : Int) : Gen[Raw] = {
require (size > 0)
size match {
case 1 => Gen.oneOf(genEscapedLiteral(1), genEntity)
case _ => Gen.oneOf(genEscapedLiteral(size), Gen.lzy(genRawCompound(size)))
}
}
def genEntity : Gen[Entity] = Gen.oneOf(
Entity.ANY_CHAR,
Entity.LINE_BEGIN,
Entity.LINE_END,
Entity.WORD_BOUNDARY,
Entity.NONWORD_BOUNDARY,
Entity.INPUT_BEGIN,
Entity.MATCH_END,
Entity.INPUT_END_SKIP_EOL,
Entity.INPUT_END
)
def genEscapedLiteral(size : Int) : Gen[Raw.EscapedLiteral] =
genLiteral(size) map {lit => new Raw.EscapedLiteral(lit)}
}
object RawGenerators extends RawGenerators
| reggert/reb4j | src/test/scala/io/github/reggert/reb4j/test/RawGenerators.scala | Scala | lgpl-3.0 | 1948 |
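// Minimal ScalaCheck sketch using the generators above. The property checked
// (rendering is non-empty) is an illustrative assumption, not a reb4j
// invariant stated in the original file; genRaw is called with an explicit
// size to avoid the Gen.fail branch for sizes below 1.
import org.scalacheck.Prop.forAll
import io.github.reggert.reb4j.Raw

object RawGeneratorsSketch extends RawGenerators {
  val rendersNonEmpty = forAll(genRaw(10)) { raw =>
    raw.toString.nonEmpty
  }
}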
package dotty
import scala.language.unsafeNulls
import java.nio.file._
/** Runtime properties from defines or environment */
object Properties {
/** If property is unset or "TRUE" we consider it `true` */
  private def propIsNullOrTrue(name: String): Boolean = {
    val prop = System.getProperty(name)
    prop == null || prop == "TRUE"
  }
/** Are we running on the CI? */
val isRunByCI: Boolean = sys.env.isDefinedAt("DOTTY_CI_RUN")
|| sys.env.isDefinedAt("DRONE") // TODO remove this when we drop Drone
val testCache: Path =
sys.env.get("DOTTY_TEST_CACHE").map(Paths.get(_)).getOrElse {
Paths.get(sys.props("user.home"), ".cache", "dotty", "test")
}
/** Tests should run interactive? */
val testsInteractive: Boolean = propIsNullOrTrue("dotty.tests.interactive")
/** Filter out tests not matching the regex supplied by "dotty.tests.filter"
* define
*/
val testsFilter: List[String] = sys.props.get("dotty.tests.filter").fold(Nil)(_.split(',').toList)
/** Tests should override the checkfiles with the current output */
val testsUpdateCheckfile: Boolean =
sys.props.getOrElse("dotty.tests.updateCheckfiles", "FALSE") == "TRUE"
/** When set, the run tests are only compiled - not run, a warning will be
* issued
*/
val testsNoRun: Boolean = sys.props.get("dotty.tests.norun").isDefined
/** Should Unit tests run in safe mode?
*
* For run tests this means that we respawn child JVM processes after each
* test, so that they are never reused.
*/
val testsSafeMode: Boolean = sys.props.isDefinedAt("dotty.tests.safemode")
/** Extra directory containing sources for the compiler */
def dottyCompilerManagedSources: Path = Paths.get(sys.props("dotty.tests.dottyCompilerManagedSources"))
/** dotty-interfaces jar */
def dottyInterfaces: String = sys.props("dotty.tests.classes.dottyInterfaces")
/** dotty-library jar */
def dottyLibrary: String = sys.props("dotty.tests.classes.dottyLibrary")
/** dotty-library-js jar */
def dottyLibraryJS: String = sys.props("dotty.tests.classes.dottyLibraryJS")
/** dotty-compiler jar */
def dottyCompiler: String = sys.props("dotty.tests.classes.dottyCompiler")
/** dotty-staging jar */
def dottyStaging: String = sys.props("dotty.tests.classes.dottyStaging")
/** dotty-tasty-inspector jar */
def dottyTastyInspector: String = sys.props("dotty.tests.classes.dottyTastyInspector")
/** tasty-core jar */
def tastyCore: String = sys.props("dotty.tests.classes.tastyCore")
/** compiler-interface jar */
def compilerInterface: String = sys.props("dotty.tests.classes.compilerInterface")
/** scala-library jar */
def scalaLibrary: String = sys.props("dotty.tests.classes.scalaLibrary")
/** scala-asm jar */
def scalaAsm: String = sys.props("dotty.tests.classes.scalaAsm")
/** jline-terminal jar */
def jlineTerminal: String = sys.props("dotty.tests.classes.jlineTerminal")
/** jline-reader jar */
def jlineReader: String = sys.props("dotty.tests.classes.jlineReader")
/** scalajs-library jar */
def scalaJSLibrary: String = sys.props("dotty.tests.classes.scalaJSLibrary")
}
| dotty-staging/dotty | compiler/test/dotty/Properties.scala | Scala | apache-2.0 | 3,181 |
package dotty.tools.dotc.classpath
import dotty.tools.io.ClassRepresentation
import dotty.tools.io.{AbstractFile, VirtualDirectory}
import FileUtils._
import java.net.URL
import dotty.tools.io.ClassPath
case class VirtualDirectoryClassPath(dir: VirtualDirectory) extends ClassPath with DirectoryLookup[ClassFileEntryImpl] with NoSourcePaths {
type F = AbstractFile
// From AbstractFileClassLoader
private final def lookupPath(base: AbstractFile)(pathParts: Seq[String], directory: Boolean): AbstractFile = {
var file: AbstractFile = base
val dirParts = pathParts.init.iterator
while (dirParts.hasNext) {
val dirPart = dirParts.next
file = file.lookupName(dirPart, directory = true)
if (file == null)
return null
}
file.lookupName(pathParts.last, directory = directory)
}
protected def emptyFiles: Array[AbstractFile] = Array.empty
protected def getSubDir(packageDirName: String): Option[AbstractFile] =
Option(lookupPath(dir)(packageDirName.split(java.io.File.separator).toIndexedSeq, directory = true))
protected def listChildren(dir: AbstractFile, filter: Option[AbstractFile => Boolean] = None): Array[F] = filter match {
case Some(f) => dir.iterator.filter(f).toArray
case _ => dir.toArray
}
def getName(f: AbstractFile): String = f.name
def toAbstractFile(f: AbstractFile): AbstractFile = f
def isPackage(f: AbstractFile): Boolean = f.isPackage
// mimic the behavior of the old nsc.util.DirectoryClassPath
def asURLs: Seq[URL] = Seq(new URL(dir.name))
def asClassPathStrings: Seq[String] = Seq(dir.path)
override def findClass(className: String): Option[ClassRepresentation] = findClassFile(className) map ClassFileEntryImpl
def findClassFile(className: String): Option[AbstractFile] = {
val relativePath = FileUtils.dirPath(className) + ".class"
Option(lookupPath(dir)(relativePath.split(java.io.File.separator).toIndexedSeq, directory = false))
}
private[dotty] def classes(inPackage: String): Seq[ClassFileEntry] = files(inPackage)
protected def createFileEntry(file: AbstractFile): ClassFileEntryImpl = ClassFileEntryImpl(file)
protected def isMatchingFile(f: AbstractFile): Boolean = f.isClass
}
| som-snytt/dotty | compiler/src/dotty/tools/dotc/classpath/VirtualDirectoryClassPath.scala | Scala | apache-2.0 | 2,222 |
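// Minimal sketch for the class above: an empty in-memory directory wrapped as
// a classpath entry; lookups on it simply return None. Constructor arguments
// are the standard dotty.tools.io ones.
import dotty.tools.io.VirtualDirectory

object VirtualDirectoryClassPathSketch {
  val dir = new VirtualDirectory("<memory>", None)
  val cp = VirtualDirectoryClassPath(dir)
  // No class files were added, so nothing can be found:
  assert(cp.findClassFile("p.C").isEmpty)
}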
/*
* Copyright 2013 - 2015, Daniel Krzywicki <[email protected]>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package pl.edu.agh.scalamas.mas.sync
import akka.actor._
import pl.edu.agh.scalamas.mas.Logic
import pl.edu.agh.scalamas.mas.RootEnvironment.{Add, migration}
import pl.edu.agh.scalamas.mas.sync.SyncEnvironment.Loop
object SyncEnvironment {
case object Loop
def props(logic: Logic) = Props(classOf[SyncEnvironment], logic)
}
/**
 * A synchronous island implementation. This actor spins in a message loop. In each iteration it synchronously updates
 * the population according to the behaviour and meetings functions. This island supports migration, assuming its
 * parent is a RootEnvironment.
*
* @param logic the callbacks for the simulation
*/
class SyncEnvironment(logic: Logic) extends Actor {
var population = logic.initialPopulation
self ! Loop
override def receive = {
case Loop =>
population = population.groupBy(logic.behaviourFunction).flatMap(migration orElse logic.meetingsFunction).toList
self ! Loop
case Add(agent) => population :+= agent
}
}
| ros3n/IntOb | core/src/main/scala/pl/edu/agh/scalamas/mas/sync/SyncEnvironment.scala | Scala | mit | 2,158 |
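// Hedged wiring sketch: spawning the synchronous island above under an actor
// system. `myLogic` stands in for a concrete Logic implementation.
import akka.actor.ActorSystem
import pl.edu.agh.scalamas.mas.Logic

object SyncEnvironmentSketch {
  def start(myLogic: Logic): Unit = {
    val system = ActorSystem("mas")
    // The actor immediately sends itself Loop and keeps iterating.
    system.actorOf(SyncEnvironment.props(myLogic), name = "island")
  }
}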
package uima.rs.ja
import org.apache.uima.jcas.JCas
import uima.rs.MultiLingualQuestion
import us.feliscat.m17n.Japanese
import us.feliscat.types.Question
import us.feliscat.util.uima.JCasID
/**
* <pre>
* Created on 2017/03/21.
* </pre>
*
* @author K.Sakamoto
*/
class JapaneseQuestion(casId: JCasID,
aJCas: JCas,
question: Question) extends MultiLingualQuestion(casId, aJCas, question) with Japanese
| ktr-skmt/FelisCatusZero-multilingual | src/main/scala/uima/rs/ja/JapaneseQuestion.scala | Scala | apache-2.0 | 461 |
package com.theseventhsense.datetime
import com.theseventhsense.utils.types.SSDateTime
import com.theseventhsense.utils.types.SSDateTime.DateTime.Format
import com.theseventhsense.utils.types.SSDateTime.{DateTime, DayOfWeek, HourOfDay, TimeZone}
/**
* Created by erik on 6/15/16.
*/
abstract class AbstractRichDateTime(dateTime: DateTime) extends Serializable {
def withZoneSameInstant(timeZone: TimeZone): DateTime
def withZoneSameLocal(timeZone: TimeZone): DateTime
def withMillisOfSecond(millisOfSecond: Int): DateTime
def withSecondOfMinute(secondOfMinute: Int): DateTime
def withMinuteOfHour(minuteOfHour: Int): DateTime
def withHourNumOfDay(hourOfDay: Int): DateTime
def withHourOfDay(hourOfDay: HourOfDay): DateTime =
withHourNumOfDay(hourOfDay.num)
def withDayNumOfWeek(dayOfWeekNum: Int): DateTime
def withDayOfWeek(dayOfWeek: DayOfWeek): DateTime
def withNextEvenHour: DateTime
def withRoundedMinute: DateTime
def plusMonths(weeks: Int): DateTime
def minusMonths(weeks: Int): DateTime
def plusYears(year: Int): DateTime
def minusYears(years: Int): DateTime
def format(format: Format): String
def atStartOfDay: DateTime
def secondOfDay: Int
def minuteOfHour: Int
def hourOfDay: HourOfDay
def dayOfMonth: SSDateTime.DayOfMonth
def month: SSDateTime.Month
def dayOfYear: Int
def dayOfWeek: DayOfWeek
def year: SSDateTime.Year
def toIsoString: String
}
abstract class AbstractRichDateTimeOps extends Serializable {
def parse(s: String): Either[DateTime.ParseError, DateTime]
}
| 7thsense/utils-datetime | shared/src/main/scala/com/theseventhsense/datetime/AbstractRichDateTime.scala | Scala | mit | 1,548 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.expressions
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.analysis.TypeCheckResult
import org.apache.spark.sql.catalyst.expressions.codegen.{CodegenContext, ExprCode}
import org.apache.spark.sql.catalyst.expressions.codegen.Block._
import org.apache.spark.sql.types._
import org.apache.spark.unsafe.types.UTF8String
import org.apache.spark.util.collection.unsafe.sort.PrefixComparators._
abstract sealed class SortDirection {
def sql: String
def defaultNullOrdering: NullOrdering
}
abstract sealed class NullOrdering {
def sql: String
}
case object Ascending extends SortDirection {
override def sql: String = "ASC"
override def defaultNullOrdering: NullOrdering = NullsFirst
}
case object Descending extends SortDirection {
override def sql: String = "DESC"
override def defaultNullOrdering: NullOrdering = NullsLast
}
case object NullsFirst extends NullOrdering{
override def sql: String = "NULLS FIRST"
}
case object NullsLast extends NullOrdering{
override def sql: String = "NULLS LAST"
}
/**
* An expression that can be used to sort a tuple. This class extends expression primarily so that
* transformations over expression will descend into its child.
* `sameOrderExpressions` is a set of expressions with the same sort order as the child. It is
* derived from equivalence relation in an operator, e.g. left/right keys of an inner sort merge
* join.
*/
case class SortOrder(
child: Expression,
direction: SortDirection,
nullOrdering: NullOrdering,
sameOrderExpressions: Seq[Expression])
extends Expression with Unevaluable {
override def children: Seq[Expression] = child +: sameOrderExpressions
override def checkInputDataTypes(): TypeCheckResult = {
if (RowOrdering.isOrderable(dataType)) {
TypeCheckResult.TypeCheckSuccess
} else {
TypeCheckResult.TypeCheckFailure(s"cannot sort data type ${dataType.catalogString}")
}
}
override def dataType: DataType = child.dataType
override def nullable: Boolean = child.nullable
override def toString: String = s"$child ${direction.sql} ${nullOrdering.sql}"
override def sql: String = child.sql + " " + direction.sql + " " + nullOrdering.sql
def isAscending: Boolean = direction == Ascending
def satisfies(required: SortOrder): Boolean = {
children.exists(required.child.semanticEquals) &&
direction == required.direction && nullOrdering == required.nullOrdering
}
override protected def withNewChildrenInternal(newChildren: IndexedSeq[Expression]): SortOrder =
copy(child = newChildren.head, sameOrderExpressions = newChildren.tail)
}
object SortOrder {
def apply(
child: Expression,
direction: SortDirection,
sameOrderExpressions: Seq[Expression] = Seq.empty): SortOrder = {
new SortOrder(child, direction, direction.defaultNullOrdering, sameOrderExpressions)
}
/**
* Returns if a sequence of SortOrder satisfies another sequence of SortOrder.
*
* SortOrder sequence A satisfies SortOrder sequence B if and only if B is an equivalent of A
* or of A's prefix. Here are examples of ordering A satisfying ordering B:
* <ul>
* <li>ordering A is [x, y] and ordering B is [x]</li>
* <li>ordering A is [x(sameOrderExpressions=x1)] and ordering B is [x1]</li>
* <li>ordering A is [x(sameOrderExpressions=x1), y] and ordering B is [x1]</li>
* </ul>
*/
def orderingSatisfies(ordering1: Seq[SortOrder], ordering2: Seq[SortOrder]): Boolean = {
if (ordering2.isEmpty) {
true
} else if (ordering2.length > ordering1.length) {
false
} else {
ordering2.zip(ordering1).forall {
case (o2, o1) => o1.satisfies(o2)
}
}
}
}
/**
* An expression to generate a 64-bit long prefix used in sorting. If the sort must operate over
* null keys as well, this.nullValue can be used in place of emitted null prefixes in the sort.
*/
case class SortPrefix(child: SortOrder) extends UnaryExpression {
val nullValue = child.child.dataType match {
case BooleanType | DateType | TimestampType | _: IntegralType | _: AnsiIntervalType =>
if (nullAsSmallest) Long.MinValue else Long.MaxValue
case dt: DecimalType if dt.precision - dt.scale <= Decimal.MAX_LONG_DIGITS =>
if (nullAsSmallest) Long.MinValue else Long.MaxValue
case _: DecimalType =>
if (nullAsSmallest) {
DoublePrefixComparator.computePrefix(Double.NegativeInfinity)
} else {
DoublePrefixComparator.computePrefix(Double.NaN)
}
case _ =>
if (nullAsSmallest) 0L else -1L
}
private def nullAsSmallest: Boolean = {
(child.isAscending && child.nullOrdering == NullsFirst) ||
(!child.isAscending && child.nullOrdering == NullsLast)
}
private lazy val calcPrefix: Any => Long = child.child.dataType match {
case BooleanType => (raw) =>
if (raw.asInstanceOf[Boolean]) 1 else 0
case DateType | TimestampType | _: IntegralType | _: AnsiIntervalType => (raw) =>
raw.asInstanceOf[java.lang.Number].longValue()
case FloatType | DoubleType => (raw) => {
val dVal = raw.asInstanceOf[java.lang.Number].doubleValue()
DoublePrefixComparator.computePrefix(dVal)
}
case StringType => (raw) =>
StringPrefixComparator.computePrefix(raw.asInstanceOf[UTF8String])
case BinaryType => (raw) =>
BinaryPrefixComparator.computePrefix(raw.asInstanceOf[Array[Byte]])
case dt: DecimalType if dt.precision <= Decimal.MAX_LONG_DIGITS =>
_.asInstanceOf[Decimal].toUnscaledLong
case dt: DecimalType if dt.precision - dt.scale <= Decimal.MAX_LONG_DIGITS =>
val p = Decimal.MAX_LONG_DIGITS
val s = p - (dt.precision - dt.scale)
(raw) => {
val value = raw.asInstanceOf[Decimal]
if (value.changePrecision(p, s)) value.toUnscaledLong else Long.MinValue
}
case dt: DecimalType => (raw) =>
DoublePrefixComparator.computePrefix(raw.asInstanceOf[Decimal].toDouble)
case _ => (Any) => 0L
}
override def eval(input: InternalRow): Any = {
val value = child.child.eval(input)
if (value == null) {
null
} else {
calcPrefix(value)
}
}
override def doGenCode(ctx: CodegenContext, ev: ExprCode): ExprCode = {
val childCode = child.child.genCode(ctx)
val input = childCode.value
val BinaryPrefixCmp = classOf[BinaryPrefixComparator].getName
val DoublePrefixCmp = classOf[DoublePrefixComparator].getName
val StringPrefixCmp = classOf[StringPrefixComparator].getName
val prefixCode = child.child.dataType match {
case BooleanType =>
s"$input ? 1L : 0L"
case _: IntegralType =>
s"(long) $input"
case DateType | TimestampType | _: AnsiIntervalType =>
s"(long) $input"
case FloatType | DoubleType =>
s"$DoublePrefixCmp.computePrefix((double)$input)"
case StringType => s"$StringPrefixCmp.computePrefix($input)"
case BinaryType => s"$BinaryPrefixCmp.computePrefix($input)"
case dt: DecimalType if dt.precision - dt.scale <= Decimal.MAX_LONG_DIGITS =>
if (dt.precision <= Decimal.MAX_LONG_DIGITS) {
s"$input.toUnscaledLong()"
} else {
// reduce the scale to fit in a long
val p = Decimal.MAX_LONG_DIGITS
val s = p - (dt.precision - dt.scale)
s"$input.changePrecision($p, $s) ? $input.toUnscaledLong() : ${Long.MinValue}L"
}
case dt: DecimalType =>
s"$DoublePrefixCmp.computePrefix($input.toDouble())"
case _ => "0L"
}
ev.copy(code = childCode.code +
code"""
|long ${ev.value} = 0L;
|boolean ${ev.isNull} = ${childCode.isNull};
|if (!${childCode.isNull}) {
| ${ev.value} = $prefixCode;
|}
""".stripMargin)
}
override def dataType: DataType = LongType
override protected def withNewChildInternal(newChild: Expression): SortPrefix =
copy(child = newChild.asInstanceOf[SortOrder])
}
| holdenk/spark | sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/SortOrder.scala | Scala | apache-2.0 | 8,837 |
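// Illustrative check of the orderingSatisfies contract documented above:
// ordering [x, y] satisfies its prefix [x], but not the other way around.
// Attribute construction is schematic but uses the standard catalyst
// AttributeReference.
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.types.IntegerType

object SortOrderSketch {
  val x = AttributeReference("x", IntegerType)()
  val y = AttributeReference("y", IntegerType)()
  val full = Seq(SortOrder(x, Ascending), SortOrder(y, Ascending))
  val prefix = Seq(SortOrder(x, Ascending))
  assert(SortOrder.orderingSatisfies(full, prefix))
  assert(!SortOrder.orderingSatisfies(prefix, full))
}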
/*
* Copyright (C) 2009-2017 Lightbend Inc. <https://www.lightbend.com>
*/
package play
import play.core.server._
import play.api.routing.sird._
import play.api.mvc._
object NettyTestServer extends App {
lazy val Action = new ActionBuilder.IgnoringBody()(_root_.controllers.Execution.trampoline)
val port: Int = 8000
val server = NettyServer.fromRouter(ServerConfig(
port = Some(port),
address = "127.0.0.1"
)) {
case GET(p"/") => Action { implicit req =>
Results.Ok(s"Hello world")
}
}
println("Server (Netty) started: http://127.0.0.1:8000/ ")
// server.stop()
}
| Shruti9520/playframework | framework/src/play-netty-server/src/test/scala/play/NettyTestServer.scala | Scala | apache-2.0 | 608 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ml.feature
import org.apache.spark.annotation.Since
import org.apache.spark.ml.UnaryTransformer
import org.apache.spark.ml.param._
import org.apache.spark.ml.util._
import org.apache.spark.sql.types.{ArrayType, DataType, StringType}
/**
* A tokenizer that converts the input string to lowercase and then splits it by white spaces.
*
* @see [[RegexTokenizer]]
*/
@Since("1.2.0")
class Tokenizer @Since("1.4.0") (@Since("1.4.0") override val uid: String)
extends UnaryTransformer[String, Seq[String], Tokenizer] with DefaultParamsWritable {
@Since("1.2.0")
def this() = this(Identifiable.randomUID("tok"))
override protected def createTransformFunc: String => Seq[String] = {
    _.toLowerCase.split("\\s")
}
override protected def validateInputType(inputType: DataType): Unit = {
require(inputType == StringType,
s"Input type must be ${StringType.catalogString} type but got ${inputType.catalogString}.")
}
override protected def outputDataType: DataType = new ArrayType(StringType, true)
@Since("1.4.1")
override def copy(extra: ParamMap): Tokenizer = defaultCopy(extra)
}
@Since("1.6.0")
object Tokenizer extends DefaultParamsReadable[Tokenizer] {
@Since("1.6.0")
override def load(path: String): Tokenizer = super.load(path)
}
/**
* A regex based tokenizer that extracts tokens either by using the provided regex pattern to split
* the text (default) or repeatedly matching the regex (if `gaps` is false).
* Optional parameters also allow filtering tokens using a minimal length.
* It returns an array of strings that can be empty.
*/
@Since("1.4.0")
class RegexTokenizer @Since("1.4.0") (@Since("1.4.0") override val uid: String)
extends UnaryTransformer[String, Seq[String], RegexTokenizer] with DefaultParamsWritable {
@Since("1.4.0")
def this() = this(Identifiable.randomUID("regexTok"))
/**
* Minimum token length, greater than or equal to 0.
* Default: 1, to avoid returning empty strings
* @group param
*/
@Since("1.4.0")
val minTokenLength: IntParam = new IntParam(this, "minTokenLength", "minimum token length (>= 0)",
ParamValidators.gtEq(0))
/** @group setParam */
@Since("1.4.0")
def setMinTokenLength(value: Int): this.type = set(minTokenLength, value)
/** @group getParam */
@Since("1.4.0")
def getMinTokenLength: Int = $(minTokenLength)
/**
* Indicates whether regex splits on gaps (true) or matches tokens (false).
* Default: true
* @group param
*/
@Since("1.4.0")
val gaps: BooleanParam = new BooleanParam(this, "gaps", "Set regex to match gaps or tokens")
/** @group setParam */
@Since("1.4.0")
def setGaps(value: Boolean): this.type = set(gaps, value)
/** @group getParam */
@Since("1.4.0")
def getGaps: Boolean = $(gaps)
/**
* Regex pattern used to match delimiters if [[gaps]] is true or tokens if [[gaps]] is false.
   * Default: `"\\s+"`
* @group param
*/
@Since("1.4.0")
val pattern: Param[String] = new Param(this, "pattern", "regex pattern used for tokenizing")
/** @group setParam */
@Since("1.4.0")
def setPattern(value: String): this.type = set(pattern, value)
/** @group getParam */
@Since("1.4.0")
def getPattern: String = $(pattern)
/**
* Indicates whether to convert all characters to lowercase before tokenizing.
* Default: true
* @group param
*/
@Since("1.6.0")
final val toLowercase: BooleanParam = new BooleanParam(this, "toLowercase",
"whether to convert all characters to lowercase before tokenizing.")
/** @group setParam */
@Since("1.6.0")
def setToLowercase(value: Boolean): this.type = set(toLowercase, value)
/** @group getParam */
@Since("1.6.0")
def getToLowercase: Boolean = $(toLowercase)
  setDefault(minTokenLength -> 1, gaps -> true, pattern -> "\\s+", toLowercase -> true)
override protected def createTransformFunc: String => Seq[String] = { originStr =>
val re = $(pattern).r
val str = if ($(toLowercase)) originStr.toLowerCase() else originStr
val tokens = if ($(gaps)) re.split(str).toSeq else re.findAllIn(str).toSeq
val minLength = $(minTokenLength)
tokens.filter(_.length >= minLength)
}
override protected def validateInputType(inputType: DataType): Unit = {
require(inputType == StringType, s"Input type must be string type but got $inputType.")
}
override protected def outputDataType: DataType = new ArrayType(StringType, true)
@Since("1.4.1")
override def copy(extra: ParamMap): RegexTokenizer = defaultCopy(extra)
}
@Since("1.6.0")
object RegexTokenizer extends DefaultParamsReadable[RegexTokenizer] {
@Since("1.6.0")
override def load(path: String): RegexTokenizer = super.load(path)
}
| tengpeng/spark | mllib/src/main/scala/org/apache/spark/ml/feature/Tokenizer.scala | Scala | apache-2.0 | 5,527 |
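// Typical usage sketch for RegexTokenizer above; assumes a live DataFrame
// with a string column named "sentence" (column names are illustrative).
import org.apache.spark.ml.feature.RegexTokenizer
import org.apache.spark.sql.DataFrame

object TokenizerSketch {
  def tokenize(df: DataFrame): DataFrame = {
    val tokenizer = new RegexTokenizer()
      .setInputCol("sentence")
      .setOutputCol("words")
      .setPattern("\\W+")       // split on runs of non-word characters
      .setMinTokenLength(2)     // drop single-character tokens
    tokenizer.transform(df)
  }
}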
// This does not currently work because it mixes higher-kinded types and raw type constructors.
package dotty.collection
package immutable
import annotation.unchecked.uncheckedVariance
trait Collection[T] { self =>
type This <: Collection { type This <: self.This }
def companion: CollectionCompanion[This]
}
trait Iterable[T] extends Collection[T] { self =>
type This <: Iterable { type This <: self.This }
override def companion: IterableCompanion[This] = Iterable.asInstanceOf
def iterator: Iterator[T]
}
trait Seq[T] extends Iterable[T] { self =>
type This <: Seq { type This <: self.This }
override def companion: IterableCompanion[This] = Seq.asInstanceOf
def apply(x: Int): T
}
abstract class CollectionCompanion[+CC[X] <: Collection[X] { type This <: CC }]
abstract class IterableCompanion[+CC[X] <: Iterable[X] { type This <: CC }] extends CollectionCompanion[CC] {
def fromIterator[T](it: Iterator[T]): CC[T]
def map[T, U](xs: Iterable[T], f: T => U): CC[U] =
fromIterator(xs.iterator.map(f))
def filter[T](xs: Iterable[T], p: T => Boolean): CC[T] =
fromIterator(xs.iterator.filter(p))
def flatMap[T, U](xs: Iterable[T], f: T => TraversableOnce[U]): CC[U] =
fromIterator(xs.iterator.flatMap(f))
implicit def transformOps[T](xs: CC[T] @uncheckedVariance): TransformOps[CC, T] = ??? // new TransformOps[CC, T](xs)
}
class TransformOps[+CC[X] <: Iterable[X] { type This <: CC }, T] (val xs: CC[T]) extends AnyVal {
def companion[T](xs: CC[T] @uncheckedVariance): IterableCompanion[CC] = xs.companion
def map[U](f: T => U): CC[U] = companion(xs).map(xs, f)
def filter(p: T => Boolean): CC[T] = companion(xs).filter(xs, p)
def flatMap[U](f: T => TraversableOnce[U]): CC[U] = companion(xs).flatMap(xs, f)
}
object Iterable extends IterableCompanion[Iterable] {
def fromIterator[T](it: Iterator[T]): Iterable[T] = ???
}
object Seq extends IterableCompanion[Seq] {
def fromIterator[T](it: Iterator[T]): Seq[T] = ???
}
| som-snytt/dotty | tests/invalid/pos/IterableSelfRec.scala | Scala | apache-2.0 | 1,982 |
package io.udash.properties.single
import com.avsystem.commons._
import io.udash.properties._
import io.udash.properties.seq.{ReadableSeqProperty, ReadableSeqPropertyFromSingleValue}
import io.udash.utils.Registration
/** Base interface of every Property in Udash. */
trait ReadableProperty[+A] {
/** @return Current property value. */
def get: A
/**
* Registers listener which will be called on value change.
*
* @param initUpdate If `true`, listener will be instantly triggered with current value of property.
*/
def listen(valueListener: A => Any, initUpdate: Boolean = false): Registration
/** Registers listener which will be called on the next value change. This listener will be fired only once. */
def listenOnce(valueListener: A => Any): Registration
/** Returns listeners count. */
def listenersCount(): Int
/** This method should be called when the value has changed. */
protected[properties] def valueChanged(): Unit
/** This method should be called when the listener is registered or removed. */
protected[properties] def listenersUpdate(): Unit
/**
* Creates ReadableProperty[B] linked to `this`. Changes will be synchronized with `this`.
*
* @param transformer Method transforming type A of existing Property to type B of new Property.
* @tparam B Type of new Property.
* @return New ReadableProperty[B], which will be synchronised with original ReadableProperty[A].
*/
def transform[B](transformer: A => B): ReadableProperty[B]
/**
* Creates ReadableSeqProperty[B] linked to `this`. Changes will be synchronized with `this`.
*
* @param transformer Method transforming type A of existing Property to type Seq[B] of new Property.
* @tparam B Type of elements in new SeqProperty.
* @return New ReadableSeqProperty[B], which will be synchronised with original ReadableProperty[A].
*/
def transformToSeq[B: PropertyCreator](transformer: A => BSeq[B]): ReadableSeqProperty[B, ReadableProperty[B]]
/**
* Streams value changes to the `target` property.
   * It is not as strong a relation as `transform`, because `target` can change value independently.
*/
def streamTo[B](target: Property[B], initUpdate: Boolean = true)(transformer: A => B): Registration
/**
* Creates a mutable copy of this property which follows the stream of updates from this property.
* Similarly to [[streamTo]], the target can change value independently and origin value updates can be cancelled.
*/
def mirror[B >: A : PropertyCreator](): MirrorProperty[B] =
new MirrorProperty(this)
/**
* Combines two properties into a new one. Created property will be updated after any change in the origin ones.
*
* @param property `Property[B]` to combine with `this`.
* @param combiner Method combining values A and B into O.
* @tparam B Type of elements in provided property.
* @tparam O Output property elements type.
* @return Property[O] updated on any change in `this` or `property`.
*/
def combine[B, O](property: ReadableProperty[B])(combiner: (A, B) => O): ReadableProperty[O] =
new CombinedProperty[A, B, O](this, property, combiner)
}
final class MirrorProperty[A: PropertyCreator](origin: ReadableProperty[A]) {
private val castable: CastableProperty[A] = PropertyCreator[A].newProperty(origin.get, null)
private val registration = origin.streamTo(castable, initUpdate = false)(identity)
}
object MirrorProperty {
implicit def castable[A](property: MirrorProperty[A]): CastableProperty[A] = property.castable
implicit def registration(property: MirrorProperty[_]): Registration = property.registration
}
private[properties] trait AbstractReadableProperty[A] extends ReadableProperty[A] {
protected[this] final val listeners: MLinkedHashSet[A => Any] = MLinkedHashSet.empty
protected[this] final val oneTimeListeners: MArrayBuffer[Registration] = MArrayBuffer.empty
protected def wrapListenerRegistration(reg: Registration): Registration = reg
protected def wrapOneTimeListenerRegistration(reg: Registration): Registration = wrapListenerRegistration(reg)
/** Parent property. `null` if this property has no parent. */
protected def parent: ReadableProperty[_]
override def listen(valueListener: A => Any, initUpdate: Boolean = false): Registration = {
listeners += valueListener
listenersUpdate()
if (initUpdate) valueListener(this.get)
wrapListenerRegistration(
new MutableSetRegistration(listeners, valueListener, Opt(listenersUpdate _))
)
}
override def listenOnce(valueListener: A => Any): Registration = {
val reg = wrapOneTimeListenerRegistration(
new MutableSetRegistration(listeners, valueListener, Opt(listenersUpdate _))
)
listeners += valueListener
oneTimeListeners += reg
listenersUpdate()
reg
}
override def listenersCount(): Int =
listeners.size
override protected[properties] def listenersUpdate(): Unit = {
if (parent != null) parent.listenersUpdate()
}
override def transform[B](transformer: A => B): ReadableProperty[B] =
new TransformedReadableProperty[A, B](this, transformer)
override def transformToSeq[B: PropertyCreator](transformer: A => BSeq[B]): ReadableSeqProperty[B, ReadableProperty[B]] =
new ReadableSeqPropertyFromSingleValue(this, transformer)
override def streamTo[B](target: Property[B], initUpdate: Boolean = true)(transformer: A => B): Registration = {
@inline def update(v: A): Unit = target.set(transformer(v))
if (initUpdate) update(get)
val listenerRegistration = listen(update)
new Registration {
override def cancel(): Unit = listenerRegistration.cancel()
override def isActive: Boolean = listenerRegistration.isActive
override def restart(): Unit = {
listenerRegistration.restart()
update(get)
}
}
}
protected[properties] override def valueChanged(): Unit = {
val originalListeners = listeners.toList
CallbackSequencer().queue(s"$hashCode:valueChanged", () => {
val value = get
originalListeners.foreach { listener => if (listeners.contains(listener)) listener(value) }
oneTimeListeners.foreach(_.cancel())
oneTimeListeners.clear()
})
if (parent != null) parent.valueChanged()
}
}
| UdashFramework/udash-core | core/src/main/scala/io/udash/properties/single/ReadableProperty.scala | Scala | apache-2.0 | 6,290 |
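// Usage sketch for the API above; `Property(...)` is assumed to be the usual
// udash factory for a mutable property, and the values are illustrative.
import io.udash.properties.single.Property

object ReadablePropertySketch {
  def demo(): Unit = {
    val source = Property(1)
    val doubled = source.transform(_ * 2) // one-way view, stays in sync
    doubled.listen(v => println(s"doubled = $v"))
    source.set(21) // the listener eventually observes 42
  }
}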
// Copyright 2017 EPFL DATA Lab (data.epfl.ch)
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package squid
package functional
import squid.ir.{BindingNormalizer, TopDownTransformer}
import utils._
class FoldTupleVarOptimsNew extends MyFunSuite(NormDSL) {
import DSL.Predef._
object Optim extends FoldTupleVarOptimNew.ForNormDSL
test("Foldleft to foreach to while") {
val ls = code"?ls: List[Int]"
val f = code"?f: ((Int, Int) => Int)"
code"$ls.foldLeft(0)($f)" transformWith Optim eqt
code"""
var acc = 0;
{
var ite = $ls
while (ite nonEmpty) {
{ val tmp = ite.head; acc = $f(acc,tmp) }
ite = ite.tail
}
}
acc
"""
}
test("Tuple Variable Inlining") {
eqtBy(code"var two = (1,0); while (two._1 + two._2 < 42) two = (two._1 + 1, two._2 + 2); two" transformWith Optim,
code"""
var a = 1;
var b = 0;
while (a + b < 42) {
a += 1
b += 2
}
(a, b)
""")(_ =~= _)
}
test("FoldLeft on tuple to while on scalar vars") {
// TODO make the combined optims work!
// Problem is: `cur` is not assigned a tuple, but an applied Function2 which is equivalent to a tuple, and we don't inline it...
// Even without inlining, we could solve the problem by just normalizing. Eg put it in ANF.
//println(ir"List(1,2,3).foldLeft((0,0))((acc,x) => (acc._2, acc._1+x))" transformWith Optim)
//println(ir"val r = List(1,2,3).foldLeft((0,0))((acc,x) => (acc._2, acc._1+x)); r._1 + r._2" transformWith Optim)
}
object Stopt extends StaticOptimizer[FoldTupleVarOptimNew.ForNormDSL]
import Stopt._
test("Static optimization") {
assert(optimize {
List(1,2,3).foldLeft(0)(_ + _)
} == 6)
assert(optimize {
//List(1,2,3).foldLeft(0)(acc_n => acc_n._1 + acc_n._2)
//List(1,2,3).foldLeft((0,0))((acc,x) => (acc._2,acc._1+x))
val r = List(1,2,3).foldLeft((0,0))((acc,x) => (acc._2,acc._1+x)); r._1 + r._2
} == (2+4))
}
}
| epfldata/squid | src/test/scala/squid/functional/FoldTupleVarOptimsNew.scala | Scala | apache-2.0 | 2,618 |
package uk.gov.dvla.vehicles.presentation.common.mappings
import play.api.data.FormError
import play.api.data.Forms._
import play.api.data.format.Formatter
import uk.gov.dvla.vehicles.presentation.common
import common.model.{SearchFields, Address}
import common.views.models.AddressAndPostcodeViewModel.Form.addressLinesFormat
import common.views.models.AddressAndPostcodeViewModel.Form.buildingNameOrNumberFormat
import common.views.models.AddressAndPostcodeViewModel.Form.postTownFormat
import common.views.models.AddressLinesViewModel.Form.BuildingNameOrNumberMinLength
import common.views.models.AddressLinesViewModel.Form.PostTownMinLength
object AddressPicker {
private final val AddressLinesFormat = addressLinesFormat.pattern // additional address lines
private final val PostTownFormat = postTownFormat.pattern
final val SearchByPostcodeField = "address-postcode-lookup"
final val AddressLine1Id = "address-line-1"
final val AddressLine2Id = "address-line-2"
final val AddressLine3Id = "address-line-3"
final val PostTownId = "post-town"
final val PostcodeId = "post-code"
final val RememberId = "remember-details"
final val ShowSearchFields = "show-search-fields"
final val ShowAddressSelect = "show-address-select"
final val ShowAddressFields = "show-address-fields"
final val AddressListSelect = "address-list"
def formatter() = new Formatter[Address] {
override def bind(key: String, data: Map[String, String]): Either[Seq[FormError], Address] = {
val filterEmpty = data.filterNot{case (_, v) => v.isEmpty}
val rememberDetails = filterEmpty.get(s"$key.$RememberId")
val showSearchFields = filterEmpty.get(s"$key.$ShowSearchFields").fold(false)(_.toBoolean)
val showAddressSelect = filterEmpty.get(s"$key.$ShowAddressSelect").fold(false)(_.toBoolean)
val showAddressFields = filterEmpty.get(s"$key.$ShowAddressFields").fold(true)(_.toBoolean)
val searchPostCode = filterEmpty.get(s"$key.$SearchByPostcodeField")
val listOption = filterEmpty.get(s"$key.$AddressListSelect")
val line1 = filterEmpty.get(s"$key.$AddressLine1Id")
val line2 = filterEmpty.get(s"$key.$AddressLine2Id")
val line3 = filterEmpty.get(s"$key.$AddressLine3Id")
val postTown = filterEmpty.get(s"$key.$PostTownId")
val postCode = filterEmpty.get(s"$key.$PostcodeId")
type SFE = Seq[FormError]
val postCodeErrors =
if (showSearchFields)
Postcode.postcode.withPrefix(s"$key.$SearchByPostcodeField").bind(data) match {
case Left(errors) => errors
case Right(postCode) =>
if (!showAddressFields) Seq(FormError(s"$key.$SearchByPostcodeField", "error.address.required"))
else Seq.empty[FormError]
}
else Seq.empty[FormError]
val addressFieldsErrors =
if(showAddressFields)
line1.fold[SFE](Seq(FormError(s"$key.$AddressLine1Id", "error.address.addressLine1"))) { line =>
val addressLine1Err =
if (line1.getOrElse("").length < BuildingNameOrNumberMinLength)
Seq(FormError(s"$key.$AddressLine1Id", "error.address.buildingNameOrNumber.invalid"))
else { // perform three alpha check iff min length is valid
if (!buildingNameOrNumberFormat.pattern.matcher(line).matches)
Seq(FormError(s"$key.$AddressLine1Id", "error.address.threeAlphas"))
else Nil}
Seq(AddressLine1Id, AddressLine2Id, AddressLine3Id).flatMap { lkey =>
filterEmpty.get(s"$key.$lkey").fold[Option[FormError]](None) { line =>
if (AddressLinesFormat.matcher(line).matches()) None
else Some(FormError(s"$key.$lkey", "error.address.characterInvalid"))
}
} ++ addressLine1Err
} ++ postTown.fold[SFE](Seq(FormError(s"$key.$PostTownId", "error.address.postTown"))) { postTown =>
if (postTown.length < PostTownMinLength)
Seq(FormError(s"$key.$PostTownId", "error.address.postTown"))
else {
if (PostTownFormat.matcher(postTown).matches()) Nil
else Seq(FormError(s"$key.$PostTownId", "error.address.postTown.characterInvalid"))}
} ++ postCode.fold[SFE](Seq(FormError(s"$key.$PostcodeId", "error.address.postCode"))) { postCode =>
Postcode.postcode.withPrefix(s"$key.$PostcodeId").bind(Map(s"$key.$PostcodeId" -> postCode)) match {
case Left(errors) => errors
case Right(result) => Nil
}
}
else Seq.empty[FormError]
val errors = postCodeErrors ++ addressFieldsErrors
if (errors.nonEmpty) Left(errors)
else Right(Address(
SearchFields(
showSearchFields,
showAddressSelect,
showAddressFields,
searchPostCode,
listOption,
rememberDetails.isDefined
),
line1.get,
line2,
line3,
postTown.get,
postCode.get.toUpperCase()
))
}
override def unbind(key: String, value: Address): Map[String, String] = Map(
s"$key.$AddressLine1Id" -> value.streetAddress1,
s"$key.$PostTownId" -> value.postTown,
s"$key.$PostcodeId" -> value.postCode,
s"$key.$ShowSearchFields" -> value.searchFields.showSearchFields.toString,
s"$key.$ShowAddressSelect" -> value.searchFields.showAddressSelect.toString,
s"$key.$ShowAddressFields" -> value.searchFields.showAddressFields.toString
) ++
toMap(value.streetAddress2, s"$key.$AddressLine2Id") ++
toMap(value.streetAddress3, s"$key.$AddressLine3Id") ++
toMap(value.searchFields.postCode, s"$key.$SearchByPostcodeField") ++
toMap(value.searchFields.listOption, s"$key.$AddressListSelect") ++
toMap(if(value.searchFields.remember) Some("true") else None, s"$key.$RememberId")
private def toMap(opt: Option[String], key: String) = opt.fold(Map[String, String]())(value => Map(key -> value))
}
val mapAddress = of[Address](AddressPicker.formatter())
}
| dvla/vehicles-presentation-common | app/uk/gov/dvla/vehicles/presentation/common/mappings/AddressPicker.scala | Scala | mit | 6,176 |
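// Hedged sketch: using the mapping above inside a Play form; the field name
// "address" is illustrative.
import play.api.data.Form
import play.api.data.Forms.single
import uk.gov.dvla.vehicles.presentation.common.model.Address

object AddressFormSketch {
  val addressForm: Form[Address] = Form(
    single("address" -> AddressPicker.mapAddress)
  )
}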
package org.http4s
package headers
/**
* Defined by http://tools.ietf.org/html/rfc6797
*/
object `Strict-Transport-Security` extends HeaderKey.Default
| hvesalai/http4s | core/src/main/scala/org/http4s/headers/Strict-Transport-Security.scala | Scala | apache-2.0 | 154 |