/*
* Copyright (c) 2014 Ben Whitehead.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.github.benwhitehead.finch
import com.twitter.conversions.time.longToTimeableNumber
import com.twitter.finagle.Service
import com.twitter.finagle.builder.ClientBuilder
import com.twitter.finagle.httpx.path.{->, /, Root}
import com.twitter.finagle.httpx.{Http, Method, RequestBuilder}
import com.twitter.util._
import io.finch._
import io.finch.response._
import org.scalatest.{BeforeAndAfterEach, FreeSpec}
import java.io.{BufferedReader, File, FileReader}
/**
* @author Ben Whitehead
*/
class FinchServerTest extends FreeSpec with BeforeAndAfterEach {
lazy val logger = org.slf4j.LoggerFactory.getLogger(getClass.getName)
class Echo extends HttpEndpoint {
var echos: List[String] = Nil
def service(echo: String) = new Service[HttpRequest, HttpResponse] {
def apply(request: HttpRequest): Future[HttpResponse] = {
echos = echos :+ echo
Ok(echo).toFuture
}
}
def route = {
case Method.Get -> Root / "echo" / echo => service(echo)
}
}
class TestServer extends SimpleFinchServer {
lazy val pidFile = File.createTempFile("testServer", ".pid", new File(System.getProperty("java.io.tmpdir")))
pidFile.deleteOnExit()
override lazy val defaultHttpPort = 0
override lazy val config = Config(port = 0, pidPath = pidFile.getAbsolutePath)
override lazy val serverName = "test-server"
lazy val echos = new Echo
def endpoint = {
echos
}
}
class TestClient(hostPort: String) {
lazy val client =
ClientBuilder().
codec(Http()).
hosts(hostPort).
tcpConnectTimeout(1.second).
requestTimeout(10.seconds).
hostConnectionLimit(1).
build
def close() = client.close()
def get(uri: String): Future[String] = {
client(RequestBuilder().url(s"http://$hostPort/$uri".replaceAll("(?<!:)//", "/")).buildGet) flatMap {
case response =>
val buf = response.content
val out = Array.ofDim[Byte](buf.length)
buf.write(out, 0)
Future.value(new String(out, "UTF-8"))
}
}
}
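// A minimal usage sketch for TestClient (hedged: the port below is
// illustrative only; in the tests the real port comes from server.serverPort):
//
//   val c = new TestClient("localhost:8080")
//   val body = Await.result(c.get("/echo/hello")) // => "hello"
//   c.close()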
var server: TestServer = null
var serverThread: Thread = null
var client: TestClient = null
override protected def beforeEach() = {
server = new TestServer
serverThread = new Thread {
override def run() = server.main(Array())
}
serverThread.start() // TODO: Figure out a better way to start the server in another thread or background it
Thread.sleep(2500) // TODO: Figure out how to "Await" on the server
client = new TestClient(s"localhost:${server.serverPort}")
}
override protected def afterEach() = {
server.close()
serverThread.stop()
client.close()
}
"start server" - {
"write pid" in {
val reader = new BufferedReader(new FileReader(server.pidFile))
val pid = reader.readLine().toInt
logger.info(s"pid = $pid")
assert(pid > 1)
}
"handle request" in {
val resp = Await.result(client.get("/echo/test"))
assert(resp === "test")
assert(server.echos.echos.contains("test"))
}
"handle 5 requests" in {
val fs = Future.collect(
(1 to 5) map { i => client.get(s"/echo/test$i")}
)
val resp = Await.result(fs)
assert(resp.size === 5)
assert(resp.toSet === Set("test1", "test2", "test3", "test4", "test5"))
assert(server.echos.echos.contains("test1"))
assert(server.echos.echos.contains("test2"))
assert(server.echos.echos.contains("test3"))
assert(server.echos.echos.contains("test4"))
assert(server.echos.echos.contains("test5"))
}
}
}
// ==== samklr/finch-server :: src/test/scala/io/github/benwhitehead/finch/FinchServerTest.scala (Scala, Apache-2.0) ====
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.scala.dsl;
import org.apache.camel.builder.ExpressionBuilder
import org.apache.camel.model.DelayDefinition
import org.apache.camel.model.language.ExpressionDefinition
import org.apache.camel.scala.dsl.builder.RouteBuilder
/**
* Scala enrichment for Camel's DelayDefinition
*/
case class SDelayDefinition(override val target: DelayDefinition)(implicit val builder: RouteBuilder) extends SAbstractDefinition[DelayDefinition] {
def ms = this
def milliseconds = ms
def sec = {
valueInMs *= 1000
this
}
def seconds = sec
def min = {
valueInMs *= (60 * 1000)
this
}
def minutes = min
// we need this to match the valueInMs_= for now, can be removed once Scala 2.8.0 is out
def valueInMs : Long = 0
def valueInMs_=(period: Long) = target.delay(period)
}
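// A minimal sketch of the unit suffixes above (the `delayDef` value is
// hypothetical; in a route it would come from the DSL's delay(...) call):
//
//   delayDef.sec     // multiplies the pending delay value by 1000 (s -> ms)
//   delayDef.minutes // multiplies it by 60 * 1000 (min -> ms)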
// ==== kingargyle/turmeric-bot :: components/camel-scala/src/main/scala/org/apache/camel/scala/dsl/SDelayDefinition.scala (Scala, Apache-2.0) ====
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.visor
import org.apache.ignite.IgniteSystemProperties.IGNITE_UPDATE_NOTIFIER
import org.apache.ignite._
import org.apache.ignite.cluster.{ClusterGroup, ClusterGroupEmptyException, ClusterMetrics, ClusterNode}
import org.apache.ignite.events.EventType._
import org.apache.ignite.events.{DiscoveryEvent, Event}
import org.apache.ignite.internal.IgniteEx
import org.apache.ignite.internal.IgniteNodeAttributes._
import org.apache.ignite.internal.cluster.ClusterGroupEmptyCheckedException
import org.apache.ignite.internal.util.lang.{GridFunc => F}
import org.apache.ignite.internal.util.typedef._
import org.apache.ignite.internal.util.{GridConfigurationFinder, IgniteUtils => U}
import org.apache.ignite.lang._
import org.apache.ignite.thread.{IgniteThreadFactory, IgniteThreadPoolExecutor}
import org.apache.ignite.visor.commands.common.VisorTextTable
import jline.console.ConsoleReader
import org.jetbrains.annotations.Nullable
import java.io._
import java.lang.{Boolean => JavaBoolean}
import java.net._
import java.text._
import java.util.concurrent._
import java.util.{Collection => JavaCollection, HashSet => JavaHashSet, _}
import org.apache.ignite.internal.visor.cache._
import org.apache.ignite.internal.visor.node.VisorNodeEventsCollectorTaskArg
import org.apache.ignite.internal.visor.node._
import org.apache.ignite.internal.visor.util.VisorEventMapper
import org.apache.ignite.internal.visor.util.VisorTaskUtils._
import org.apache.ignite.internal.visor.{VisorMultiNodeTask, VisorTaskArgument}
import scala.collection.JavaConversions._
import scala.collection.immutable
import scala.language.{implicitConversions, reflectiveCalls}
import scala.util.control.Breaks._
/**
* Holder for command help information.
*/
sealed case class VisorCommandHolder(
name: String,
shortInfo: String,
longInfo: Seq[String],
aliases: Seq[String],
spec: Seq[String],
args: Seq[(String, AnyRef)],
examples: Seq[(String, AnyRef)],
emptyArgs: () => Unit,
withArgs: (String) => Unit
) {
/** Command name with optional aliases. */
lazy val nameWithAliases: String =
if (aliases != null && aliases.nonEmpty)
name + " (" + ("" /: aliases)((b, a) => if (b.length() == 0) a else b + ", " + a) + ")"
else
name
}
/**
* ==Overview==
* This is the '''tagging''' trait that exists solely to have a type associated
* with the `visor` object so that implicit conversions can be done
* on the `visor` object itself. Implicit conversions are essential to the
* extensibility of Visor.
*
* ==Example==
* This is an example on how [[VisorTag]] trait is used to
* extend `visor` natively with custom commands:
*
* <ex>
* class VisorCustomCommand {
* def foo(@Nullable args: String) = {
* if (visor.hasValue("bar", visor.parse(args)))
* println("foobar")
* else
* println("foo")
* }
* def foo(@Nullable args: Symbol*) = foo(visor.flatSymbols(args: _*))
* }
* object VisorCustomCommand {
* implicit def fromVisor(vs: VisorTag) = new VisorCustomCommand
* }
* </ex>
*/
trait VisorTag
/**
* {{{
* ___ _________________________ ________
* __ | / /____ _/__ ___/__ __ \___ __ \
* __ | / / __ / _____ \ _ / / /__ /_/ /
* __ |/ / __/ / ____/ / / /_/ / _ _, _/
* _____/ /___/ /____/ \____/ /_/ |_|
*
* }}}
*
* ==Overview==
* Visor console provides monitoring capabilities for Ignite.
*
* ==Usage==
* Ignite ships with `IGNITE_HOME/bin/ignitevisorcmd.{sh|bat}` script that starts Visor console.
*
* Just type <ex>help</ex> in Visor console to get help and get started.
*/
@IgniteNotPeerDeployable
object visor extends VisorTag {
/** Argument type. */
type Arg = (String, String)
/** Type alias for command argument list. */
type ArgList = Seq[Arg]
/** Type alias for general node filter. */
type NodeFilter = ClusterNode => Boolean
/** Type alias for general event filter. */
type EventFilter = Event => Boolean
/** `Nil` is for empty list, `Til` is for empty tuple. */
val Til: Arg = (null, null)
/** Node filter that includes any node. */
final val ALL_NODES_FILTER = (_: ClusterNode) => true
/** System line separator. */
final val NL = System getProperty "line.separator"
/** Display value for `null`. */
final val NA = "<n/a>"
/** */
private var cmdLst: Seq[VisorCommandHolder] = Nil
/** Node left listener. */
private var nodeLeftLsnr: IgnitePredicate[Event] = _
/** Node join listener. */
private var nodeJoinLsnr: IgnitePredicate[Event] = _
/** Node segmentation listener. */
private var nodeSegLsnr: IgnitePredicate[Event] = _
/** Node stop listener. */
private var nodeStopLsnr: IgnitionListener = _
/** */
@volatile private var isCon: Boolean = false
/**
* Whether or not Visor is the owner of the connection, or it
* reused one already opened.
*/
@volatile private var conOwner: Boolean = false
/** */
@volatile private var conTs: Long = 0
private final val LOC = Locale.US
/** Date time format. */
private final val dtFmt = new SimpleDateFormat("MM/dd/yy, HH:mm:ss", LOC)
/** Date format. */
private final val dFmt = new SimpleDateFormat("dd MMMM yyyy", LOC)
private final val DEC_FMT_SYMS = new DecimalFormatSymbols(LOC)
/** Number format. */
private final val nmFmt = new DecimalFormat("#", DEC_FMT_SYMS)
/** KB format. */
private final val kbFmt = new DecimalFormat("###,###,###,###,###", DEC_FMT_SYMS)
/** */
private val mem = new ConcurrentHashMap[String, String]()
/** List of close callbacks. */
@volatile private var cbs = Seq.empty[() => Unit]
/** List of shutdown callbacks. */
@volatile private var shutdownCbs = Seq.empty[() => Unit]
/**
* Default log file path. Note that this path is relative to `IGNITE_HOME/work` folder
* if `IGNITE_HOME` system or environment variable specified, otherwise it is relative to
* `work` folder under system `java.io.tmpdir` folder.
*/
private final val DFLT_LOG_PATH = "visor/visor-log"
/** Log file. */
private var logFile: File = _
/** Log timer. */
private var logTimer: Timer = _
/** Topology log timer. */
private var topTimer: Timer = _
/** Log started flag. */
@volatile private var logStarted = false
/** Internal thread pool. */
@volatile var pool: ExecutorService = new IgniteThreadPoolExecutor(
Runtime.getRuntime().availableProcessors(),
Runtime.getRuntime().availableProcessors(),
0L,
new LinkedBlockingQueue[Runnable](),
new IgniteThreadFactory("visorInstance", "visor")
)
/** Configuration file path, if any. */
@volatile var cfgPath: String = _
/** */
@volatile var ignite: IgniteEx = _
/** */
@volatile var prevIgnite: Option[IgniteEx] = None
private var reader: ConsoleReader = _
var batchMode: Boolean = false
def reader(reader: ConsoleReader) {
assert(reader != null)
this.reader = reader
}
/**
* Get grid node for specified ID.
*
* @param nid Node ID.
* @return ClusterNode instance.
* @throws IgniteException if Visor is disconnected or node not found.
*/
def node(nid: UUID): ClusterNode = {
val g = ignite
if (g == null)
throw new IgniteException("Visor disconnected")
else {
val node = g.cluster.node(nid)
if (node == null)
throw new IgniteException("Node is gone: " + nid)
node
}
}
/**
* @param node Optional node.
* @param cacheName Cache name to take cluster group for.
* @return Cluster group with data nodes for specified cache or cluster group for specified node.
*/
def groupForDataNode(node: Option[ClusterNode], cacheName: String) = {
val grp = node match {
case Some(n) => ignite.cluster.forNode(n)
case None => ignite.cluster.forNodeIds(executeRandom(classOf[VisorCacheNodesTask],
new VisorCacheNodesTaskArg(cacheName)))
}
if (grp.nodes().isEmpty)
throw new ClusterGroupEmptyException("Topology is empty.")
grp
}
/**
* @param nodeOpt Node.
* @param cacheName Cache name.
* @return Message about why node was not found.
*/
def messageNodeNotFound(nodeOpt: Option[ClusterNode], cacheName: String) = nodeOpt match {
case Some(node) => "Can't find node with specified id: " + node.id()
case None => "Can't find nodes for cache: " + escapeName(cacheName)
}
Runtime.getRuntime.addShutdownHook(new Thread() {
override def run() {
try
if (ignite != null && isConnected) {
// Call all shutdown callbacks.
shutdownCbs foreach(_.apply())
close() // This will stop the grid too if Visor is connection owner.
}
catch {
case ignore: Throwable => // ignore
}
}
})
// Make sure visor starts without version checker print.
System.setProperty(IGNITE_UPDATE_NOTIFIER, "false")
addHelp(
name = "mlist",
shortInfo = "Prints Visor console memory variables.",
spec = Seq(
"mlist {arg}"
),
args = Seq(
"arg" ->
"String that contains start characters of variable names."
),
examples = Seq(
"mlist" ->
"Prints out all Visor console memory variables.",
"mlist ac" ->
"Lists variables that start with 'a' or 'c' from Visor console memory."
),
emptyArgs = mlist,
withArgs = mlist
)
addHelp(
name = "mget",
shortInfo = "Gets Visor console memory variable.",
longInfo = Seq(
"Gets Visor console memory variable. Variable can be referenced with '@' prefix."
),
spec = Seq(
"mget <@v>"
),
args = Seq(
"@v" ->
"Variable name."
),
examples = Seq(
"mget <@v>" ->
"Gets Visor console variable whose name is referenced by variable 'v'."
),
emptyArgs = mget,
withArgs = mget
)
addHelp(
name = "mcompact",
shortInfo = "Fills gap in Visor console memory variables.",
longInfo = Seq(
"Finds and fills gap in Visor console memory variables."
),
spec = Seq(
"mcompact"
),
examples = Seq(
"mcompact" ->
"Fills gap in Visor console memory variables."
),
emptyArgs = mcompact,
withArgs = _ => wrongArgs("mcompact")
)
addHelp(
name = "help",
shortInfo = "Prints Visor console help.",
aliases = Seq("?"),
spec = Seq(
"help {c1 c2 ... ck}"
),
args = Seq(
"ck" ->
"Command to get help for."
),
examples = Seq(
"help status" ->
"Prints help for 'status' command.",
"help" ->
"Prints help for all command."
),
emptyArgs = help,
withArgs = help
)
addHelp(
name = "status",
shortInfo = "Prints Visor console status.",
aliases = Seq("!"),
spec = Seq(
"status {-q}"
),
args = Seq(
"-q" ->
"Quite output without ASCII logo."
),
examples = Seq(
"status" ->
"Prints Visor console status.",
"status -q" ->
"Prints Visor console status in quiet mode."
),
emptyArgs = status,
withArgs = status
)
/**
* @param name Command name.
*/
private def wrongArgs(name: String) {
warn("Invalid arguments for command without arguments.",
s"Type 'help $name' to see how to use this command.")
}
addHelp(
name = "close",
shortInfo = "Disconnects Visor console from the grid.",
spec = Seq("close"),
examples = Seq(
"close" ->
"Disconnects Visor console from the grid."
),
emptyArgs = close,
withArgs = _ => wrongArgs("close")
)
addHelp(
name = "quit",
shortInfo = "Quit from Visor console.",
spec = Seq("quit"),
examples = Seq(
"quit" ->
"Quit from Visor console."
),
aliases = Seq("exit"),
emptyArgs = quit,
withArgs = _ => wrongArgs("quit")
)
addHelp(
name = "log",
shortInfo = "Starts or stops grid-wide events logging.",
longInfo = Seq(
"Logging of discovery and failure grid-wide events.",
" ",
"Events are logged to a file. If path is not provided,",
"it will log into '<Ignite home folder>/work/visor/visor-log'.",
" ",
"File is always opened in append mode.",
"If file doesn't exist, it will be created.",
" ",
"It is often convenient to 'tail -f' the log file",
"in a separate console window.",
" ",
"Log command prints periodic topology snapshots in the following format:",
"H/N/C |1 |1 |4 |=^========..........|",
"where:",
" H - Hosts",
" N - Nodes",
" C - CPUs",
" = - 5%-based marker of average CPU load across the topology",
" ^ - 5%-based marker of average heap memory used across the topology"
),
spec = Seq(
"log",
"log -l {-f=<path>} {-p=<num>} {-t=<num>} {-dl}",
"log -s"
),
args = Seq(
"-l" -> Seq(
"Starts logging.",
"If logging is already started - it's no-op."
),
"-f=<path>" -> Seq(
"Provides path to the file.",
"Path to the file can be absolute or relative to Ignite home folder."
),
"-p=<num>" -> Seq(
"Provides period of querying events (in seconds).",
"Default is 10."
),
"-t=<num>" -> Seq(
"Provides period of logging topology snapshot (in seconds).",
"Default is 20."
),
"-s" -> Seq(
"Stops logging.",
"If logging is already stopped - it's no-op."
),
"-dl" -> Seq(
"Disables collecting of job and task fail events, cache rebalance events from remote nodes."
)
),
examples = Seq(
"log" ->
"Prints log status.",
"log -l -f=/home/user/visor-log" ->
"Starts logging to file 'visor-log' located at '/home/user'.",
"log -l -f=log/visor-log" ->
"Starts logging to file 'visor-log' located at '<Ignite home folder>/log'.",
("log -l -p=20",
"Starts logging to file '<Ignite home folder>/work/visor/visor-log' " +
"with querying events period of 20 seconds."),
("log -l -t=30",
"Starts logging to file '<Ignite home folder>/work/visor/visor-log' " +
"with topology snapshot logging period of 30 seconds."),
("log -l -dl",
"Starts logging to file '<Ignite home folder>/work/visor/visor-log' " +
"with disabled collection events from remote nodes."),
"log -s" ->
"Stops logging."
),
emptyArgs = log,
withArgs = log
)
logText("Visor started.")
// Print out log explanation at the beginning.
logText("<log>: H - Hosts")
logText("<log>: N - Nodes")
logText("<log>: C - CPUs")
logText("<log>: = - 5%-based marker of average CPU load across the topology")
logText("<log>: ^ - 5%-based marker of average heap memory used across the topology")
/**
* ==Command==
* Lists Visor console memory variables.
*
* ==Examples==
* <ex>mlist ac</ex>
* Lists variables that start with `a` or `c` from Visor console memory.
*
* <ex>mlist</ex>
* Lists all variables from Visor console memory.
*
* @param arg String that contains start characters of listed variables.
* If empty - all variables will be listed.
*/
def mlist(arg: String) {
assert(arg != null)
if (mem.isEmpty)
println("Memory is empty.")
else {
val r = if (arg.trim == "") mem.toMap else mem.filter { case (k, _) => arg.contains(k.charAt(0)) }
if (r.isEmpty)
println("No matches found.")
else {
val t = new VisorTextTable()
t.maxCellWidth = 70
t #= ("Name", "Value")
r.toSeq.sortBy(_._1).foreach { case (k, v) => t += (k, v) }
t.render()
nl()
println(
"Variable can be referenced in other commands with '@' prefix." + NL +
"Reference can be either a flag or a parameter value." + NL +
"\\nEXAMPLE: " + NL +
" 'help @cmd' - where 'cmd' variable contains command name." + NL +
" 'node -id8=@n11' - where 'n11' variable contains node ID8."
)
}
}
}
/**
* Shortcut for `println()`.
*/
def nl() {
println()
}
/**
* ==Command==
* Lists all Visor console memory.
*
* ==Examples==
* <ex>mlist</ex>
* Lists all variables in Visor console memory.
*/
def mlist() {
mlist("")
}
/**
* ==Command==
* Fills gap in Visor console memory variables.
*
* ==Examples==
* <ex>mcompact</ex>
* Fills gap in Visor console memory variables.
*/
def mcompact() {
val namespaces = Array("a", "c", "e", "n", "s", "t")
for (namespace <- namespaces) {
val vars = mem.filter { case (k, _) => k.matches(s"$namespace\\d+") }
if (vars.nonEmpty) {
clearNamespace(namespace)
vars.toSeq.sortBy(_._1).foreach { case (_, v) => setVar(v, namespace) }
}
}
}
/**
* Clears given Visor console variable or the whole namespace.
*
* @param arg Variable name or namespace mnemonic.
*/
def mclear(arg: String) {
assert(arg != null)
arg match {
case "-ev" => clearNamespace("e")
case "-al" => clearNamespace("a")
case "-ca" => clearNamespace("c")
case "-no" => clearNamespace("n")
case "-tn" => clearNamespace("t")
case "-ex" => clearNamespace("s")
case _ => mem.remove(arg)
}
}
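// A minimal sketch: a namespace mnemonic drops a whole variable namespace,
// while a plain name removes a single entry.
//
//   mclear("-no") // removes n0, n1, ... from memory
//   mclear("h0")  // removes only the 'h0' variable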
/**
* Clears given variable namespace.
*
* @param namespace Namespace.
*/
private def clearNamespace(namespace: String) {
assert(namespace != null)
mem.keySet.foreach(k => {
if (k.matches(s"$namespace\\d+"))
mem.remove(k)
})
}
/**
* Clears all Visor console memory.
*/
def mclear() {
mem.clear()
}
/**
* Finds variables by their value.
*
* @param v Value to find by.
*/
def mfind(@Nullable v: String) = mem.filter(t => t._2 == v).toSeq
/**
* Finds the first variable by its value, ignoring the `nl` and `nr` aliases.
*
* @param v Value to find by.
*/
def mfindHead(@Nullable v: String) = mfind(v).filterNot(entry => Seq("nl", "nr").contains(entry._1)).headOption
/**
* Sets Visor console memory variable. Note that this method '''does not'''
* perform variable substitution on its parameters.
*
* @param n Name of the variable. Can't be `null`.
* @param v Value of the variable. Can't be `null`.
* @return Previous value.
*/
def mset(n: String, v: String): String = {
msetOpt(n, v).orNull
}
/**
* Sets Visor console memory variable. Note that this method '''does not'''
* perform variable substitution on its parameters.
*
* @param n Name of the variable. Can't be `null`.
* @param v Value of the variable. Can't be `null`.
* @return Previous value as an option.
*/
def msetOpt(n: String, v: String): Option[String] = {
assert(n != null)
assert(v != null)
val prev = mem.get(n)
mem.put(n, v)
Option(prev)
}
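// A minimal sketch of storing and reading back a memory variable:
//
//   mset("n0", "12345678") // returns the previous value, or null
//   mgetOpt("n0")          // => Some("12345678")
//   mgetOpt("missing")     // => None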
/**
* ==Command==
* Gets Visor console memory variable. Note that this method '''does not'''
* perform variable substitution on its parameters.
*
* ==Examples==
* <ex>mget @a</ex>
* Gets the value for Visor console variable '@a'.
*
* @param n Name of the variable.
*/
def mget(n: String) {
val key = if (n.startsWith("@")) n.substring(1) else n
if (mem.containsKey(key)) {
val t = new VisorTextTable()
t.maxCellWidth = 70
t #= ("Name", "Value")
t += (n, mem.get(key))
t.render()
nl()
}
else {
warn("Missing variable with name: \\'" + n + "\\'.")
}
}
/**
* Trap for missing arguments.
*/
def mget() {
warn("Missing argument.")
warn("Type 'help mget' to see how to use this command.")
}
/**
* ==Command==
* Gets Visor console memory variable. Note that this method '''does not'''
* perform variable substitution on its parameters.
*
* ==Examples==
* <ex>mgetOpt a</ex>
* Gets the value as an option for Visor console variable 'a'.
*
* @param n Name of the variable.
* @return Variable value as an option.
*/
def mgetOpt(n: String): Option[String] = {
assert(n != null)
Option(mem.get(n))
}
/**
* If variable with given value and prefix doesn't exist - creates
* a new variable with given value and returns its name. Otherwise,
* returns an existing variable name.
*
* @param v Value.
* @param prefix Variable name prefix.
* @return Existing variable name or the new variable name.
*/
def setVarIfAbsent(v: AnyRef, prefix: String): String = {
assert(v != null)
assert(prefix != null && prefix.length > 0)
val s = v.toString
val t = mem.find((t: (String, String)) => t._1.startsWith(prefix) && t._2 == s)
if (t.isDefined)
t.get._1
else {
for (i <- 0 until Int.MaxValue if mem.putIfAbsent(prefix + i, s) == null)
return prefix + i
throw new IgniteException("No more memory.")
}
}
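// A minimal sketch: repeated calls with the same value and prefix reuse the
// existing variable instead of allocating a new slot.
//
//   val a = setVarIfAbsent("127.0.0.1", "h") // e.g. "h0"
//   val b = setVarIfAbsent("127.0.0.1", "h") // same name as `a`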
/**
* Tries to get a variable value with the given name.
*
* @param v Variable name.
* @return Variable value, or `v` itself if no variable with name `v` exists.
*/
def getVariable(v: String): String = {
v match {
case name if name.startsWith("@") => mgetOpt(name.substring(1)).getOrElse(v)
case _ => v
}
}
/**
* Creates a new variable with given value and returns its name.
*
* @param v Value.
* @param prefix Variable name prefix.
* @return New variable name.
*/
def setVar(v: AnyRef, prefix: String): String = {
assert(v != null)
assert(prefix != null && prefix.length > 0)
val s = v.toString
for (i <- 0 until Int.MaxValue if mem.putIfAbsent(prefix + i, s) == null)
return prefix + i
throw new IgniteException("No more memory.")
}
/**
* Adds command help to the Visor console. This will be printed as part of `help` command.
*
* @param name Command name.
* @param shortInfo Short command description.
* @param longInfo Optional multi-line long command description. If not provided - short description
* will be used instead.
* @param aliases List of aliases. Optional.
* @param spec Command specification.
* @param args List of `(name, description)` tuples for command arguments. Optional.
* @param examples List of `(example, description)` tuples for command examples.
* @param emptyArgs Command implementation for empty arguments.
* @param withArgs Command implementation with arguments.
*/
def addHelp(
name: String,
shortInfo: String,
@Nullable longInfo: Seq[String] = null,
@Nullable aliases: Seq[String] = Seq.empty,
spec: Seq[String],
@Nullable args: Seq[(String, AnyRef)] = null,
examples: Seq[(String, AnyRef)],
emptyArgs: () => Unit,
withArgs: (String) => Unit) {
assert(name != null)
assert(shortInfo != null)
assert(spec != null && spec.nonEmpty)
assert(examples != null && examples.nonEmpty)
assert(emptyArgs != null)
assert(withArgs != null)
// Add and re-sort
cmdLst = (cmdLst ++ Seq(VisorCommandHolder(name, shortInfo, longInfo, aliases, spec, args, examples, emptyArgs, withArgs))).
sortWith((a, b) => a.name.compareTo(b.name) < 0)
}
/**
* Extract node from command arguments.
*
* @param argLst Command arguments.
* @return Error message or node ref.
*/
def parseNode(argLst: ArgList) = {
val id8 = argValue("id8", argLst)
val id = argValue("id", argLst)
if (id8.isDefined && id.isDefined)
Left("Only one of '-id8' or '-id' is allowed.")
else if (id8.isDefined) {
nodeById8(id8.get) match {
case Nil => Left("Unknown 'id8' value: " + id8.get)
case node :: Nil => Right(Option(node))
case _ => Left("'id8' resolves to more than one node (use full 'id' instead): " + id8.get)
}
}
else if (id.isDefined) {
try {
val node = Option(ignite.cluster.node(java.util.UUID.fromString(id.get)))
if (node.isDefined)
Right(node)
else
Left("'id' does not match any node: " + id.get)
}
catch {
case e: IllegalArgumentException => Left("Invalid node 'id': " + id.get)
}
}
else
Right(None)
}
private[this] def parseArg(arg: String): Arg = {
if (arg(0) == '-' || arg(0) == '/') {
val eq = arg.indexOf('=')
if (eq == -1)
arg.substring(1) -> null
else {
val n = arg.substring(1, eq).trim
var v = arg.substring(eq + 1).trim.replaceAll("['\"`]$", "").replaceAll("^['\"`]", "")
if (v.startsWith("@"))
v = mgetOpt(v.substring(1)).getOrElse(v)
n -> v
}
}
else {
val k: String = null
val v = if (arg.startsWith("@"))
mgetOpt(arg.substring(1)).getOrElse(arg)
else
arg
k -> v
}
}
private val quotedArg = "(?:[-/].*=)?(['\"`]).*".r
/**
* Utility method that parses command arguments represented as a string
* into an argument list represented as a list of `(name, value)` tuples, performing
* variable substitution:
*
* `-p=@n` - A named parameter where `@n` will be considered as a reference to variable named `n`.
* `@n` - An unnamed parameter where `@n` will be considered as a reference to variable named `n`.
* `-p` - A flag. Flags don't support variable substitution.
*
* Note that recursive substitution isn't supported. If specified variable isn't set - the value
* starting with `@` will be used as-is.
*
* @param args Command arguments to parse.
*/
def parseArgs(@Nullable args: String): ArgList = {
val buf = collection.mutable.ArrayBuffer.empty[Arg]
if (args != null && args.trim.nonEmpty) {
val lst = args.trim.split(" ")
val sb = new StringBuilder()
for (i <- 0 until lst.size if lst(i).nonEmpty || sb.nonEmpty) {
val arg = sb.toString + lst(i)
arg match {
case quotedArg(quote) if arg.count(_ == quote(0)) % 2 != 0 && i + 1 < lst.size =>
sb.append(lst(i)).append(" ")
case _ =>
sb.clear()
buf += parseArg(arg)
}
}
}
buf
}
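// A minimal sketch of the resulting ArgList (assuming Visor memory variable
// 'n0' holds "12345678"):
//
//   parseArgs("-id8=@n0 -a log")
//   // => Seq("id8" -> "12345678", "a" -> null, null -> "log")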
/**
* Shortcut method that checks if passed in argument list has an argument with given value.
*
* @param v Argument value to check for existence in this list.
* @param args Command argument list.
*/
def hasArgValue(@Nullable v: String, args: ArgList): Boolean = {
assert(args != null)
args.exists(_._2 == v)
}
/**
* Shortcut method that checks if passed in argument list has an argument with given name.
*
* @param n Argument name to check for existence in this list.
* @param args Command argument list.
*/
def hasArgName(@Nullable n: String, args: ArgList): Boolean = {
assert(args != null)
args.exists(_._1 == n)
}
/**
* Shortcut method that checks if flag (non-`null` name and `null` value) is set
* in the argument list.
*
* @param n Name of the flag.
* @param args Command argument list.
*/
def hasArgFlag(n: String, args: ArgList): Boolean = {
assert(n != null && args != null)
args.exists((a) => a._1 == n && a._2 == null)
}
/**
* Gets the value for a given argument name.
*
* @param n Argument name.
* @param args Argument list.
* @return Argument value.
*/
@Nullable def argValue(n: String, args: ArgList): Option[String] = {
assert(n != null && args != null)
Option((args find(_._1 == n) getOrElse Til)._2)
}
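// A minimal sketch over argLst = parseArgs("-l -f=/home/user/visor-log"):
//
//   hasArgFlag("l", argLst) // => true (name present, value is null)
//   argValue("f", argLst)   // => Some("/home/user/visor-log")
//   argValue("p", argLst)   // => None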
/**
* Gets a non-`null` value for given parameter.
*
* @param a Parameter.
* @param dflt Value to return if `a` is `null`.
*/
def safe(@Nullable a: Any, dflt: Any = NA) = {
assert(dflt != null)
if (a != null) a.toString else dflt.toString
}
/**
* Joins array elements to string.
*
* @param arr Array.
* @param dflt Value to return if `arr` is `null` or empty.
* @return String.
*/
def arr2Str[T](arr: Array[T], dflt: Any = NA) =
if (arr != null && arr.length > 0) U.compact(arr.mkString(", ")) else dflt.toString
/**
* Converts `Boolean` to 'on'/'off' string.
*
* @param bool Boolean value.
* @return String.
*/
def bool2Str(bool: Boolean) = if (bool) "on" else "off"
/**
* Converts `java.lang.Boolean` to 'on'/'off' string.
*
* @param bool Boolean value.
* @param ifNull Default value in case if `bool` is `null`.
* @return String.
*/
def javaBoolToStr(bool: JavaBoolean, ifNull: Boolean = false) =
bool2Str(if (bool == null) ifNull else bool.booleanValue())
/**
* Reconstructs string presentation for given argument.
*
* @param arg Argument to reconstruct.
*/
@Nullable def makeArg(arg: Arg): String = {
assert(arg != null)
var s = ""
if (arg._1 != null) {
s = "-" + arg._1
if (arg._2 != null)
s = s + '=' + arg._2
}
else
s = arg._2
s
}
/**
* Reconstructs string presentation for given argument list.
*
* @param args Argument list to reconstruct.
*/
def makeArgs(args: ArgList): String = {
assert(args != null)
("" /: args)((b, a) => if (b.length == 0) makeArg(a) else b + ' ' + makeArg(a))
}
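// A minimal sketch (roughly the inverse of parseArgs, modulo quoting):
//
//   makeArgs(Seq("id8" -> "12345678", "l" -> null, null -> "log"))
//   // => "-id8=12345678 -l log"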
/**
* Parses string containing mnemonic predicate and returns Scala predicate.
*
* @param s Mnemonic predicate.
* @return `Long => Boolean` predicate as an option, or `None` if the predicate cannot be created.
*/
def makeExpression(s: String): Option[Long => Boolean] = {
assert(s != null)
def value(v: String): Long =
// Support for seconds, minutes and hours.
// NOTE: all memory sizes are assumed to be in MB.
v.last match {
case 's' => v.substring(0, v.length - 1).toLong * 1000
case 'm' => v.substring(0, v.length - 1).toLong * 1000 * 60
case 'h' => v.substring(0, v.length - 1).toLong * 1000 * 60 * 60
case _ => v.toLong
}
try
Option(
if (s == null)
null
else if (s.startsWith("lte")) // <=
_ <= value(s.substring(3))
else if (s.startsWith("lt")) // <
_ < value(s.substring(2))
else if (s.startsWith("gte")) // >=
_ >= value(s.substring(3))
else if (s.startsWith("gt")) // >
_ > value(s.substring(2))
else if (s.startsWith("eq")) // ==
_ == value(s.substring(2))
else if (s.startsWith("neq")) // !=
_ != value(s.substring(3))
else
null
)
catch {
case e: Throwable => None
}
}
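// A minimal sketch ('s'/'m'/'h' suffixes are converted to milliseconds):
//
//   makeExpression("gte30s").exists(_(30000L)) // => true (30000 ms >= 30 s)
//   makeExpression("lt5000").exists(_(4999L))  // => true
//   makeExpression("foo")                      // => None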
// Formatters.
private val dblFmt = new DecimalFormat("#0.00", DEC_FMT_SYMS)
private val intFmt = new DecimalFormat("#0", DEC_FMT_SYMS)
/**
* Formats double value with `#0.00` formatter.
*
* @param d Double value to format.
*/
def formatDouble(d: Double): String = {
dblFmt.format(d)
}
/**
* Formats double value with `#0` formatter.
*
* @param d Double value to format.
*/
def formatInt(d: Double): String = {
intFmt.format(d.round)
}
/**
* Returns string representation of the timestamp provided. Result formatted
* using pattern `MM/dd/yy, HH:mm:ss`.
*
* @param ts Timestamp.
*/
def formatDateTime(ts: Long): String =
dtFmt.format(ts)
/**
* Returns string representation of the date provided. Result formatted using
* pattern `MM/dd/yy, HH:mm:ss`.
*
* @param date Date.
*/
def formatDateTime(date: Date): String =
dtFmt.format(date)
/**
* Returns string representation of the timestamp provided. Result formatted
* using pattern `dd MMMM yyyy`.
*
* @param ts Timestamp.
*/
def formatDate(ts: Long): String =
dFmt.format(ts)
/**
* Returns string representation of the date provided. Result formatted using
* pattern `dd MMMM yyyy`.
*
* @param date Date.
*/
def formatDate(date: Date): String =
dFmt.format(date)
/**
* Base class for memory units.
*
* @param name Unit name to display on screen.
* @param base Unit base to convert from bytes.
*/
private[this] sealed abstract class VisorMemoryUnit(name: String, val base: Long) {
/**
* Convert memory in bytes to memory in units.
*
* @param m Memory in bytes.
* @return Memory in units.
*/
def toUnits(m: Long): Double = m.toDouble / base
/**
* Check if memory fits measure units.
*
* @param m Memory in bytes.
* @return `True` if memory is at least `1` after converting bytes to units.
*/
def has(m: Long): Boolean = toUnits(m) >= 1
override def toString = name
}
private[this] case object BYTES extends VisorMemoryUnit("b", 1)
private[this] case object KILOBYTES extends VisorMemoryUnit("kb", 1024L)
private[this] case object MEGABYTES extends VisorMemoryUnit("mb", 1024L * 1024L)
private[this] case object GIGABYTES extends VisorMemoryUnit("gb", 1024L * 1024L * 1024L)
private[this] case object TERABYTES extends VisorMemoryUnit("tb", 1024L * 1024L * 1024L * 1024L)
/**
* Detect memory measure units: from BYTES to TERABYTES.
*
* @param m Memory in bytes.
* @return Memory measure units.
*/
private[this] def memoryUnit(m: Long): VisorMemoryUnit =
if (TERABYTES.has(m))
TERABYTES
else if (GIGABYTES.has(m))
GIGABYTES
else if (MEGABYTES.has(m))
MEGABYTES
else if (KILOBYTES.has(m))
KILOBYTES
else
BYTES
/**
* Returns string representation of the memory.
*
* @param n Memory size.
*/
def formatMemory(n: Long): String = {
if (n > 0) {
val u = memoryUnit(n)
kbFmt.format(u.toUnits(n)) + u.toString
}
else
"0"
}
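// A minimal sketch (memoryUnit picks the largest unit that yields >= 1):
//
//   formatMemory(512L)              // => "512b"
//   formatMemory(10L * 1024 * 1024) // => "10mb"
//   formatMemory(0L)                // => "0"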
/**
* Returns string representation of the memory limit.
*
* @param n Memory size.
*/
def formatMemoryLimit(n: Long): String = {
n match {
case -1 => "Disabled"
case 0 => "Unlimited"
case m => formatMemory(m)
}
}
/**
* Returns string representation of the number.
*
* @param n Number.
*/
def formatNumber(n: Long): String =
nmFmt.format(n)
/**
* Tests whether or not Visor console is connected.
*
* @return `True` if Visor console is connected.
*/
def isConnected =
isCon
/**
* Gets timestamp of Visor console connection. Returns `0` if Visor console is not connected.
*
* @return Timestamp of Visor console connection.
*/
def connectTimestamp =
conTs
/**
* Prints properly formatted warning message like:
* {{{
* (wrn) <visor>: warning message
* }}}
*
* @param warnMsgs Warning messages to print. If `null` - this function is no-op.
*/
def warn(warnMsgs: Any*) {
assert(warnMsgs != null)
warnMsgs.foreach(line => println(s"(wrn) <visor>: $line"))
}
/**
* Prints standard 'not connected' error message.
*/
def adviseToConnect() {
warn(
"Visor is disconnected.",
"Type 'open' to connect Visor console or 'help open' to get help."
)
}
/**
* Gets global projection as an option.
*/
def gridOpt =
Option(ignite)
def noop() {}
/**
* ==Command==
* Prints Visor console status.
*
* ==Example==
* <ex>status -q</ex>
* Prints Visor console status without ASCII logo.
*
* @param args Optional "-q" flag to disable ASCII logo printout.
*/
def status(args: String) {
val t = VisorTextTable()
t += ("Status", if (isCon) "Connected" else "Disconnected")
t += ("Ignite instance name",
if (ignite == null)
NA
else {
val n = ignite.name
escapeName(n)
}
)
t += ("Config path", safe(cfgPath))
t += ("Uptime", if (isCon) X.timeSpan2HMS(uptime) else NA)
t.render()
}
/**
* ==Command==
* Prints Visor console status (with ASCII logo).
*
* ==Example==
* <ex>status</ex>
* Prints Visor console status.
*/
def status() {
status("")
}
/**
* ==Command==
* Prints help for specific command(s) or for all commands.
*
* ==Example==
* <ex>help</ex>
* Prints general help.
*
* <ex>help open</ex>
* Prints help for 'open' command.
*
* @param args List of commands to print help for. If empty - prints generic help.
*/
def help(args: String = null) {
val argLst = parseArgs(args)
if (!has(argLst)) {
val t = VisorTextTable()
t.autoBorder = false
t.maxCellWidth = 55
t #= ("Command", "Description")
cmdLst foreach (hlp => t += (hlp.nameWithAliases, hlp.shortInfo))
t.render()
println("\\nType 'help \\"command name\\"' to see how to use this command.")
}
else
for (c <- argLst)
if (c._1 != null)
warn("Invalid command name: " + argName(c))
else if (c._2 == null)
warn("Invalid command name: " + argName(c))
else {
val n = c._2
val opt = cmdLst.find(_.name == n)
if (opt.isEmpty)
warn("Invalid command name: " + n)
else {
val hlp: VisorCommandHolder = opt.get
val t = VisorTextTable()
t += (hlp.nameWithAliases, if (hlp.longInfo == null) hlp.shortInfo else hlp.longInfo)
t.render()
println("\\nSPECIFICATION:")
hlp.spec foreach(s => println(blank(4) + s))
if (has(hlp.args)) {
println("\\nARGUMENTS:")
hlp.args foreach (a => {
val (arg, desc) = a
println(blank(4) + arg)
desc match {
case (lines: Iterable[_]) => lines foreach (line => println(blank(8) + line))
case s: AnyRef => println(blank(8) + s.toString)
}
})
}
if (has(hlp.examples)) {
println("\\nEXAMPLES:")
hlp.examples foreach (a => {
val (ex, desc) = a
println(blank(4) + ex)
desc match {
case (lines: Iterable[_]) => lines foreach (line => println(blank(8) + line))
case s: AnyRef => println(blank(8) + s.toString)
}
})
}
nl()
}
}
}
/**
* Tests whether passed in sequence is not `null` and not empty.
*/
private def has[T](@Nullable s: Seq[T]): Boolean = {
s != null && s.nonEmpty
}
/**
* ==Command==
* Prints generic help.
*
* ==Example==
* <ex>help</ex>
* Prints help.
*/
def help() {
help("")
}
/**
* Helper function that makes up the full argument name from the tuple.
*
* @param t Command argument tuple.
*/
def argName(t: (String, String)): String =
if (F.isEmpty(t._1) && F.isEmpty(t._2))
"<empty>"
else if (F.isEmpty(t._1))
t._2
else
t._1
/**
* Helper method that produces blank string of given length.
*
* @param len Length of the blank string.
*/
private def blank(len: Int) = new String().padTo(len, ' ')
/**
* Connects Visor console to a named Ignite instance started from the given configuration path.
*
* @param igniteInstanceName Name of Ignite instance.
* @param cfgPath Configuration path.
*/
def open(igniteInstanceName: String, cfgPath: String) {
this.cfgPath = cfgPath
ignite =
try
Ignition.ignite(igniteInstanceName).asInstanceOf[IgniteEx]
catch {
case _: IllegalStateException =>
this.cfgPath = null
throw new IgniteException("Named Ignite instance unavailable: " + igniteInstanceName)
}
assert(cfgPath != null)
isCon = true
conOwner = true
conTs = System.currentTimeMillis
ignite.cluster.nodes().foreach(n => {
setVarIfAbsent(nid8(n), "n")
val ip = sortAddresses(n.addresses()).headOption
if (ip.isDefined)
setVarIfAbsent(ip.get, "h")
})
val onHost = ignite.cluster.forHost(ignite.localNode())
Option(onHost.forServers().forOldest().node()).foreach(n => msetOpt("nl", nid8(n)))
Option(ignite.cluster.forOthers(onHost).forServers.forOldest().node()).foreach(n => msetOpt("nr", nid8(n)))
nodeJoinLsnr = new IgnitePredicate[Event]() {
override def apply(e: Event): Boolean = {
e match {
case de: DiscoveryEvent =>
val n = nid8(de.eventNode())
setVarIfAbsent(n, "n")
val node = ignite.cluster.node(de.eventNode().id())
if (node != null) {
val alias = if (U.sameMacs(ignite.localNode(), node)) "nl" else "nr"
if (mgetOpt(alias).isEmpty)
msetOpt(alias, n)
val ip = sortAddresses(node.addresses).headOption
if (ip.isDefined)
setVarIfAbsent(ip.get, "h")
}
else {
warn(
"New node not found: " + de.eventNode().id(),
"Visor must have discovery configuration and local " +
"host bindings identical with grid nodes."
)
}
}
true
}
}
ignite.events().localListen(nodeJoinLsnr, EVT_NODE_JOINED)
val mclear = (node: ClusterNode) => {
mfind(nid8(node)).foreach(nv => mem.remove(nv._1))
val onHost = ignite.cluster.forHost(ignite.localNode())
if (mgetOpt("nl").isEmpty)
Option(onHost.forServers().forOldest().node()).foreach(n => msetOpt("nl", nid8(n)))
if (mgetOpt("nr").isEmpty)
Option(ignite.cluster.forOthers(onHost).forServers.forOldest().node()).foreach(n => msetOpt("nr", nid8(n)))
if (onHost.nodes().isEmpty)
sortAddresses(node.addresses).headOption.foreach((ip) => mfind(ip).foreach(hv => mem.remove(hv._1)))
}
nodeLeftLsnr = new IgnitePredicate[Event]() {
override def apply(e: Event): Boolean = {
e match {
case (de: DiscoveryEvent) => mclear(de.eventNode())
}
true
}
}
ignite.events().localListen(nodeLeftLsnr, EVT_NODE_LEFT, EVT_NODE_FAILED)
nodeSegLsnr = new IgnitePredicate[Event] {
override def apply(e: Event): Boolean = {
e match {
case de: DiscoveryEvent =>
if (de.eventNode().id() == ignite.localNode.id) {
warn("Closing Visor console due to topology segmentation.")
warn("Contact your system administrator.")
nl()
close()
}
else
mclear(de.eventNode())
}
true
}
}
ignite.events().localListen(nodeSegLsnr, EVT_NODE_SEGMENTED)
nodeStopLsnr = new IgnitionListener {
def onStateChange(name: String, state: IgniteState) {
if (name == ignite.name && state == IgniteState.STOPPED) {
warn("Closing Visor console due to stopping of host grid instance.")
nl()
close()
}
}
}
Ignition.addListener(nodeStopLsnr)
logText("Visor joined topology: " + cfgPath)
logText("All live nodes, if any, will re-join.")
nl()
val t = VisorTextTable()
// Print advice.
println("Some useful commands:")
t += ("Type 'top'", "to see full topology.")
t += ("Type 'node'", "to see node statistics.")
t += ("Type 'cache'", "to see cache statistics.")
t += ("Type 'tasks'", "to see tasks statistics.")
t += ("Type 'config'", "to see node configuration.")
t.render()
println("\\nType 'help' to get help.\\n")
status()
}
/**
* Returns string with node id8, its memory variable, if available, and its
* IP address (first internal address), if node is alive.
*
* @param id Node ID.
* @return String.
*/
def nodeId8Addr(id: UUID): String = {
assert(id != null)
assert(isCon)
val g = ignite
if (g != null && g.localNode.id == id)
"<visor>"
else {
val n = ignite.cluster.node(id)
val id8 = nid8(id)
var v = mfindHead(id8)
if (v.isEmpty) {
v = assignNodeValue(n)
}
id8 +
(if (v.isDefined) "(@" + v.get._1 + ")" else "") +
", " +
(if (n == null) NA else sortAddresses(n.addresses).headOption.getOrElse(NA))
}
}
def assignNodeValue(node: ClusterNode): Option[(String, String)] = {
assert(node != null)
val id8 = nid8(node.id())
setVarIfAbsent(id8, "n")
val alias = if (U.sameMacs(ignite.localNode(), node)) "nl" else "nr"
if (mgetOpt(alias).isEmpty)
msetOpt(alias, nid8(node.id()))
val ip = sortAddresses(node.addresses).headOption
if (ip.isDefined)
setVarIfAbsent(ip.get, "h")
mfindHead(id8)
}
/**
* Returns string with node id8 and its memory variable, if available.
*
* @param id Node ID.
* @return String.
*/
def nodeId8(id: UUID): String = {
assert(id != null)
assert(isCon)
val id8 = nid8(id)
val v = mfindHead(id8)
id8 + (if (v.isDefined) "(@" + v.get._1 + ")" else "")
}
/**
* Guards against invalid percent readings.
*
* @param v Value in '%' to guard.
* @return Percent as string. Any value below `0` or greater than `100` will return `<n/a>` string.
*/
def safePercent(v: Double): String = if (v < 0 || v > 100) NA else formatDouble(v) + " %"
/** Convert to task argument. */
def emptyTaskArgument[A](nid: UUID): VisorTaskArgument[Void] = new VisorTaskArgument(nid, false)
def emptyTaskArgument[A](nids: Iterable[UUID]): VisorTaskArgument[Void] =
new VisorTaskArgument(new JavaHashSet(nids), false)
/** Convert to task argument. */
def toTaskArgument[A](nid: UUID, arg: A): VisorTaskArgument[A] = new VisorTaskArgument(nid, arg, false)
/** Convert to task argument. */
def toTaskArgument[A](nids: Iterable[UUID], arg: A): VisorTaskArgument[A] =
new VisorTaskArgument(new JavaHashSet(nids), arg, false)
@throws[ClusterGroupEmptyException]("In case of empty topology.")
private def execute[A, R, J](grp: ClusterGroup, task: Class[_ <: VisorMultiNodeTask[A, R, J]], arg: A): R = {
if (grp.nodes().isEmpty)
throw new ClusterGroupEmptyException("Topology is empty.")
ignite.compute(grp).withNoFailover().execute(task, toTaskArgument(grp.nodes().map(_.id()), arg))
}
/**
* Execute task on node.
*
* @param nid Node id.
* @param task Task class
* @param arg Task argument.
* @tparam A Task argument type.
* @tparam R Task result type
* @tparam J Job class.
* @return Task result.
*/
@throws[ClusterGroupEmptyException]("In case of empty topology.")
def executeOne[A, R, J](nid: UUID, task: Class[_ <: VisorMultiNodeTask[A, R, J]], arg: A): R =
execute(ignite.cluster.forNodeId(nid), task, arg)
/**
* Execute task on random node from specified cluster group.
*
* @param grp Cluster group to take a random node from.
* @param task Task class
* @param arg Task argument.
* @tparam A Task argument type.
* @tparam R Task result type
* @tparam J Job class.
* @return Task result.
*/
@throws[ClusterGroupEmptyException]("In case of empty topology.")
def executeRandom[A, R, J](grp: ClusterGroup, task: Class[_ <: VisorMultiNodeTask[A, R, J]], arg: A): R =
execute(grp.forRandom(), task, arg)
/**
* Execute task on random node.
*
* @param task Task class
* @param arg Task argument.
* @tparam A Task argument type.
* @tparam R Task result type
* @tparam J Job class.
* @return Task result.
*/
@throws[ClusterGroupEmptyException]("In case of empty topology.")
def executeRandom[A, R, J](task: Class[_ <: VisorMultiNodeTask[A, R, J]], arg: A): R =
execute(ignite.cluster.forRandom(), task, arg)
/**
* Execute task on specified nodes.
*
* @param nids Node ids.
* @param task Task class
* @param arg Task argument.
* @tparam A Task argument type.
* @tparam R Task result type
* @tparam J Job class.
* @return Task result.
*/
@throws[ClusterGroupEmptyException]("In case of empty topology.")
def executeMulti[A, R, J](nids: Iterable[UUID], task: Class[_ <: VisorMultiNodeTask[A, R, J]], arg: A): R =
execute(ignite.cluster.forNodeIds(nids), task, arg)
/**
* Execute task on all nodes.
*
* @param task Task class
* @param arg Task argument.
* @tparam A Task argument type.
* @tparam R Task result type
* @tparam J Job class.
* @return Task result.
*/
@throws[ClusterGroupEmptyException]("In case of empty topology.")
def executeMulti[A, R, J](task: Class[_ <: VisorMultiNodeTask[A, R, J]], arg: A): R =
execute(ignite.cluster.forRemotes(), task, arg)
/**
* Gets caches configurations from specified node.
*
* @param nid Node ID to collect configuration from.
* @return Collection of cache configurations.
*/
@throws[ClusterGroupEmptyException]("In case of empty topology.")
def cacheConfigurations(nid: UUID): JavaCollection[VisorCacheConfiguration] =
executeOne(nid, classOf[VisorCacheConfigurationCollectorTask],
new VisorCacheConfigurationCollectorTaskArg(null.asInstanceOf[JavaCollection[String]])).values()
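// A minimal usage sketch ('nid' is a hypothetical node ID, e.g. obtained via
// askForNode(...); assumes VisorCacheConfiguration exposes getName):
//
//   cacheConfigurations(nid).foreach(cfg => println(cfg.getName))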
/**
* Asks user to select a node from the list.
*
* @param title Title displayed before the list of nodes.
* @return `Option` for ID of selected node.
*/
def askForNode(title: String): Option[UUID] = {
assert(title != null)
assert(isCon)
val t = VisorTextTable()
t #= ("#", "Node ID8(@), IP","Node Type", "Up Time", "CPUs", "CPU Load", "Free Heap")
val nodes = ignite.cluster.nodes().toList
if (nodes.isEmpty) {
warn("Topology is empty.")
None
}
else if (nodes.size == 1)
Some(nodes.head.id)
else {
nodes.indices foreach (i => {
val n = nodes(i)
val m = n.metrics
val usdMem = m.getHeapMemoryUsed
val maxMem = m.getHeapMemoryMaximum
val freeHeapPct = (maxMem - usdMem) * 100 / maxMem
val cpuLoadPct = m.getCurrentCpuLoad * 100
t += (
i,
nodeId8Addr(n.id),
if (n.isClient) "Client" else "Server",
X.timeSpan2HMS(m.getUpTime),
n.metrics.getTotalCpus,
safePercent(cpuLoadPct),
formatDouble(freeHeapPct) + " %"
)
})
println(title)
t.render()
val a = ask("\\nChoose node number ('c' to cancel) [0]: ", "0")
if (a.toLowerCase == "c")
None
else {
try
Some(nodes(a.toInt).id)
catch {
case e: Throwable =>
warn("Invalid selection: " + a)
None
}
}
}
}
/**
* Asks user to select a host from the list.
*
* @param title Title displayed before the list of hosts.
* @return `Option` for projection of nodes located on selected host.
*/
def askForHost(title: String): Option[ClusterGroup] = {
assert(title != null)
assert(isCon)
val t = VisorTextTable()
t #= ("#", "Int./Ext. IPs", "Node ID8(@)", "OS", "CPUs", "MACs", "CPU Load")
val neighborhood = U.neighborhood(ignite.cluster.nodes()).values().toIndexedSeq
if (neighborhood.isEmpty) {
warn("Topology is empty.")
None
}
else {
neighborhood.indices foreach (i => {
val neighbors = neighborhood(i)
var ips = immutable.Set.empty[String]
var id8s = Seq.empty[String]
var macs = immutable.Set.empty[String]
var cpuLoadSum = 0.0
val n1 = neighbors.head
assert(n1 != null)
val cpus = n1.metrics.getTotalCpus
val os = "" +
n1.attribute("os.name") + " " +
n1.attribute("os.arch") + " " +
n1.attribute("os.version")
neighbors.foreach(n => {
id8s = id8s :+ nodeId8(n.id)
ips = ips ++ n.addresses
cpuLoadSum += n.metrics().getCurrentCpuLoad
macs = macs ++ n.attribute[String](ATTR_MACS).split(", ").map(_.grouped(2).mkString(":"))
})
t += (
i,
ips.toSeq,
id8s,
os,
cpus,
macs.toSeq,
safePercent(cpuLoadSum / neighbors.size() * 100)
)
})
println(title)
t.render()
val a = ask("\\nChoose host number ('c' to cancel) [0]: ", "0")
if (a.toLowerCase == "c")
None
else {
try
Some(ignite.cluster.forNodes(neighborhood(a.toInt)))
catch {
case e: Throwable =>
warn("Invalid selection: " + a)
None
}
}
}
}
/**
* Asks user to choose configuration file.
*
* @return `Option` for file path.
*/
def askConfigFile(): Option[String] = {
val files = GridConfigurationFinder.getConfigFiles
if (files.isEmpty) {
warn("No configuration files found.")
None
}
else {
val t = VisorTextTable()
t #= ("#", "Configuration File")
(0 until files.size).foreach(i => t += (i, files(i).get1()))
println("Local configuration files:")
t.render()
val a = ask("\\nChoose configuration file number ('c' to cancel) [0]: ", "0")
if (a.toLowerCase == "c")
None
else {
try
Some(files(a.toInt).get3.getPath)
catch {
case e: Throwable =>
nl()
warn("Invalid selection: " + a)
None
}
}
}
}
/**
* Asks user input.
*
* @param prompt Prompt string.
* @param dflt Default value for user input.
* @param passwd If `true`, input will be masked with '*' character. `false` by default.
*/
def ask(prompt: String, dflt: String, passwd: Boolean = false): String = {
assert(prompt != null)
assert(dflt != null)
if (batchMode)
return dflt
readLineOpt(prompt, if (passwd) Some('*') else None) match {
case None => dflt
case Some(s) if s.length == 0 => dflt
case Some(s) => s
}
}
/**
* Safe `readLine` version.
*
* @param prompt User prompt.
* @param mask Mask character (if `None`, no masking will be applied).
*/
private def readLineOpt(prompt: String, mask: Option[Char] = None): Option[String] = {
assert(reader != null)
try {
Option(mask.fold(reader.readLine(prompt))(reader.readLine(prompt, _)))
}
catch {
case _: Throwable => None
}
}
/**
* Asks user to choose node id8.
*
* @return `Option` for node id8.
*/
def askNodeId(): Option[String] = {
assert(isConnected)
val ids = ignite.cluster.forRemotes().nodes().map(nid8).toList
ids.indices.foreach(i => println((i + 1) + ": " + ids(i)))
nl()
println("C: Cancel")
nl()
readLineOpt("Choose node: ") match {
case Some("c") | Some("C") | None => None
case Some(idx) =>
try
Some(ids(idx.toInt - 1))
catch {
case e: Throwable =>
if (idx.isEmpty)
warn("Index can't be empty.")
else
warn("Invalid index: " + idx + ".")
None
}
}
}
/**
* Adds shutdown callback. Added function will be called
* once when Visor console shuts down.
*
* @param f Close callback to add.
*/
def addShutdownCallback(f: () => Unit) {
assert(f != null)
shutdownCbs = shutdownCbs :+ f
}
/**
* Adds close callback. Added function will be called every time
* command `close` is called.
*
* @param f Close callback to add.
*/
def addCloseCallback(f: () => Unit) {
assert(f != null)
cbs = cbs :+ f
}
/**
* Removes close callback.
*
* @param f Close callback to remove.
*/
def removeCloseCallback(f: () => Unit) {
assert(f != null)
cbs = cbs.filter(_ != f)
}
/**
* Removes all close callbacks.
*/
def removeCloseCallbacks() {
cbs = Seq.empty[() => Unit]
}
/**
* Gets visor uptime.
*/
def uptime = if (isCon) System.currentTimeMillis() - conTs else -1L
/**
* ==Command==
* Disconnects visor.
*
* ==Examples==
* <ex>close</ex>
* Disconnects from the grid.
*/
def close() {
if (!isConnected)
adviseToConnect()
else {
if (pool != null) {
pool.shutdown()
try
if (!pool.awaitTermination(5, TimeUnit.SECONDS))
pool.shutdownNow
catch {
case e: InterruptedException =>
pool.shutdownNow
Thread.currentThread.interrupt()
}
pool = new IgniteThreadPoolExecutor(
Runtime.getRuntime().availableProcessors(),
Runtime.getRuntime().availableProcessors(),
0L,
new LinkedBlockingQueue[Runnable](),
new IgniteThreadFactory("visorInstance", "visor")
)
}
// Call all close callbacks.
cbs foreach(_.apply())
if (ignite != null && Ignition.state(ignite.name) == IgniteState.STARTED) {
if (nodeJoinLsnr != null)
ignite.events().stopLocalListen(nodeJoinLsnr)
if (nodeLeftLsnr != null)
ignite.events().stopLocalListen(nodeLeftLsnr)
if (nodeSegLsnr != null)
ignite.events().stopLocalListen(nodeSegLsnr)
}
if (nodeStopLsnr != null)
Ignition.removeListener(nodeStopLsnr)
if (ignite != null && conOwner)
try
Ignition.stop(ignite.name, true)
catch {
case e: Exception => warn(e.getMessage)
}
// Fall through and treat Visor console as closed
// even in case when grid didn't stop properly.
logText("Visor left topology.")
if (logStarted) {
stopLog()
nl()
}
isCon = false
conOwner = false
conTs = 0
ignite = null
nodeJoinLsnr = null
nodeLeftLsnr = null
nodeSegLsnr = null
nodeStopLsnr = null
cfgPath = null
// Clear the memory.
mclear()
nl()
status()
}
}
/**
* ==Command==
* Quit from Visor console.
*
* ==Examples==
* <ex>quit</ex>
* Quit from Visor console.
*/
def quit() {
System.exit(0)
}
/**
* ==Command==
* Prints log status.
*
* ==Examples==
* <ex>log</ex>
* Prints log status.
*/
def log() {
val t = VisorTextTable()
t += ("Status", if (logStarted) "Started" else "Stopped")
if (logStarted) {
t += ("File path", logFile.getAbsolutePath)
t += ("File size", if (logFile.exists) formatMemory(logFile.length()))
}
t.render()
}
/**
* ==Command==
* Starts or stops logging.
*
* ==Examples==
* <ex>log -l -f=/home/user/visor-log</ex>
* Starts logging to file `visor-log` located at `/home/user`.
* <br>
* <ex>log -l -f=log/visor-log</ex>
* Starts logging to file `visor-log` located at <`Ignite home folder`>`/log`.
* <br>
* <ex>log -l -p=20</ex>
* Starts logging with querying events period of 20 seconds.
* <br>
* <ex>log -l -t=30</ex>
* Starts logging with topology snapshot logging period of 30 seconds.
* <br>
* <ex>log -s</ex>
* Stops logging.
*
* @param args Command arguments.
*/
def log(args: String) {
assert(args != null)
if (!isConnected)
adviseToConnect()
else {
def scold(errMsgs: Any*) {
assert(errMsgs != null)
warn(errMsgs: _*)
warn("Type 'help log' to see how to use this command.")
}
val argLst = parseArgs(args)
if (hasArgFlag("s", argLst))
if (!logStarted)
scold("Logging was not started.")
else
stopLog()
else if (hasArgFlag("l", argLst))
if (logStarted)
scold("Logging is already started.")
else
try
startLog(argValue("f", argLst), argValue("p", argLst), argValue("t", argLst),
hasArgFlag("dl", argLst))
catch {
case e: Exception => scold(e)
}
else
scold("Invalid arguments.")
}
}
/**
* Stops logging.
*/
private def stopLog() {
assert(logStarted)
logText("Log stopped.")
if (logTimer != null) {
logTimer.cancel()
logTimer.purge()
logTimer = null
}
if (topTimer != null) {
topTimer.cancel()
topTimer.purge()
topTimer = null
}
logStarted = false
println("<visor>: Log stopped: " + logFile.getAbsolutePath)
}
/** Unique Visor key to get events last order. */
final val EVT_LAST_ORDER_KEY = UUID.randomUUID().toString
/** Unique Visor key to get events throttle counter. */
final val EVT_THROTTLE_CNTR_KEY = UUID.randomUUID().toString
/**
     * Starts logging. Must not be called when logging is already started.
*
* @param pathOpt `Option` for log file path. If `None` - default is used.
     * @param freqOpt `Option` for events fetching frequency. If `None` - default is used.
* @param topFreqOpt `Option` for topology refresh frequency.
* @param rmtLogDisabled `True` if no events collected from remote nodes.
*/
private def startLog(pathOpt: Option[String], freqOpt: Option[String], topFreqOpt: Option[String],
rmtLogDisabled: Boolean) {
assert(pathOpt != null)
assert(freqOpt != null)
assert(!logStarted)
val path = pathOpt.getOrElse(DFLT_LOG_PATH)
val f = new File(path)
if (f.exists() && f.isDirectory)
throw new IllegalArgumentException("Specified path is a folder. Please input valid file path.")
val folder = Option(f.getParent).getOrElse("")
val fileName = f.getName
logFile = new File(U.resolveWorkDirectory(U.defaultWorkDirectory(), folder, false), fileName)
logFile.createNewFile()
if (!logFile.canWrite)
throw new IllegalArgumentException("Not enough permissions to write a log file.")
var freq = 0L
try
freq = freqOpt.getOrElse("10").toLong * 1000L
catch {
case e: NumberFormatException =>
throw new IllegalArgumentException("Invalid frequency: " + freqOpt.get)
}
if (freq <= 0)
throw new IllegalArgumentException("Frequency must be positive: " + freq)
if (freq > 60000)
warn("Frequency greater than a minute is too low (ignoring).")
var topFreq = 0L
try
topFreq = topFreqOpt.getOrElse("20").toLong * 1000L
catch {
case e: NumberFormatException =>
throw new IllegalArgumentException("Invalid topology frequency: " + topFreqOpt.get)
}
if (topFreq <= 0)
throw new IllegalArgumentException("Topology frequency must be positive: " + topFreq)
// Unique key for this JVM.
val key = UUID.randomUUID().toString + System.identityHashCode(classOf[java.lang.Object]).toString
logTimer = new Timer(true)
logTimer.schedule(new TimerTask() {
/** Events to be logged by Visor console (additionally to discovery events). */
private final val LOG_EVTS = Array(
EVT_JOB_TIMEDOUT,
EVT_JOB_FAILED,
EVT_JOB_FAILED_OVER,
EVT_JOB_REJECTED,
EVT_JOB_CANCELLED,
EVT_TASK_TIMEDOUT,
EVT_TASK_FAILED,
EVT_TASK_DEPLOY_FAILED,
EVT_TASK_DEPLOYED,
EVT_TASK_UNDEPLOYED,
EVT_CACHE_REBALANCE_STARTED,
EVT_CACHE_REBALANCE_STOPPED,
EVT_CLASS_DEPLOY_FAILED
)
override def run() {
if (ignite != null) {
try {
// Discovery events collected only locally.
val loc = collectEvents(ignite, EVT_LAST_ORDER_KEY, EVT_THROTTLE_CNTR_KEY,
LOG_EVTS ++ EVTS_DISCOVERY, new VisorEventMapper).toSeq
val evts = if (!rmtLogDisabled)
loc ++ executeMulti(classOf[VisorNodeEventsCollectorTask],
VisorNodeEventsCollectorTaskArg.createLogArg(key, LOG_EVTS)).toSeq
else
loc
if (evts.nonEmpty) {
var out: FileWriter = null
try {
out = new FileWriter(logFile, true)
evts.toList.sortBy(_.getTimestamp).foreach(e => {
logImpl(
out,
formatDateTime(e.getTimestamp),
nodeId8Addr(e.getNid),
U.compact(e.getShortDisplay)
)
if (EVTS_DISCOVERY.contains(e.getTypeId))
snapshot()
})
}
finally {
U.close(out, null)
}
}
}
catch {
case _: ClusterGroupEmptyCheckedException => // Ignore.
case e: Exception => logText("Failed to collect log.")
}
}
}
}, freq, freq)
topTimer = new Timer(true)
topTimer.schedule(new TimerTask() {
override def run() {
snapshot()
}
}, topFreq, topFreq)
logStarted = true
logText("Log started.")
println("<visor>: Log started: " + logFile.getAbsolutePath)
}
/**
* Does topology snapshot.
*/
private def snapshot() {
val g = ignite
if (g != null)
try
drawBar(g.cluster.metrics())
catch {
case e: ClusterGroupEmptyCheckedException => logText("Topology is empty.")
case e: Exception => ()
}
}
    /**
     * Logs a one-line topology snapshot bar: host/node/CPU counts plus a CPU load and heap usage gauge.
     *
     * @param m Projection metrics.
     */
private def drawBar(m: ClusterMetrics) {
assert(m != null)
val pipe = "|"
def bar(cpuLoad: Double, memUsed: Double): String = {
val nCpu = if (cpuLoad < 0 || cpuLoad > 1) 0 else (cpuLoad * 20).toInt
val nMem = if (memUsed < 0 || memUsed > 1) 0 else (memUsed * 20).toInt
("" /: (0 until 20))((s: String, i: Int) => {
s + (i match {
case a if a == nMem => "^"
case a if a <= nCpu => "="
                    case _ => "."
})
})
}
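        // Illustrative rendering (derived from the formula above, not from the original file):
        // bar(0.5, 0.25) yields "=====^=====........." - '=' up to the CPU mark, '^' at the heap mark.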
logText("H/N/C" + pipe +
U.neighborhood(ignite.cluster.nodes()).size.toString.padTo(4, ' ') + pipe +
m.getTotalNodes.toString.padTo(4, ' ') + pipe +
m.getTotalCpus.toString.padTo(4, ' ') + pipe +
bar(m.getAverageCpuLoad, m.getHeapMemoryUsed / m.getHeapMemoryTotal) + pipe
)
}
/**
* Logs text message.
*
* @param msg Message to log.
*/
def logText(msg: String) {
assert(msg != null)
if (logStarted) {
var out: FileWriter = null
try {
out = new FileWriter(logFile, true)
logImpl(
out,
formatDateTime(System.currentTimeMillis),
null,
msg
)
}
catch {
case e: IOException => ()
}
finally {
U.close(out, null)
}
}
}
/**
* @param out Writer.
* @param tstamp Timestamp of the log.
* @param node Node associated with the event.
* @param msg Message associated with the event.
*/
private def logImpl(
out: java.io.Writer,
tstamp: String,
node: String = null,
msg: String
) {
assert(out != null)
assert(tstamp != null)
assert(msg != null)
assert(logStarted)
        if (node != null)
            out.write(tstamp.padTo(18, ' ') + " | " + node + " => " + msg + "\n")
        else
            out.write(tstamp.padTo(18, ' ') + " | " + msg + "\n")
}
    /**
     * Prints out status and help in case someone calls `visor()`.
     */
def apply() {
status()
nl()
help()
}
lazy val commands = cmdLst.map(_.name) ++ cmdLst.flatMap(_.aliases)
def searchCmd(cmd: String) = cmdLst.find(c => c.name.equals(cmd) || (c.aliases != null && c.aliases.contains(cmd)))
/**
* Transform node ID to ID8 string.
*
* @param node Node to take ID from.
* @return Node ID in ID8 format.
*/
def nid8(node: ClusterNode): String = {
nid8(node.id())
}
/**
* Transform node ID to ID8 string.
*
* @param nid Node ID.
* @return Node ID in ID8 format.
*/
def nid8(nid: UUID): String = {
nid.toString.take(8).toUpperCase
}
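    // For example (hypothetical ID, not from the original file):
    //   nid8(UUID.fromString("12345678-9abc-def0-1234-56789abcdef0")) == "12345678"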
/**
* Get node by ID8 string.
*
* @param id8 Node ID in ID8 format.
     * @return Collection of nodes that have the specified ID8.
*/
def nodeById8(id8: String) = {
ignite.cluster.nodes().filter(n => id8.equalsIgnoreCase(nid8(n)))
}
/**
* Introduction of `^^` operator for `Any` type that will call `break`.
*
* @param v `Any` value.
*/
implicit def toReturnable(v: Any) = new {
// Ignore the warning below.
def ^^ {
break()
}
}
/**
* Decode time frame from string.
*
* @param timeArg Optional time frame: <num>s|m|h|d
* @return Time in milliseconds.
*/
def timeFilter(timeArg: Option[String]): Long = {
if (timeArg.nonEmpty) {
val s = timeArg.get
val n = try
s.substring(0, s.length - 1).toLong
catch {
case _: NumberFormatException =>
throw new IllegalArgumentException("Time frame size is not numeric in: " + s)
}
if (n <= 0)
throw new IllegalArgumentException("Time frame size is not positive in: " + s)
val timeUnit = s.last match {
case 's' => 1000L
case 'm' => 1000L * 60L
case 'h' => 1000L * 60L * 60L
case 'd' => 1000L * 60L * 60L * 24L
case _ => throw new IllegalArgumentException("Invalid time frame suffix in: " + s)
}
n * timeUnit
}
else
Long.MaxValue
}
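    // Illustrative values (derived from the suffix table above, not part of the original file):
    //   timeFilter(Some("30s")) == 30000L
    //   timeFilter(Some("2h")) == 7200000L
    //   timeFilter(None) == Long.MaxValue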
/**
* Sort addresses to properly display in Visor.
*
* @param addrs Addresses to sort.
* @return Sorted list.
*/
def sortAddresses(addrs: Iterable[String]) = {
def ipToLong(ip: String) = {
try {
val octets = if (ip.contains(".")) ip.split('.') else ip.split(':')
var dec = BigDecimal.valueOf(0L)
for (i <- octets.indices) dec += octets(i).toLong * math.pow(256, octets.length - 1 - i).toLong
dec
}
catch {
case _: Exception => BigDecimal.valueOf(0L)
}
}
        /**
         * Detects IP address type for sorting.
         *
         * @param addr Address to detect type for.
         * @return IP class type for sorting in order: public IPv4 + private IPv4 + localhost + IPv6.
         */
def addrType(addr: String) = {
if (addr.contains(':'))
4 // IPv6
else {
try {
InetAddress.getByName(addr) match {
case ip if ip.isLoopbackAddress => 3 // localhost
case ip if ip.isSiteLocalAddress => 2 // private IPv4
case _ => 1 // other IPv4
}
}
catch {
case ignore: UnknownHostException => 5
}
}
}
addrs.map(addr => (addrType(addr), ipToLong(addr), addr)).toSeq.
sortWith((l, r) => if (l._1 == r._1) l._2.compare(r._2) < 0 else l._1 < r._1).map(_._3)
}
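    // Illustrative ordering (hypothetical addresses, not from the original file):
    // public IPv4 first, then private IPv4, then loopback, then IPv6, e.g.
    //   sortAddresses(Seq("127.0.0.1", "10.0.0.2", "8.8.8.8", "::1"))
    //   == Seq("8.8.8.8", "10.0.0.2", "127.0.0.1", "::1")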
}
| WilliamDo/ignite | modules/visor-console/src/main/scala/org/apache/ignite/visor/visor.scala | Scala | apache-2.0 | 81,667 |
/*
* Copyright 2015 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.box.retriever
import org.scalatest.{Matchers, WordSpec}
import uk.gov.hmrc.ct.box.CtValue
import uk.gov.hmrc.ct.ct600a.v2.retriever.CT600ABoxRetriever
import uk.gov.hmrc.ct._
import uk.gov.hmrc.ct.domain.CompanyTypes
class FilingAttributesBoxValueRetrieverSpec extends WordSpec with Matchers {
"FilingAttributesBoxValueRetriever" should {
"have 8 functions" in {
FilingAttributesBoxValueRetriever.retrieveBoxIdFunctions(classOf[FilingAttributesBoxValueRetriever]).size shouldBe 8
}
"get ct values" in {
val retriever = new FilingAttributesBoxValueRetrieverForTest
val result = FilingAttributesBoxValueRetriever.generateValues(retriever)
result("ProductName") shouldBe retriever.retrieveProductName()
result("FilingCompanyType") shouldBe retriever.retrieveCompanyType()
result("AbbreviatedAccountsFiling") shouldBe retriever.retrieveAbbreviatedAccountsFiling()
result("StatutoryAccountsFiling") shouldBe retriever.retrieveStatutoryAccountsFiling()
result("MicroEntityFiling") shouldBe retriever.retrieveMicroEntityFiling()
result("AbridgedFiling") shouldBe retriever.retrieveAbridgedFiling()
result("CompaniesHouseFiling") shouldBe retriever.retrieveCompaniesHouseFiling()
result("HMRCFiling") shouldBe retriever.retrieveHMRCFiling()
}
}
}
class FilingAttributesBoxValueRetrieverForTest extends FilingAttributesBoxValueRetriever {
override def generateValues: Map[String, CtValue[_]] = ???
override def retrieveProductName(): ProductName = ProductName("productType")
override def retrieveCompanyType(): FilingCompanyType = FilingCompanyType(CompanyTypes.UkTradingCompany)
override def retrieveAbbreviatedAccountsFiling(): AbbreviatedAccountsFiling = AbbreviatedAccountsFiling(false)
override def retrieveStatutoryAccountsFiling(): StatutoryAccountsFiling = StatutoryAccountsFiling(true)
override def retrieveMicroEntityFiling(): MicroEntityFiling = MicroEntityFiling(false)
override def retrieveAbridgedFiling(): AbridgedFiling = AbridgedFiling(false)
override def retrieveCompaniesHouseFiling(): CompaniesHouseFiling = CompaniesHouseFiling(true)
override def retrieveHMRCFiling(): HMRCFiling = HMRCFiling(true)
}
| keithhall/ct-calculations | src/test/scala/uk/gov/hmrc/ct/box/retriever/FilingAttributesBoxValueRetrieverSpec.scala | Scala | apache-2.0 | 2,862 |
package model
import model.battle.Status
import model.pokedex.Pokedex
import org.scalatest.FlatSpec
/**
* Created by salim on 16/09/2016.
*/
class PokemonSpec extends FlatSpec {
"Pokemon" should "be creatable by id" in {
val pokedex: Pokedex = Pokedex.boot
val p: Pokemon = Pokemon.spawn(pokedex, 25).named("Foofoo")
assert(p.name.get == "Foofoo")
assert(p.pokedexEntry.name == "pikachu")
}
"it" can "be knocked out" in {
val pokedex: Pokedex = Pokedex.boot
val p: Pokemon = Pokemon.spawn(pokedex, 25)
assert(p.hitPoints == p.maxHitPoints)
assert(p.canBattle)
p.doDamage(p.maxHitPoints)
assert(p.hitPoints == 0)
assert(p.battleStatus == Status.Unconcious)
assert(!p.canBattle)
}
"it" can "never go below zero hit points" in {
val pokedex: Pokedex = Pokedex.boot
val p: Pokemon = Pokemon.spawn(pokedex, 25)
assert(p.hitPoints == p.maxHitPoints)
p.doDamage(p.maxHitPoints + 1)
assert(p.hitPoints == 0)
assert(p.battleStatus == Status.Unconcious)
}
}
| salimfadhley/scalamoo | src/test/scala/model/PokemonSpec.scala | Scala | mit | 1,042 |
/**
* Licensed to the Minutemen Group under one or more contributor license
* agreements. See the COPYRIGHT file distributed with this work for
* additional information regarding copyright ownership.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You may
* obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package silhouette.crypto.jca
import java.security.MessageDigest
import javax.crypto.Cipher
import javax.crypto.spec.{ IvParameterSpec, SecretKeySpec }
import silhouette.crypto.jca.JcaCrypter._
import silhouette.crypto.{ Crypter, CryptoException }
/**
* Crypter implementation based on JCA (Java Cryptography Architecture).
*
* The algorithm used in this implementation is `AES/CTR/NoPadding`. Beware that CTR is
* [[https://en.wikipedia.org/wiki/Malleability_%28cryptography%29 malleable]], which might
 * be abused for various attacks if messages are not properly
 * [[https://en.wikipedia.org/wiki/Authenticated_encryption authenticated]].
*
* @param config The config instance.
*/
class JcaCrypter(config: JcaCrypterConfig) extends Crypter {
/**
* Encrypts a string.
*
* @param value The plain text to encrypt.
* @return The encrypted string.
*/
override def encrypt(value: String): String = {
val keySpec = secretKeyWithSha256(config.key, "AES")
val cipher = Cipher.getInstance("AES/CTR/NoPadding")
cipher.init(Cipher.ENCRYPT_MODE, keySpec)
val encryptedValue = cipher.doFinal(value.getBytes("UTF-8"))
val version = 1
Option(cipher.getIV) match {
case Some(iv) => s"$version-${java.util.Base64.getEncoder.encodeToString(iv ++ encryptedValue)}"
case None => throw new CryptoException(UnderlyingIVBug)
}
}
/**
* Decrypts a string.
*
* @param value The value to decrypt.
* @return The plain text string.
*/
override def decrypt(value: String): String = {
value.split("-", 2) match {
case Array(version, data) if version == "1" => decryptVersion1(data, config.key)
case Array(version, _) => throw new CryptoException(UnknownVersion.format(version))
case _ => throw new CryptoException(UnexpectedFormat)
}
}
/**
* Generates the SecretKeySpec, given the private key and the algorithm.
*/
private def secretKeyWithSha256(privateKey: String, algorithm: String) = {
val messageDigest = MessageDigest.getInstance("SHA-256")
messageDigest.update(privateKey.getBytes("UTF-8"))
// max allowed length in bits / (8 bits to a byte)
val maxAllowedKeyLength = Cipher.getMaxAllowedKeyLength(algorithm) / 8
val raw = messageDigest.digest().slice(0, maxAllowedKeyLength)
new SecretKeySpec(raw, algorithm)
}
/**
* V1 decryption algorithm (AES/CTR/NoPadding - IV present).
*/
private def decryptVersion1(value: String, privateKey: String): String = {
val data = java.util.Base64.getDecoder.decode(value)
val keySpec = secretKeyWithSha256(privateKey, "AES")
val cipher = Cipher.getInstance("AES/CTR/NoPadding")
val blockSize = cipher.getBlockSize
val iv = data.slice(0, blockSize)
val payload = data.slice(blockSize, data.size)
cipher.init(Cipher.DECRYPT_MODE, keySpec, new IvParameterSpec(iv))
new String(cipher.doFinal(payload), "UTF-8")
}
}
/**
* The companion object.
*/
object JcaCrypter {
val UnderlyingIVBug = "Cannot get IV! There must be a bug in your underlying JCE " +
"implementation; The AES/CTR/NoPadding transformation should always provide an IV"
val UnexpectedFormat = "Unexpected format; expected [VERSION]-[ENCRYPTED STRING]"
val UnknownVersion = "Unknown version: %s"
}
/**
* The config for the JCA crypter.
*
* @param key The encryption key.
*/
case class JcaCrypterConfig(key: String)
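// A minimal round-trip sketch (hypothetical key, not part of this file's API surface):
//   val crypter = new JcaCrypter(JcaCrypterConfig("change-me-32-byte-secret"))
//   val token = crypter.encrypt("hello") // "1-" + Base64(IV ++ ciphertext)
//   assert(crypter.decrypt(token) == "hello")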
| mohiva/silhouette | modules/crypto-jca/src/main/scala/silhouette/crypto/jca/JcaCrypter.scala | Scala | apache-2.0 | 4,243 |
/*
* Copyright 2009-2010 LinkedIn, Inc
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.linkedin.norbert.cluster
import org.specs.SpecificationWithJUnit
import com.linkedin.norbert.protos.NorbertProtos
class NodeSpec extends SpecificationWithJUnit {
"Node" should {
"serialize into the correct format" in {
val builder = NorbertProtos.Node.newBuilder
builder.setId(1)
builder.setUrl("localhost:31313")
builder.addPartition(0).addPartition(1)
val bytes = builder.build.toByteArray
Node.nodeToByteArray(Node(1, "localhost:31313", Set(0, 1), false)) must containInOrder(bytes)
}
"deserialize into the corrent Node" in {
val builder = NorbertProtos.Node.newBuilder
builder.setId(1)
builder.setUrl("localhost:31313")
builder.addPartition(0).addPartition(1)
val bytes = builder.build.toByteArray
val node = Node(1, "localhost:31313", Set(0, 1), true)
Node(1, bytes, true) must be_==(node)
}
"have a sane equals method" in {
val url = "localhost:31313"
val node1 = Node(1, url, Set(0, 1), true)
val node2 = Node(1, url, Set(2, 3), false)
val node3 = Node(1, url, Set(4, 5), true)
// Reflexive
node1 must be_==(node1)
// Symmetric
node1 must be_==(node2)
node2 must be_==(node1)
// Transitive
node1 must be_==(node2)
node2 must be_==(node3)
node3 must be_==(node1)
      // Consistency is already handled above
// Handles null
node1 must be_!=(null)
// Hashcode
node1.hashCode must be_==(node2.hashCode)
}
"be equal to another node if they have the same id and url" in {
val url = "localhost:31313"
val node1 = Node(1, url, Set(0, 1), true)
val node2 = Node(1, url, Set(1, 2), false)
node1 must be_==(node2)
}
"not be equal to another node if they have a different id" in {
val url = "localhost:31313"
val node1 = Node(1, url, Set(0, 1), true)
val node2 = Node(2, url, Set(1, 2), false)
node1 must be_!=(node2)
}
"not be equal to another node if they have a different url" in {
val node1 = Node(1, "localhost:31313", Set(0, 1), true)
val node2 = Node(1, "localhost:16161", Set(0, 1), true)
node1 must be_!=(node2)
}
}
}
| rhavyn/norbert | cluster/src/test/scala/com/linkedin/norbert/cluster/NodeSpec.scala | Scala | apache-2.0 | 2,843 |
def foo(a: String) = 0
val z: String => Unit = /*start*/foo(_)/*end*/ // good code red
//(String) => Unit | triggerNZ/intellij-scala | testdata/typeInference/bugs5/SCL3549A.scala | Scala | apache-2.0 | 105 |
package com.google.cloud.spark.bigquery.pushdowns
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions.{Attribute, UnsafeProjection}
import org.apache.spark.sql.execution.SparkPlan
import org.apache.spark.sql.types.{StructField, StructType}
/** BigQueryPlan, with RDD defined by custom query. */
case class BigQueryPlan(output: Seq[Attribute], rdd: RDD[InternalRow])
extends SparkPlan {
override def children: Seq[SparkPlan] = Nil
protected override def doExecute(): RDD[InternalRow] = {
val schema = StructType(
output.map(attr => StructField(attr.name, attr.dataType, attr.nullable))
)
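    // Project every InternalRow into an UnsafeRow matching the plan's output schema;
    // the projection is created once per partition to avoid per-row allocation.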
rdd.mapPartitions { iter =>
val project = UnsafeProjection.create(schema)
iter.map(project)
}
}
}
| GoogleCloudDataproc/spark-bigquery-connector | spark-bigquery-pushdown/pushdown_common_src/main/scala/com/google/cloud/spark/bigquery/pushdowns/BigQueryPlan.scala | Scala | apache-2.0 | 811 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.integration.torch
import com.intel.analytics.bigdl.nn.{BilinearFiller, Sequential, VolumetricFullConvolution, Zeros}
import com.intel.analytics.bigdl.tensor.Tensor
import com.intel.analytics.bigdl.utils.RandomGenerator._
import com.intel.analytics.bigdl.utils.{T, Table}
import scala.util.Random
@com.intel.analytics.bigdl.tags.Serial
class VolumetricFullConvolutionSpec extends TorchSpec {
"A VolumetricFullConvolution" should "generate correct output" in {
torchCheck()
val seed = 100
RNG.setSeed(seed)
val nInputPlane = 3
val nOutputPlane = 6
val kT = 4
val kW = 3
val kH = 3
val dT = 2
val dW = 1
val dH = 1
val padT = 2
val padW = 2
val padH = 2
val layer = new VolumetricFullConvolution[Double](nInputPlane, nOutputPlane,
kT, kW, kH, dT, dW, dH, padT, padW, padH)
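    // Sanity note (derived from the standard transposed-convolution size formula,
    // not part of the original test): each output dim is o = (i - 1) * stride - 2 * pad + kernel
    // (+ output adjustment, 0 here), so the 3 x 3 x 3 x 6 x 6 input below maps to
    // depth (3 - 1) * 2 - 4 + 4 = 4 and height/width (6 - 1) * 1 - 4 + 3 = 4.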
Random.setSeed(seed)
val input = Tensor[Double](3, 3, 3, 6, 6).apply1(e => Random.nextDouble())
layer.updateOutput(input)
val output = layer.updateOutput(input)
val code = "torch.manualSeed(" + seed + ")\\n" +
s"layer = nn.VolumetricFullConvolution($nInputPlane," +
s" $nOutputPlane, $kT, $kW, $kH, $dT, $dW, $dH, $padT, $padW, $padH)\\n" +
"weight = layer.weight\\n" +
"bias = layer.bias \\n" +
"output = layer:forward(input) "
val (luaTime, torchResult) = TH.run(code, Map("input" -> input),
Array("weight", "bias", "output"))
val luaWeight = torchResult("weight").asInstanceOf[Tensor[Double]]
val luaBias = torchResult("bias").asInstanceOf[Tensor[Double]]
val luaOutput = torchResult("output").asInstanceOf[Tensor[Double]]
val weight = layer.weight
val bias = layer.bias
weight should be(luaWeight.resizeAs(weight))
bias should be(luaBias)
output should be(luaOutput)
}
"A VolumetricFullConvolution on rectangle input" should "generate correct output" in {
torchCheck()
val seed = 100
RNG.setSeed(seed)
val nInputPlane = 10
val nOutputPlane = 10
val kT = 5
val kW = 4
val kH = 4
val dT = 3
val dW = 2
val dH = 2
val padT = 2
val padW = 1
val padH = 1
val layer = new VolumetricFullConvolution[Double](nInputPlane, nOutputPlane,
kT, kW, kH, dT, dW, dH, padT, padW, padH)
Random.setSeed(seed)
val input = Tensor[Double](1, nInputPlane, 10, 20, 30).apply1(e => Random.nextDouble())
val output = layer.updateOutput(input)
val code = "torch.manualSeed(" + seed + ")\\n" +
s"layer = nn.VolumetricFullConvolution($nInputPlane," +
s" $nOutputPlane, $kT, $kW, $kH, $dT, $dW, $dH, $padT, $padW, $padH)\\n" +
"weight = layer.weight\\n" +
"bias = layer.bias \\n" +
"output = layer:forward(input) "
val (luaTime, torchResult) = TH.run(code, Map("input" -> input),
Array("weight", "bias", "output"))
val luaWeight = torchResult("weight").asInstanceOf[Tensor[Double]]
val luaBias = torchResult("bias").asInstanceOf[Tensor[Double]]
val luaOutput = torchResult("output").asInstanceOf[Tensor[Double]]
val weight = layer.weight
val bias = layer.bias
weight should be(luaWeight.resizeAs(weight))
bias should be(luaBias)
output should be(luaOutput)
}
"A VolumetricFullConvolution" should "generate correct output and grad" in {
torchCheck()
val seed = 100
RNG.setSeed(seed)
val nInputPlane = 3
val nOutputPlane = 6
val kT = 4
val kW = 3
val kH = 3
val dT = 2
val dW = 1
val dH = 1
val padT = 2
val padW = 2
val padH = 2
val layer = new VolumetricFullConvolution[Double](nInputPlane, nOutputPlane,
kT, kW, kH, dT, dW, dH, padT, padW, padH)
val model = new Sequential[Double]()
model.add(layer)
Random.setSeed(3)
val input = Tensor[Double](3, nInputPlane, 3, 6, 6).apply1(e => Random.nextDouble())
var output = model.updateOutput(input).toTensor[Double]
val gradOutput = Tensor[Double]().resizeAs(output).apply1(e => Random.nextDouble())
var gradInput = model.backward(input, gradOutput)
output = model.updateOutput(input).toTensor[Double]
gradInput = model.backward(input, gradOutput)
val code = "torch.manualSeed(" + seed + ")\\n" +
s"""layer = nn.VolumetricFullConvolution($nInputPlane,
$nOutputPlane, $kT, $kW, $kH, $dT, $dW, $dH, $padT, $padW, $padH)
model = nn.Sequential()
model:add(layer)
weight = layer.weight
bias = layer.bias
model:zeroGradParameters()
output = model:forward(input)
gradInput = model:backward(input, gradOutput)
output = model:forward(input)
gradInput = model:backward(input, gradOutput)
gradBias = layer.gradBias
gradWeight = layer.gradWeight
"""
val (luaTime, torchResult) = TH.run(code,
Map("input" -> input, "gradOutput" -> gradOutput),
Array("weight", "bias", "output", "gradInput", "gradBias", "gradWeight")
)
val luaWeight = torchResult("weight").asInstanceOf[Tensor[Double]]
val luaBias = torchResult("bias").asInstanceOf[Tensor[Double]]
val luaOutput = torchResult("output").asInstanceOf[Tensor[Double]]
val luaGradInput = torchResult("gradInput").asInstanceOf[Tensor[Double]]
val luaGradBias = torchResult("gradBias").asInstanceOf[Tensor[Double]]
val luaGradWeight = torchResult("gradWeight").asInstanceOf[Tensor[Double]]
val weight = layer.weight
val bias = layer.bias
weight should be(luaWeight.resizeAs(weight))
bias should be(luaBias)
output should be(luaOutput)
gradInput should be(luaGradInput)
luaGradBias should be (layer.gradBias)
luaGradWeight should be (layer.gradWeight.resizeAs(luaGradWeight))
}
"A VolumetricFullConvolution" should "generate correct output and grad with 4D input" in {
torchCheck()
val seed = 100
RNG.setSeed(seed)
val nInputPlane = 3
val nOutputPlane = 6
val kT = 4
val kW = 3
val kH = 3
val dT = 2
val dW = 1
val dH = 1
val padT = 2
val padW = 2
val padH = 2
val layer = new VolumetricFullConvolution[Double](nInputPlane, nOutputPlane,
kT, kW, kH, dT, dW, dH, padT, padW, padH)
val model = new Sequential[Double]()
model.add(layer)
Random.setSeed(3)
val input = Tensor[Double](nInputPlane, 3, 6, 6).apply1(e => Random.nextDouble())
val output = model.updateOutput(input).toTensor[Double]
val gradOutput = Tensor[Double]().resizeAs(output).apply1(e => Random.nextDouble())
val gradInput = model.backward(input, gradOutput)
val code = "torch.manualSeed(" + seed + ")\\n" +
s"""layer = nn.VolumetricFullConvolution($nInputPlane,
$nOutputPlane, $kT, $kW, $kH, $dT, $dW, $dH, $padT, $padW, $padH)
model = nn.Sequential()
model:add(layer)
weight = layer.weight
bias = layer.bias
model:zeroGradParameters()
output = model:forward(input)
gradInput = model:backward(input, gradOutput)
gradBias = layer.gradBias
gradWeight = layer.gradWeight
"""
val (luaTime, torchResult) = TH.run(code,
Map("input" -> input, "gradOutput" -> gradOutput),
Array("weight", "bias", "output", "gradInput", "gradBias", "gradWeight")
)
val luaWeight = torchResult("weight").asInstanceOf[Tensor[Double]]
val luaBias = torchResult("bias").asInstanceOf[Tensor[Double]]
val luaOutput = torchResult("output").asInstanceOf[Tensor[Double]]
val luaGradInput = torchResult("gradInput").asInstanceOf[Tensor[Double]]
val luaGradBias = torchResult("gradBias").asInstanceOf[Tensor[Double]]
val luaGradWeight = torchResult("gradWeight").asInstanceOf[Tensor[Double]]
val weight = layer.weight
val bias = layer.bias
weight should be(luaWeight.resizeAs(weight))
bias should be(luaBias)
output should be(luaOutput)
gradInput should be(luaGradInput)
luaGradBias should be (layer.gradBias)
luaGradWeight should be (layer.gradWeight.resizeAs(luaGradWeight))
}
"A VolumetricFullConvolution noBias" should "generate correct output and grad with 3D input" in {
torchCheck()
val seed = 100
RNG.setSeed(seed)
val nInputPlane = 3
val nOutputPlane = 6
val kT = 4
val kW = 3
val kH = 3
val dT = 2
val dW = 1
val dH = 1
val padT = 2
val padW = 2
val padH = 2
val layer = new VolumetricFullConvolution[Double](nInputPlane, nOutputPlane,
kT, kW, kH, dT, dW, dH, padT, padW, padH, noBias = true)
val model = new Sequential[Double]()
model.add(layer)
Random.setSeed(3)
val input = Tensor[Double](3, nInputPlane, 6, 6).apply1(e => Random.nextDouble())
val output = model.updateOutput(input).toTensor[Double]
val gradOutput = Tensor[Double]().resizeAs(output).apply1(e => Random.nextDouble())
val gradInput = model.backward(input, gradOutput)
val code = "torch.manualSeed(" + seed + ")\\n" +
s"""layer = nn.VolumetricFullConvolution($nInputPlane,
$nOutputPlane, $kT, $kW, $kH, $dT, $dW, $dH, $padT, $padW, $padH)
layer:noBias()
model = nn.Sequential()
model:add(layer)
weight = layer.weight
bias = layer.bias
model:zeroGradParameters()
output = model:forward(input)
gradInput = model:backward(input, gradOutput)
gradBias = layer.gradBias
gradWeight = layer.gradWeight
"""
val (luaTime, torchResult) = TH.run(code,
Map("input" -> input, "gradOutput" -> gradOutput),
Array("weight", "output", "gradInput", "gradWeight")
)
val luaWeight = torchResult("weight").asInstanceOf[Tensor[Double]]
val luaOutput = torchResult("output").asInstanceOf[Tensor[Double]]
val luaGradInput = torchResult("gradInput").asInstanceOf[Tensor[Double]]
val luaGradWeight = torchResult("gradWeight").asInstanceOf[Tensor[Double]]
val weight = layer.weight
val bias = layer.bias
weight should be(luaWeight.resizeAs(weight))
output should be(luaOutput)
gradInput should be(luaGradInput)
luaGradWeight should be (layer.gradWeight.resizeAs(luaGradWeight))
}
"A VolumetricFullConvolution" should "generate correct output and grad with table input" in {
torchCheck()
val seed = 100
RNG.setSeed(seed)
val nInputPlane = 3
val nOutputPlane = 6
val kT = 3
val kW = 3
val kH = 3
val dT = 2
val dW = 2
val dH = 2
val padW = 1
val padT = 1
val padH = 1
val layer = new VolumetricFullConvolution[Double](nInputPlane, nOutputPlane,
kT, kW, kH, dT, dW, dH, padT, padW, padH)
Random.setSeed(3)
val input1 = Tensor[Double](nInputPlane, 3, 6, 6).apply1(e => Random.nextDouble())
val input2 = Tensor[Double](nInputPlane, 6, 6).apply1(e => Random.nextInt(dH))
val input = T(input1, input2)
val output = layer.updateOutput(input)
val gradOutput = Tensor[Double]().resizeAs(output).apply1(e => Random.nextDouble())
val gradInput = layer.backward(input, gradOutput)
val code = "torch.manualSeed(" + seed + ")\\n" +
s"""layer = nn.VolumetricFullConvolution($nInputPlane,
$nOutputPlane, $kT, $kW, $kH, $dT, $dW, $dH, $padT, $padW, $padH)
input = {input1, input2}
model = nn.Sequential()
model:add(layer)
weight = layer.weight
bias = layer.bias
model:zeroGradParameters()
output = model:forward(input)
gradInput = model:backward(input, gradOutput)
gradBias = layer.gradBias
gradWeight = layer.gradWeight
gradInput1 = gradInput[1]
gradInput2 = gradInput[2]
"""
val (luaTime, torchResult) = TH.run(code,
Map("input1" -> input1, "input2" -> input2, "gradOutput" -> gradOutput),
Array("weight", "bias", "output", "gradInput1", "gradInput2", "gradBias", "gradWeight")
)
val luaWeight = torchResult("weight").asInstanceOf[Tensor[Double]]
val luaBias = torchResult("bias").asInstanceOf[Tensor[Double]]
val luaOutput = torchResult("output").asInstanceOf[Tensor[Double]]
val luaGradInput1 = torchResult("gradInput1").asInstanceOf[Tensor[Double]]
val luaGradInput2 = torchResult("gradInput2").asInstanceOf[Tensor[Double]]
val luaGradInput = T(luaGradInput1, luaGradInput2)
val luaGradBias = torchResult("gradBias").asInstanceOf[Tensor[Double]]
val luaGradWeight = torchResult("gradWeight").asInstanceOf[Tensor[Double]]
val weight = layer.weight
val bias = layer.bias
weight should be(luaWeight.resizeAs(weight))
bias should be(luaBias)
output should be(luaOutput)
gradInput should be(luaGradInput)
luaGradBias should be (layer.gradBias)
luaGradWeight should be (layer.gradWeight.resizeAs(luaGradWeight))
}
"A VolumetricFullConvolution OneToOne" should "generate correct output and grad" in {
torchCheck()
val seed = 100
RNG.setSeed(seed)
val nInputPlane = 6
val nOutputPlane = 6
val kT = 3
val kW = 3
val kH = 3
val dT = 1
val dW = 1
val dH = 1
val layer = new VolumetricFullConvolution[Double](nInputPlane, nOutputPlane,
kT, kW, kH, dT, dW, dH)
Random.setSeed(3)
val input = Tensor[Double](6, nInputPlane, 5, 5).apply1(e => Random.nextDouble())
val output = layer.forward(input).toTensor[Double]
val gradOutput = Tensor[Double]().resizeAs(output).apply1(e => Random.nextDouble())
val gradInput = layer.updateGradInput(input, gradOutput)
layer.accGradParameters(input, gradOutput)
val code = "torch.manualSeed(" + seed + ")\\n" +
s"""tt = nn.tables.oneToOne(6)
layer = nn.VolumetricFullConvolution($nInputPlane,
$nOutputPlane, $kT, $kW, $kH, $dT, $dW, $dH)
layer.weight:copy(weight)
layer.bias:copy(bias)
model = nn.Sequential()
model:add(layer)
model:zeroGradParameters()
output = model:forward(input)
gradInput = model:backward(input, gradOutput)
gradBias = layer.gradBias
gradWeight = layer.gradWeight
"""
val (luaTime, torchResult) = TH.run(code,
Map("input" -> input, "gradOutput" -> gradOutput,
"weight" -> layer.weight, "bias" -> layer.bias),
Array("output", "gradInput", "gradBias", "gradWeight")
)
val luaOutput = torchResult("output").asInstanceOf[Tensor[Double]]
val luaGradInput = torchResult("gradInput").asInstanceOf[Tensor[Double]]
val luaGradBias = torchResult("gradBias").asInstanceOf[Tensor[Double]]
val luaGradWeight = torchResult("gradWeight").asInstanceOf[Tensor[Double]]
val weight = layer.weight
val bias = layer.bias
output should be(luaOutput)
gradInput should be(luaGradInput)
luaGradBias should be (layer.gradBias)
luaGradWeight should be (layer.gradWeight.resizeAs(luaGradWeight))
}
"A VolumetricFullConvolution with different input" should "generate correct output and grad" in {
torchCheck()
val seed = 100
RNG.setSeed(seed)
val nInputPlane = 3
val nOutputPlane = 6
val kT = 4
val kW = 3
val kH = 3
val dT = 2
val dW = 1
val dH = 1
val padT = 2
val padW = 2
val padH = 2
val layer = new VolumetricFullConvolution[Double](nInputPlane, nOutputPlane,
kT, kW, kH, dT, dW, dH, padT, padW, padH)
val model = new Sequential[Double]()
model.add(layer)
Random.setSeed(3)
val input = Tensor[Double](3, nInputPlane, 3, 6, 6).apply1(e => Random.nextDouble())
val input2 = Tensor[Double](6, nInputPlane, 3, 6, 6).apply1(e => Random.nextDouble())
val output = model.updateOutput(input).toTensor[Double]
val gradOutput = Tensor[Double]().resizeAs(output).apply1(e => Random.nextDouble())
val gradInput = model.backward(input, gradOutput)
model.zeroGradParameters()
val output2 = model.updateOutput(input2).toTensor[Double]
val gradOutput2 = Tensor[Double]().resizeAs(output2).apply1(e => Random.nextDouble())
val gradInput2 = model.backward(input2, gradOutput2)
val code = "torch.manualSeed(" + seed + ")\\n" +
s"""layer = nn.VolumetricFullConvolution($nInputPlane,
$nOutputPlane, $kT, $kW, $kH, $dT, $dW, $dH, $padT, $padW, $padH)
model = nn.Sequential()
model:add(layer)
weight = layer.weight
bias = layer.bias
model:zeroGradParameters()
output = model:forward(input)
gradInput = model:backward(input, gradOutput)
gradBias = layer.gradBias
gradWeight = layer.gradWeight
"""
val (luaTime, torchResult) = TH.run(code,
Map("input" -> input2, "gradOutput" -> gradOutput2),
Array("weight", "bias", "output", "gradInput", "gradBias", "gradWeight")
)
val luaWeight = torchResult("weight").asInstanceOf[Tensor[Double]]
val luaBias = torchResult("bias").asInstanceOf[Tensor[Double]]
val luaOutput = torchResult("output").asInstanceOf[Tensor[Double]]
val luaGradInput = torchResult("gradInput").asInstanceOf[Tensor[Double]]
val luaGradBias = torchResult("gradBias").asInstanceOf[Tensor[Double]]
val luaGradWeight = torchResult("gradWeight").asInstanceOf[Tensor[Double]]
val weight = layer.weight
val bias = layer.bias
weight should be(luaWeight.resizeAs(weight))
bias should be(luaBias)
output2 should be(luaOutput)
gradInput2 should be(luaGradInput)
luaGradBias should be (layer.gradBias)
luaGradWeight should be (layer.gradWeight.resizeAs(luaGradWeight))
}
}
| zhangxiaoli73/BigDL | spark/dl/src/test/scala/com/intel/analytics/bigdl/integration/torch/VolumetricFullConvolutionSpec.scala | Scala | apache-2.0 | 18,040 |
object Test {
inline def power(x: Double, inline n: Int): Double = // ok
inline if n == 0 then ??? else ???
inline val N = 10
def X = 20
power(2.0, N) // ok, since it's an inline parameter
power(2.0, X) // error: cannot reduce inline if
}
| dotty-staging/dotty | tests/neg/inlinevals-3.scala | Scala | apache-2.0 | 257 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql
import java.util.{Locale, Properties}
import scala.collection.JavaConverters._
import org.apache.spark.Partition
import org.apache.spark.annotation.InterfaceStability
import org.apache.spark.api.java.JavaRDD
import org.apache.spark.internal.Logging
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.catalyst.json.{CreateJacksonParser, JacksonParser, JSONOptions}
import org.apache.spark.sql.execution.command.DDLUtils
import org.apache.spark.sql.execution.datasources.{DataSource, FailureSafeParser}
import org.apache.spark.sql.execution.datasources.csv._
import org.apache.spark.sql.execution.datasources.jdbc._
import org.apache.spark.sql.execution.datasources.json.TextInputJsonDataSource
import org.apache.spark.sql.execution.datasources.v2.DataSourceV2Relation
import org.apache.spark.sql.sources.v2.{DataSourceV2, DataSourceV2Options, ReadSupport, ReadSupportWithSchema}
import org.apache.spark.sql.types.{StringType, StructType}
import org.apache.spark.unsafe.types.UTF8String
/**
* Interface used to load a [[Dataset]] from external storage systems (e.g. file systems,
* key-value stores, etc). Use `SparkSession.read` to access this.
*
* @since 1.4.0
*/
@InterfaceStability.Stable
class DataFrameReader private[sql](sparkSession: SparkSession) extends Logging {
/**
* Specifies the input data source format.
*
* @since 1.4.0
*/
def format(source: String): DataFrameReader = {
this.source = source
this
}
/**
* Specifies the input schema. Some data sources (e.g. JSON) can infer the input schema
* automatically from data. By specifying the schema here, the underlying data source can
* skip the schema inference step, and thus speed up data loading.
*
* @since 1.4.0
*/
def schema(schema: StructType): DataFrameReader = {
this.userSpecifiedSchema = Option(schema)
this
}
/**
* Specifies the schema by using the input DDL-formatted string. Some data sources (e.g. JSON) can
* infer the input schema automatically from data. By specifying the schema here, the underlying
* data source can skip the schema inference step, and thus speed up data loading.
*
* @since 2.3.0
*/
def schema(schemaString: String): DataFrameReader = {
this.userSpecifiedSchema = Option(StructType.fromDDL(schemaString))
this
}
/**
* Adds an input option for the underlying data source.
*
* You can set the following option(s):
* <ul>
* <li>`timeZone` (default session local timezone): sets the string that indicates a timezone
* to be used to parse timestamps in the JSON/CSV datasources or partition values.</li>
* </ul>
*
* @since 1.4.0
*/
def option(key: String, value: String): DataFrameReader = {
this.extraOptions += (key -> value)
this
}
/**
* Adds an input option for the underlying data source.
*
* @since 2.0.0
*/
def option(key: String, value: Boolean): DataFrameReader = option(key, value.toString)
/**
* Adds an input option for the underlying data source.
*
* @since 2.0.0
*/
def option(key: String, value: Long): DataFrameReader = option(key, value.toString)
/**
* Adds an input option for the underlying data source.
*
* @since 2.0.0
*/
def option(key: String, value: Double): DataFrameReader = option(key, value.toString)
/**
* (Scala-specific) Adds input options for the underlying data source.
*
* You can set the following option(s):
* <ul>
* <li>`timeZone` (default session local timezone): sets the string that indicates a timezone
* to be used to parse timestamps in the JSON/CSV datasources or partition values.</li>
* </ul>
*
* @since 1.4.0
*/
def options(options: scala.collection.Map[String, String]): DataFrameReader = {
this.extraOptions ++= options
this
}
/**
* Adds input options for the underlying data source.
*
* You can set the following option(s):
* <ul>
* <li>`timeZone` (default session local timezone): sets the string that indicates a timezone
* to be used to parse timestamps in the JSON/CSV datasources or partition values.</li>
* </ul>
*
* @since 1.4.0
*/
def options(options: java.util.Map[String, String]): DataFrameReader = {
this.options(options.asScala)
this
}
/**
* Loads input in as a `DataFrame`, for data sources that don't require a path (e.g. external
* key-value stores).
*
* @since 1.4.0
*/
def load(): DataFrame = {
load(Seq.empty: _*) // force invocation of `load(...varargs...)`
}
/**
* Loads input in as a `DataFrame`, for data sources that require a path (e.g. data backed by
* a local or distributed file system).
*
* @since 1.4.0
*/
def load(path: String): DataFrame = {
option("path", path).load(Seq.empty: _*) // force invocation of `load(...varargs...)`
}
/**
* Loads input in as a `DataFrame`, for data sources that support multiple paths.
* Only works if the source is a HadoopFsRelationProvider.
*
* @since 1.6.0
*/
@scala.annotation.varargs
def load(paths: String*): DataFrame = {
if (source.toLowerCase(Locale.ROOT) == DDLUtils.HIVE_PROVIDER) {
throw new AnalysisException("Hive data source can only be used with tables, you can not " +
"read files of Hive data source directly.")
}
val cls = DataSource.lookupDataSource(source)
if (classOf[DataSourceV2].isAssignableFrom(cls)) {
val dataSource = cls.newInstance()
val options = new DataSourceV2Options(extraOptions.asJava)
      val reader = (dataSource, userSpecifiedSchema) match {
case (ds: ReadSupportWithSchema, Some(schema)) =>
ds.createReader(schema, options)
case (ds: ReadSupport, None) =>
ds.createReader(options)
case (_: ReadSupportWithSchema, None) =>
throw new AnalysisException(s"A schema needs to be specified when using $dataSource.")
case (ds: ReadSupport, Some(schema)) =>
val reader = ds.createReader(options)
if (reader.readSchema() != schema) {
throw new AnalysisException(s"$ds does not allow user-specified schemas.")
}
reader
case _ =>
throw new AnalysisException(s"$cls does not support data reading.")
}
Dataset.ofRows(sparkSession, DataSourceV2Relation(reader))
} else {
// Code path for data source v1.
sparkSession.baseRelationToDataFrame(
DataSource.apply(
sparkSession,
paths = paths,
userSpecifiedSchema = userSpecifiedSchema,
className = source,
options = extraOptions.toMap).resolveRelation())
}
}
/**
* Construct a `DataFrame` representing the database table accessible via JDBC URL
* url named table and connection properties.
*
* @since 1.4.0
*/
def jdbc(url: String, table: String, properties: Properties): DataFrame = {
assertNoSpecifiedSchema("jdbc")
// properties should override settings in extraOptions.
this.extraOptions ++= properties.asScala
// explicit url and dbtable should override all
this.extraOptions += (JDBCOptions.JDBC_URL -> url, JDBCOptions.JDBC_TABLE_NAME -> table)
format("jdbc").load()
}
/**
* Construct a `DataFrame` representing the database table accessible via JDBC URL
* url named table. Partitions of the table will be retrieved in parallel based on the parameters
* passed to this function.
*
* Don't create too many partitions in parallel on a large cluster; otherwise Spark might crash
* your external database systems.
*
* @param url JDBC database url of the form `jdbc:subprotocol:subname`.
* @param table Name of the table in the external database.
* @param columnName the name of a column of integral type that will be used for partitioning.
* @param lowerBound the minimum value of `columnName` used to decide partition stride.
* @param upperBound the maximum value of `columnName` used to decide partition stride.
* @param numPartitions the number of partitions. This, along with `lowerBound` (inclusive),
* `upperBound` (exclusive), form partition strides for generated WHERE
* clause expressions used to split the column `columnName` evenly. When
* the input is less than 1, the number is set to 1.
* @param connectionProperties JDBC database connection arguments, a list of arbitrary string
* tag/value. Normally at least a "user" and "password" property
* should be included. "fetchsize" can be used to control the
* number of rows per fetch.
* @since 1.4.0
*/
def jdbc(
url: String,
table: String,
columnName: String,
lowerBound: Long,
upperBound: Long,
numPartitions: Int,
connectionProperties: Properties): DataFrame = {
// columnName, lowerBound, upperBound and numPartitions override settings in extraOptions.
this.extraOptions ++= Map(
JDBCOptions.JDBC_PARTITION_COLUMN -> columnName,
JDBCOptions.JDBC_LOWER_BOUND -> lowerBound.toString,
JDBCOptions.JDBC_UPPER_BOUND -> upperBound.toString,
JDBCOptions.JDBC_NUM_PARTITIONS -> numPartitions.toString)
jdbc(url, table, connectionProperties)
}
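  // Illustrative call (hypothetical connection details, not from the original file):
  // 8 partitions striding `id` over [0, 1000000) via generated WHERE clauses:
  //   spark.read.jdbc("jdbc:postgresql://host/db", "events", "id", 0L, 1000000L, 8, props)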
/**
* Construct a `DataFrame` representing the database table accessible via JDBC URL
   * url named table using connection properties. The `predicates` parameter gives a list
   * of expressions suitable for inclusion in WHERE clauses; each one defines one partition
* of the `DataFrame`.
*
* Don't create too many partitions in parallel on a large cluster; otherwise Spark might crash
* your external database systems.
*
* @param url JDBC database url of the form `jdbc:subprotocol:subname`
* @param table Name of the table in the external database.
* @param predicates Condition in the where clause for each partition.
* @param connectionProperties JDBC database connection arguments, a list of arbitrary string
* tag/value. Normally at least a "user" and "password" property
* should be included. "fetchsize" can be used to control the
* number of rows per fetch.
* @since 1.4.0
*/
def jdbc(
url: String,
table: String,
predicates: Array[String],
connectionProperties: Properties): DataFrame = {
assertNoSpecifiedSchema("jdbc")
// connectionProperties should override settings in extraOptions.
val params = extraOptions.toMap ++ connectionProperties.asScala.toMap
val options = new JDBCOptions(url, table, params)
val parts: Array[Partition] = predicates.zipWithIndex.map { case (part, i) =>
JDBCPartition(part, i) : Partition
}
val relation = JDBCRelation(parts, options)(sparkSession)
sparkSession.baseRelationToDataFrame(relation)
}
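  // Illustrative predicate partitioning (hypothetical table and column, not from the original file):
  //   val preds = Array("created < '2017-01-01'", "created >= '2017-01-01'")
  //   spark.read.jdbc(url, "events", preds, props) // one partition per predicate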
/**
* Loads a JSON file and returns the results as a `DataFrame`.
*
* See the documentation on the overloaded `json()` method with varargs for more details.
*
* @since 1.4.0
*/
def json(path: String): DataFrame = {
    // This method ensures that calls that explicitly need a single argument work, see SPARK-16009
json(Seq(path): _*)
}
/**
* Loads JSON files and returns the results as a `DataFrame`.
*
* <a href="http://jsonlines.org/">JSON Lines</a> (newline-delimited JSON) is supported by
* default. For JSON (one record per file), set the `multiLine` option to true.
*
* This function goes through the input once to determine the input schema. If you know the
* schema in advance, use the version that specifies the schema to avoid the extra scan.
*
* You can set the following JSON-specific options to deal with non-standard JSON files:
* <ul>
* <li>`primitivesAsString` (default `false`): infers all primitive values as a string type</li>
* <li>`prefersDecimal` (default `false`): infers all floating-point values as a decimal
* type. If the values do not fit in decimal, then it infers them as doubles.</li>
* <li>`allowComments` (default `false`): ignores Java/C++ style comment in JSON records</li>
* <li>`allowUnquotedFieldNames` (default `false`): allows unquoted JSON field names</li>
* <li>`allowSingleQuotes` (default `true`): allows single quotes in addition to double quotes
* </li>
* <li>`allowNumericLeadingZeros` (default `false`): allows leading zeros in numbers
* (e.g. 00012)</li>
* <li>`allowBackslashEscapingAnyCharacter` (default `false`): allows accepting quoting of all
* character using backslash quoting mechanism</li>
* <li>`allowUnquotedControlChars` (default `false`): allows JSON Strings to contain unquoted
* control characters (ASCII characters with value less than 32, including tab and line feed
* characters) or not.</li>
* <li>`mode` (default `PERMISSIVE`): allows a mode for dealing with corrupt records
* during parsing.
* <ul>
* <li>`PERMISSIVE` : sets other fields to `null` when it meets a corrupted record, and puts
   * the malformed string into a field configured by `columnNameOfCorruptRecord`. To keep
   * corrupt records, a user can set a string type field named `columnNameOfCorruptRecord`
   * in a user-defined schema. If a schema does not have the field, it drops corrupt records
* during parsing. When inferring a schema, it implicitly adds a `columnNameOfCorruptRecord`
* field in an output schema.</li>
* <li>`DROPMALFORMED` : ignores the whole corrupted records.</li>
* <li>`FAILFAST` : throws an exception when it meets corrupted records.</li>
* </ul>
* </li>
* <li>`columnNameOfCorruptRecord` (default is the value specified in
* `spark.sql.columnNameOfCorruptRecord`): allows renaming the new field having malformed string
* created by `PERMISSIVE` mode. This overrides `spark.sql.columnNameOfCorruptRecord`.</li>
* <li>`dateFormat` (default `yyyy-MM-dd`): sets the string that indicates a date format.
* Custom date formats follow the formats at `java.text.SimpleDateFormat`. This applies to
* date type.</li>
* <li>`timestampFormat` (default `yyyy-MM-dd'T'HH:mm:ss.SSSXXX`): sets the string that
* indicates a timestamp format. Custom date formats follow the formats at
* `java.text.SimpleDateFormat`. This applies to timestamp type.</li>
* <li>`multiLine` (default `false`): parse one record, which may span multiple lines,
* per file</li>
* </ul>
*
* @since 2.0.0
*/
@scala.annotation.varargs
def json(paths: String*): DataFrame = format("json").load(paths : _*)
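  // Illustrative reads (hypothetical paths, not from the original file), using the options above:
  //   spark.read.json("data/events.jsonl")
  //   spark.read.option("multiLine", true).json("data/records.json")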
/**
* Loads a `JavaRDD[String]` storing JSON objects (<a href="http://jsonlines.org/">JSON
* Lines text format or newline-delimited JSON</a>) and returns the result as
* a `DataFrame`.
*
* Unless the schema is specified using `schema` function, this function goes through the
* input once to determine the input schema.
*
* @param jsonRDD input RDD with one JSON object per record
* @since 1.4.0
*/
@deprecated("Use json(Dataset[String]) instead.", "2.2.0")
def json(jsonRDD: JavaRDD[String]): DataFrame = json(jsonRDD.rdd)
/**
* Loads an `RDD[String]` storing JSON objects (<a href="http://jsonlines.org/">JSON Lines
* text format or newline-delimited JSON</a>) and returns the result as a `DataFrame`.
*
* Unless the schema is specified using `schema` function, this function goes through the
* input once to determine the input schema.
*
* @param jsonRDD input RDD with one JSON object per record
* @since 1.4.0
*/
@deprecated("Use json(Dataset[String]) instead.", "2.2.0")
def json(jsonRDD: RDD[String]): DataFrame = {
json(sparkSession.createDataset(jsonRDD)(Encoders.STRING))
}
/**
* Loads a `Dataset[String]` storing JSON objects (<a href="http://jsonlines.org/">JSON Lines
* text format or newline-delimited JSON</a>) and returns the result as a `DataFrame`.
*
* Unless the schema is specified using `schema` function, this function goes through the
* input once to determine the input schema.
*
* @param jsonDataset input Dataset with one JSON object per record
* @since 2.2.0
*/
def json(jsonDataset: Dataset[String]): DataFrame = {
val parsedOptions = new JSONOptions(
extraOptions.toMap,
sparkSession.sessionState.conf.sessionLocalTimeZone,
sparkSession.sessionState.conf.columnNameOfCorruptRecord)
val schema = userSpecifiedSchema.getOrElse {
TextInputJsonDataSource.inferFromDataset(jsonDataset, parsedOptions)
}
verifyColumnNameOfCorruptRecord(schema, parsedOptions.columnNameOfCorruptRecord)
val actualSchema =
StructType(schema.filterNot(_.name == parsedOptions.columnNameOfCorruptRecord))
val createParser = CreateJacksonParser.string _
val parsed = jsonDataset.rdd.mapPartitions { iter =>
val rawParser = new JacksonParser(actualSchema, parsedOptions)
val parser = new FailureSafeParser[String](
input => rawParser.parse(input, createParser, UTF8String.fromString),
parsedOptions.parseMode,
schema,
parsedOptions.columnNameOfCorruptRecord)
iter.flatMap(parser.parse)
}
sparkSession.internalCreateDataFrame(parsed, schema, isStreaming = jsonDataset.isStreaming)
}
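  // Minimal sketch (hypothetical data, not from the original file):
  //   import spark.implicits._
  //   val ds = Seq("""{"a": 1}""", """{"a": 2}""").toDS()
  //   spark.read.json(ds).printSchema() // root |-- a: long (nullable = true)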
/**
* Loads a CSV file and returns the result as a `DataFrame`. See the documentation on the
* other overloaded `csv()` method for more details.
*
* @since 2.0.0
*/
def csv(path: String): DataFrame = {
    // This method ensures that calls that explicitly need a single argument work, see SPARK-16009
csv(Seq(path): _*)
}
/**
   * Loads a `Dataset[String]` storing CSV rows and returns the result as a `DataFrame`.
*
* If the schema is not specified using `schema` function and `inferSchema` option is enabled,
* this function goes through the input once to determine the input schema.
*
* If the schema is not specified using `schema` function and `inferSchema` option is disabled,
* it determines the columns as string types and it reads only the first line to determine the
* names and the number of fields.
*
* @param csvDataset input Dataset with one CSV row per record
* @since 2.2.0
*/
def csv(csvDataset: Dataset[String]): DataFrame = {
val parsedOptions: CSVOptions = new CSVOptions(
extraOptions.toMap,
sparkSession.sessionState.conf.sessionLocalTimeZone)
val filteredLines: Dataset[String] =
CSVUtils.filterCommentAndEmpty(csvDataset, parsedOptions)
val maybeFirstLine: Option[String] = filteredLines.take(1).headOption
val schema = userSpecifiedSchema.getOrElse {
TextInputCSVDataSource.inferFromDataset(
sparkSession,
csvDataset,
maybeFirstLine,
parsedOptions)
}
verifyColumnNameOfCorruptRecord(schema, parsedOptions.columnNameOfCorruptRecord)
val actualSchema =
StructType(schema.filterNot(_.name == parsedOptions.columnNameOfCorruptRecord))
val linesWithoutHeader: RDD[String] = maybeFirstLine.map { firstLine =>
filteredLines.rdd.mapPartitions(CSVUtils.filterHeaderLine(_, firstLine, parsedOptions))
}.getOrElse(filteredLines.rdd)
val parsed = linesWithoutHeader.mapPartitions { iter =>
val rawParser = new UnivocityParser(actualSchema, parsedOptions)
val parser = new FailureSafeParser[String](
input => Seq(rawParser.parse(input)),
parsedOptions.parseMode,
schema,
parsedOptions.columnNameOfCorruptRecord)
iter.flatMap(parser.parse)
}
sparkSession.internalCreateDataFrame(parsed, schema, isStreaming = csvDataset.isStreaming)
}
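  // Minimal sketch (hypothetical data, not from the original file); the first row is
  // consumed as column names via the `header` option:
  //   import spark.implicits._
  //   val ds = Seq("name,age", "alice,30").toDS()
  //   spark.read.option("header", true).csv(ds).show()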
/**
* Loads CSV files and returns the result as a `DataFrame`.
*
* This function will go through the input once to determine the input schema if `inferSchema`
* is enabled. To avoid going through the entire data once, disable `inferSchema` option or
* specify the schema explicitly using `schema`.
*
* You can set the following CSV-specific options to deal with CSV files:
* <ul>
   * <li>`sep` (default `,`): sets a single character as the separator for each
   * field and value.</li>
* <li>`encoding` (default `UTF-8`): decodes the CSV files by the given encoding
* type.</li>
* <li>`quote` (default `"`): sets the single character used for escaping quoted values where
   * the separator can be part of the value. To turn off quotation handling, set this to an
   * empty string rather than `null`. This behaviour is different from
   * `com.databricks.spark.csv`.</li>
* <li>`escape` (default `\\`): sets the single character used for escaping quotes inside
* an already quoted value.</li>
* <li>`comment` (default empty string): sets the single character used for skipping lines
* beginning with this character. By default, it is disabled.</li>
* <li>`header` (default `false`): uses the first line as names of columns.</li>
* <li>`inferSchema` (default `false`): infers the input schema automatically from data. It
* requires one extra pass over the data.</li>
* <li>`ignoreLeadingWhiteSpace` (default `false`): a flag indicating whether or not leading
* whitespaces from values being read should be skipped.</li>
* <li>`ignoreTrailingWhiteSpace` (default `false`): a flag indicating whether or not trailing
* whitespaces from values being read should be skipped.</li>
* <li>`nullValue` (default empty string): sets the string representation of a null value. Since
* 2.0.1, this applies to all supported types including the string type.</li>
   * <li>`nanValue` (default `NaN`): sets the string representation of a non-number value.</li>
* <li>`positiveInf` (default `Inf`): sets the string representation of a positive infinity
* value.</li>
* <li>`negativeInf` (default `-Inf`): sets the string representation of a negative infinity
* value.</li>
* <li>`dateFormat` (default `yyyy-MM-dd`): sets the string that indicates a date format.
* Custom date formats follow the formats at `java.text.SimpleDateFormat`. This applies to
* date type.</li>
* <li>`timestampFormat` (default `yyyy-MM-dd'T'HH:mm:ss.SSSXXX`): sets the string that
* indicates a timestamp format. Custom date formats follow the formats at
* `java.text.SimpleDateFormat`. This applies to timestamp type.</li>
* <li>`maxColumns` (default `20480`): defines a hard limit of how many columns
* a record can have.</li>
* <li>`maxCharsPerColumn` (default `-1`): defines the maximum number of characters allowed
   * for any given value being read. By default it is -1, meaning unlimited length.</li>
* <li>`mode` (default `PERMISSIVE`): allows a mode for dealing with corrupt records
* during parsing. It supports the following case-insensitive modes.
* <ul>
   *   <li>`PERMISSIVE` : sets other fields to `null` when it encounters a corrupted record,
   *   and puts the malformed string into a field configured by `columnNameOfCorruptRecord`.
   *   To keep corrupt records, a user can set a string type field named
   *   `columnNameOfCorruptRecord` in a user-defined schema. If the schema does not have that
   *   field, corrupt records are dropped during parsing. When a record has fewer parsed CSV
   *   tokens than the expected schema length, the missing fields are set to `null`.</li>
   *   <li>`DROPMALFORMED` : ignores whole corrupted records.</li>
   *   <li>`FAILFAST` : throws an exception when it encounters corrupted records.</li>
* </ul>
* </li>
   * <li>`columnNameOfCorruptRecord` (default is the value specified in
   * `spark.sql.columnNameOfCorruptRecord`): allows renaming the new field that holds the
   * malformed string created by `PERMISSIVE` mode. This overrides
   * `spark.sql.columnNameOfCorruptRecord`.</li>
   * <li>`multiLine` (default `false`): allows a single record to span multiple lines.</li>
* </ul>
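   *
   * For example (an illustrative sketch; assumes an active `SparkSession` named `spark`):
   * {{{
   *   val df = spark.read
   *     .option("header", "true")
   *     .option("inferSchema", "true")
   *     .csv("data/people.csv")
   * }}}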
* @since 2.0.0
*/
@scala.annotation.varargs
def csv(paths: String*): DataFrame = format("csv").load(paths : _*)
/**
* Loads a Parquet file, returning the result as a `DataFrame`. See the documentation
* on the other overloaded `parquet()` method for more details.
*
* @since 2.0.0
*/
def parquet(path: String): DataFrame = {
    // This method ensures that calls that explicitly need a single argument work; see SPARK-16009
parquet(Seq(path): _*)
}
/**
   * Loads Parquet files, returning the result as a `DataFrame`.
*
* You can set the following Parquet-specific option(s) for reading Parquet files:
* <ul>
* <li>`mergeSchema` (default is the value specified in `spark.sql.parquet.mergeSchema`): sets
* whether we should merge schemas collected from all Parquet part-files. This will override
* `spark.sql.parquet.mergeSchema`.</li>
* </ul>
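   *
   * For example (an illustrative sketch; assumes an active `SparkSession` named `spark`):
   * {{{
   *   val df = spark.read.option("mergeSchema", "true").parquet("data/day1", "data/day2")
   * }}}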
* @since 1.4.0
*/
@scala.annotation.varargs
def parquet(paths: String*): DataFrame = {
format("parquet").load(paths: _*)
}
/**
* Loads an ORC file and returns the result as a `DataFrame`.
*
* @param path input path
* @since 1.5.0
* @note Currently, this method can only be used after enabling Hive support.
*/
def orc(path: String): DataFrame = {
    // This method ensures that calls that explicitly need a single argument work; see SPARK-16009
orc(Seq(path): _*)
}
/**
* Loads ORC files and returns the result as a `DataFrame`.
*
* @param paths input paths
* @since 2.0.0
* @note Currently, this method can only be used after enabling Hive support.
*/
@scala.annotation.varargs
def orc(paths: String*): DataFrame = format("orc").load(paths: _*)
/**
* Returns the specified table as a `DataFrame`.
*
* @since 1.4.0
*/
def table(tableName: String): DataFrame = {
assertNoSpecifiedSchema("table")
sparkSession.table(tableName)
}
/**
* Loads text files and returns a `DataFrame` whose schema starts with a string column named
   * "value", followed by partition columns if there are any. See the documentation on
* the other overloaded `text()` method for more details.
*
* @since 2.0.0
*/
def text(path: String): DataFrame = {
    // This method ensures that calls that explicitly need a single argument work; see SPARK-16009
text(Seq(path): _*)
}
/**
* Loads text files and returns a `DataFrame` whose schema starts with a string column named
   * "value", followed by partition columns if there are any.
*
* Each line in the text files is a new row in the resulting DataFrame. For example:
* {{{
* // Scala:
* spark.read.text("/path/to/spark/README.md")
*
* // Java:
* spark.read().text("/path/to/spark/README.md")
* }}}
*
* @param paths input paths
* @since 1.6.0
*/
@scala.annotation.varargs
def text(paths: String*): DataFrame = format("text").load(paths : _*)
/**
* Loads text files and returns a [[Dataset]] of String. See the documentation on the
* other overloaded `textFile()` method for more details.
* @since 2.0.0
*/
def textFile(path: String): Dataset[String] = {
    // This method ensures that calls that explicitly need a single argument work; see SPARK-16009
textFile(Seq(path): _*)
}
/**
* Loads text files and returns a [[Dataset]] of String. The underlying schema of the Dataset
* contains a single string column named "value".
*
   * If the directory structure of the text files contains partitioning information, it is
   * ignored in the resulting Dataset. To include partitioning information as columns, use `text`.
*
* Each line in the text files is a new element in the resulting Dataset. For example:
* {{{
* // Scala:
* spark.read.textFile("/path/to/spark/README.md")
*
* // Java:
* spark.read().textFile("/path/to/spark/README.md")
* }}}
*
   * @param paths input paths
* @since 2.0.0
*/
@scala.annotation.varargs
def textFile(paths: String*): Dataset[String] = {
assertNoSpecifiedSchema("textFile")
text(paths : _*).select("value").as[String](sparkSession.implicits.newStringEncoder)
}
/**
   * A convenience function for schema validation in APIs.
*/
private def assertNoSpecifiedSchema(operation: String): Unit = {
if (userSpecifiedSchema.nonEmpty) {
throw new AnalysisException(s"User specified schema not supported with `$operation`")
}
}
/**
   * A convenience function for schema validation in data sources that support
   * `columnNameOfCorruptRecord` as an option.
*/
private def verifyColumnNameOfCorruptRecord(
schema: StructType,
columnNameOfCorruptRecord: String): Unit = {
schema.getFieldIndex(columnNameOfCorruptRecord).foreach { corruptFieldIndex =>
val f = schema(corruptFieldIndex)
if (f.dataType != StringType || !f.nullable) {
throw new AnalysisException(
"The field for corrupt records must be string type and nullable")
}
}
}
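  // Illustrative sketch (not part of the original source): to keep malformed rows in
  // PERMISSIVE mode, a user-defined schema declares the corrupt-record column as a
  // nullable StringType field, e.g.:
  //
  //   import org.apache.spark.sql.types._
  //   val schema = new StructType()
  //     .add("name", StringType)
  //     .add("age", IntegerType)
  //     .add("_corrupt_record", StringType) // default column name; nullable by default
  //   spark.read.schema(schema).option("mode", "PERMISSIVE").json("data/people.json")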
///////////////////////////////////////////////////////////////////////////////////////
// Builder pattern config options
///////////////////////////////////////////////////////////////////////////////////////
private var source: String = sparkSession.sessionState.conf.defaultDataSourceName
private var userSpecifiedSchema: Option[StructType] = None
private val extraOptions = new scala.collection.mutable.HashMap[String, String]
}
| minixalpha/spark | sql/core/src/main/scala/org/apache/spark/sql/DataFrameReader.scala | Scala | apache-2.0 | 30,433 |
package com.wavesplatform.it.sync.transactions
import com.typesafe.config.Config
import com.wavesplatform.account.AddressScheme
import com.wavesplatform.common.state.ByteStr
import com.wavesplatform.common.utils.EitherExt2
import com.wavesplatform.it.api.SyncHttpApi._
import com.wavesplatform.it.api.TransactionInfo
import com.wavesplatform.it.sync._
import com.wavesplatform.it.{BaseFreeSpec, IntegrationSuiteWithThreeAddresses, NodeConfigs}
import com.wavesplatform.state.diffs.FeeValidation
import com.wavesplatform.test._
import com.wavesplatform.transaction.Asset.IssuedAsset
import com.wavesplatform.transaction.TxVersion
import com.wavesplatform.transaction.assets.SponsorFeeTransaction
import org.scalatest.Assertion
import scala.concurrent.duration._
class SponsorshipSuite
extends BaseFreeSpec
with IntegrationSuiteWithThreeAddresses {
override def nodeConfigs: Seq[Config] =
NodeConfigs.newBuilder
.overrideBase(_.quorum(0))
.overrideBase(_.preactivatedFeatures((14, 1000000)))
.overrideBase(_.raw("waves.blockchain.custom.functionality.blocks-for-feature-activation=1"))
.overrideBase(_.raw("waves.blockchain.custom.functionality.feature-check-blocks-period=1"))
.withDefault(1)
.withSpecial(1, _.nonMiner)
.buildNonConflicting()
private def sponsor = firstKeyPair
private def alice = secondKeyPair
private def bob = thirdKeyPair
private lazy val bobAddress = bob.toAddress.toString
val Waves = 100000000L
val Token = 100L
val sponsorAssetTotal = 100 * Token
val minSponsorFee = Token
val TinyFee = Token / 2
val SmallFee = Token + Token / 2
val LargeFee = 10 * Token
var sponsorWavesBalance = 0L
var minerWavesBalance = 0L
var minerWavesBalanceAfterFirstXferTest = 0L
var sponsorWavesBalanceAfterFirstXferTest = 0L
var firstSponsorAssetId: String = ""
var secondSponsorAssetId: String = ""
var firstTransferTxToAlice: String = ""
var secondTransferTxToAlice: String = ""
var firstSponsorTxId: String = ""
var secondSponsorTxId: String = ""
def assertMinAssetFee(txId: String, sponsorship: Long): Assertion = {
val txInfo = miner.transactionInfo[TransactionInfo](txId)
assert(txInfo.minSponsoredAssetFee.contains(sponsorship))
}
def assertSponsorship(assetId: String, sponsorship: Long): Assertion = {
val assetInfo = miner.assetsDetails(assetId)
assert(assetInfo.minSponsoredAssetFee == Some(sponsorship).filter(_ != 0))
}
private lazy val aliceAddress: String = alice.toAddress.toString
private lazy val sponsorAddress: String = sponsor.toAddress.toString
protected override def beforeAll(): Unit = {
super.beforeAll()
sponsorWavesBalance = sender.accountBalances(sponsorAddress)._2
minerWavesBalance = sender.accountBalances(miner.address)._2
minerWavesBalanceAfterFirstXferTest = minerWavesBalance + 2 * issueFee + 2 * sponsorReducedFee + 2 * minFee + 2 * FeeValidation.FeeUnit * SmallFee / minSponsorFee
sponsorWavesBalanceAfterFirstXferTest = sponsorWavesBalance - 2 * issueFee - 2 * sponsorReducedFee - 2 * minFee - 2 * FeeValidation.FeeUnit * SmallFee / minSponsorFee
firstSponsorAssetId = sender
.issue(
sponsor,
"AssetTxV1",
"Created by Sponsorship Suite",
sponsorAssetTotal,
decimals = 2,
reissuable = false,
fee = issueFee,
waitForTx = true
)
.id
secondSponsorAssetId = sender
.issue(
sponsor,
"AssetTxV2",
"Created by Sponsorship Suite",
sponsorAssetTotal,
decimals = 2,
reissuable = false,
fee = issueFee,
waitForTx = true
)
.id
firstTransferTxToAlice =
sender.transfer(sponsor, aliceAddress, sponsorAssetTotal / 2, minFee, Some(firstSponsorAssetId), None, waitForTx = true).id
secondTransferTxToAlice =
sender.transfer(sponsor, aliceAddress, sponsorAssetTotal / 2, minFee, Some(secondSponsorAssetId), None, waitForTx = true).id
firstSponsorTxId = sender.sponsorAsset(sponsor, firstSponsorAssetId, baseFee = Token, fee = sponsorReducedFee, version = TxVersion.V1).id
secondSponsorTxId = sender.sponsorAsset(sponsor, secondSponsorAssetId, baseFee = Token, fee = sponsorReducedFee, version = TxVersion.V2).id
}
"Fee in sponsored asset works fine for transaction" - {
"make assets sponsored" in {
nodes.waitForHeightAriseAndTxPresent(firstSponsorTxId)
nodes.waitForHeightAriseAndTxPresent(secondSponsorTxId)
sender.transactionInfo[TransactionInfo](secondSponsorTxId).chainId shouldBe Some(AddressScheme.current.chainId)
assertSponsorship(firstSponsorAssetId, 1 * Token)
assertSponsorship(secondSponsorAssetId, 1 * Token)
assertMinAssetFee(firstSponsorTxId, 1 * Token)
assertMinAssetFee(secondSponsorTxId, 1 * Token)
}
"check balance before test accounts balances" in {
for (sponsorAssetId <- Seq(firstSponsorAssetId, secondSponsorAssetId)) {
sender.assertAssetBalance(sponsorAddress, sponsorAssetId, sponsorAssetTotal / 2)
sender.assertAssetBalance(aliceAddress, sponsorAssetId, sponsorAssetTotal / 2)
val assetInfo = sender.assetsBalance(aliceAddress).balances.filter(_.assetId == sponsorAssetId).head
assetInfo.minSponsoredAssetFee shouldBe Some(Token)
assetInfo.sponsorBalance shouldBe Some(sender.accountBalances(sponsorAddress)._2)
}
}
"sender cannot make transfer" - {
"invalid tx timestamp" in {
for (v <- sponsorshipTxSupportedVersions) {
def invalidTx(timestamp: Long): SponsorFeeTransaction =
SponsorFeeTransaction
.selfSigned(
version = v,
sponsor,
IssuedAsset(ByteStr.decodeBase58(firstSponsorAssetId).get),
Some(SmallFee),
minFee,
timestamp + 1.day.toMillis
)
.explicitGet()
val iTx = invalidTx(timestamp = System.currentTimeMillis + 1.day.toMillis)
assertBadRequestAndResponse(sender.broadcastRequest(iTx.json()), "Transaction timestamp .* is more than .*ms in the future")
}
}
}
"fee should be written off in issued asset" - {
"alice transfer sponsored asset to bob using sponsored fee" in {
val firstTransferTxCustomFeeAlice =
sender.transfer(alice, bobAddress, 10 * Token, SmallFee, Some(firstSponsorAssetId), Some(firstSponsorAssetId)).id
val secondTransferTxCustomFeeAlice =
sender.transfer(alice, bobAddress, 10 * Token, SmallFee, Some(secondSponsorAssetId), Some(secondSponsorAssetId)).id
nodes.waitForHeightArise()
nodes.waitForTransaction(firstTransferTxCustomFeeAlice)
nodes.waitForTransaction(secondTransferTxCustomFeeAlice)
sender.assertAssetBalance(aliceAddress, firstSponsorAssetId, sponsorAssetTotal / 2 - SmallFee - 10 * Token)
sender.assertAssetBalance(aliceAddress, secondSponsorAssetId, sponsorAssetTotal / 2 - SmallFee - 10 * Token)
sender.assertAssetBalance(bobAddress, firstSponsorAssetId, 10 * Token)
sender.assertAssetBalance(bobAddress, secondSponsorAssetId, 10 * Token)
val aliceTxs = sender.transactionsByAddress(aliceAddress, 100)
      aliceTxs.size shouldBe 5 // not 4, because one more transaction was issued in the IntegrationSuiteWithThreeAddresses class
aliceTxs.count(tx => tx.sender.contains(aliceAddress) || tx.recipient.contains(aliceAddress)) shouldBe 5
aliceTxs.map(_.id) should contain allElementsOf Seq(
firstTransferTxToAlice,
secondTransferTxToAlice,
firstTransferTxCustomFeeAlice,
secondTransferTxCustomFeeAlice
)
val bobTxs = sender.transactionsByAddress(bobAddress, 100)
bobTxs.size shouldBe 3
bobTxs.count(tx => tx.sender.contains(bobAddress) || tx.recipient.contains(bobAddress)) shouldBe 3
bobTxs.map(_.id) should contain allElementsOf Seq(firstTransferTxCustomFeeAlice, secondTransferTxCustomFeeAlice)
}
"check transactions by address" in {
val minerTxs = sender.transactionsByAddress(miner.address, 100)
minerTxs.size shouldBe 4
val sponsorTxs = sender.transactionsByAddress(sponsorAddress, 100)
sponsorTxs.size shouldBe 9 //TODO: bug?
sponsorTxs.count(tx => tx.sender.contains(sponsorAddress) || tx.recipient.contains(sponsorAddress)) shouldBe 7
sponsorTxs.map(_.id) should contain allElementsOf Seq(
firstSponsorAssetId,
secondSponsorAssetId,
firstTransferTxToAlice,
secondTransferTxToAlice,
firstSponsorTxId,
secondSponsorTxId
)
}
"sponsor should receive sponsored asset as fee, waves should be written off" in {
miner.assertAssetBalance(sponsorAddress, firstSponsorAssetId, sponsorAssetTotal / 2 + SmallFee)
miner.assertAssetBalance(sponsorAddress, secondSponsorAssetId, sponsorAssetTotal / 2 + SmallFee)
miner.assertBalances(sponsorAddress, sponsorWavesBalanceAfterFirstXferTest)
}
"miner waves balance should be changed" in {
miner.assertBalances(miner.address, minerWavesBalanceAfterFirstXferTest)
}
}
"assets balance should contain sponsor fee info and sponsor balance" in {
val sponsorLeaseSomeWaves = sender.lease(sponsor, bobAddress, leasingAmount, leasingFee).id
nodes.waitForHeightAriseAndTxPresent(sponsorLeaseSomeWaves)
val (_, sponsorEffectiveBalance) = sender.accountBalances(sponsorAddress)
val aliceFirstSponsorAssetBalance = sender.assetsBalance(aliceAddress).balances.filter(_.assetId == firstSponsorAssetId).head
val aliceSecondSponsorAssetBalance = sender.assetsBalance(aliceAddress).balances.filter(_.assetId == secondSponsorAssetId).head
aliceFirstSponsorAssetBalance.minSponsoredAssetFee shouldBe Some(minSponsorFee)
aliceSecondSponsorAssetBalance.minSponsoredAssetFee shouldBe Some(minSponsorFee)
aliceFirstSponsorAssetBalance.sponsorBalance shouldBe Some(sponsorEffectiveBalance)
aliceSecondSponsorAssetBalance.sponsorBalance shouldBe Some(sponsorEffectiveBalance)
}
"waves fee depends on sponsor fee and sponsored token decimals" in {
val transferTxCustomLargeFeeAlice1 = sender.transfer(alice, bobAddress, 1.waves, LargeFee, None, Some(firstSponsorAssetId)).id
val transferTxCustomLargeFeeAlice2 = sender.transfer(alice, bobAddress, 1.waves, LargeFee, None, Some(secondSponsorAssetId)).id
nodes.waitForHeightAriseAndTxPresent(transferTxCustomLargeFeeAlice1)
nodes.waitForHeightAriseAndTxPresent(transferTxCustomLargeFeeAlice2)
sender.assertAssetBalance(sponsorAddress, firstSponsorAssetId, sponsorAssetTotal / 2 + SmallFee + LargeFee)
sender.assertAssetBalance(sponsorAddress, secondSponsorAssetId, sponsorAssetTotal / 2 + SmallFee + LargeFee)
sender.assertAssetBalance(aliceAddress, firstSponsorAssetId, sponsorAssetTotal / 2 - SmallFee - LargeFee - 10 * Token)
sender.assertAssetBalance(aliceAddress, secondSponsorAssetId, sponsorAssetTotal / 2 - SmallFee - LargeFee - 10 * Token)
sender.assertAssetBalance(bobAddress, firstSponsorAssetId, 10 * Token)
sender.assertAssetBalance(bobAddress, secondSponsorAssetId, 10 * Token)
sender.assertBalances(
sponsorAddress,
sponsorWavesBalanceAfterFirstXferTest - FeeValidation.FeeUnit * 2 * LargeFee / Token - leasingFee,
sponsorWavesBalanceAfterFirstXferTest - FeeValidation.FeeUnit * 2 * LargeFee / Token - leasingFee - leasingAmount
)
miner.assertBalances(miner.address, minerWavesBalanceAfterFirstXferTest + FeeValidation.FeeUnit * 2 * LargeFee / Token + leasingFee)
}
"cancel sponsorship" - {
"cancel" in {
val cancelFirstSponsorTxId = sender.cancelSponsorship(sponsor, firstSponsorAssetId, fee = issueFee, version = TxVersion.V1).id
val cancelSecondSponsorTxId = sender.cancelSponsorship(sponsor, secondSponsorAssetId, fee = issueFee, version = TxVersion.V2).id
nodes.waitForHeightAriseAndTxPresent(cancelFirstSponsorTxId)
nodes.waitForHeightAriseAndTxPresent(cancelSecondSponsorTxId)
}
"check asset details info" in {
for (sponsorAssetId <- Seq(firstSponsorAssetId, secondSponsorAssetId)) {
val assetInfo = sender.assetsBalance(aliceAddress).balances.filter(_.assetId == sponsorAssetId).head
assetInfo.minSponsoredAssetFee shouldBe None
assetInfo.sponsorBalance shouldBe None
}
}
"cannot pay fees in non sponsored assets" in {
assertBadRequestAndResponse(
sender.transfer(alice, bobAddress, 10 * Token, fee = 1 * Token, assetId = None, feeAssetId = Some(firstSponsorAssetId)).id,
s"Asset $firstSponsorAssetId is not sponsored, cannot be used to pay fees"
)
assertBadRequestAndResponse(
sender.transfer(alice, bobAddress, 10 * Token, fee = 1 * Token, assetId = None, feeAssetId = Some(secondSponsorAssetId)).id,
s"Asset $secondSponsorAssetId is not sponsored, cannot be used to pay fees"
)
}
"check cancel transaction info" in {
assertSponsorship(firstSponsorAssetId, 0L)
assertSponsorship(secondSponsorAssetId, 0L)
}
"check sponsor and miner balances after cancel" in {
sender.assertBalances(
sponsorAddress,
sponsorWavesBalanceAfterFirstXferTest - FeeValidation.FeeUnit * 2 * LargeFee / Token - leasingFee - 2 * issueFee,
sponsorWavesBalanceAfterFirstXferTest - FeeValidation.FeeUnit * 2 * LargeFee / Token - leasingFee - leasingAmount - 2 * issueFee
)
miner.assertBalances(
miner.address,
minerWavesBalanceAfterFirstXferTest + FeeValidation.FeeUnit * 2 * LargeFee / Token + leasingFee + 2 * issueFee
)
}
"cancel sponsorship again" in {
val cancelSponsorshipTxId1 = sender.cancelSponsorship(sponsor, firstSponsorAssetId, fee = issueFee, version = TxVersion.V1).id
val cancelSponsorshipTxId2 = sender.cancelSponsorship(sponsor, firstSponsorAssetId, fee = issueFee, version = TxVersion.V2).id
nodes.waitForHeightArise()
nodes.waitForTransaction(cancelSponsorshipTxId1)
nodes.waitForTransaction(cancelSponsorshipTxId2)
}
}
"set sponsopship again" - {
"set sponsorship and check new asset details, min sponsored fee changed" in {
val setAssetSponsoredTx1 = sender.sponsorAsset(sponsor, firstSponsorAssetId, fee = issueFee, baseFee = TinyFee, version = TxVersion.V1).id
val setAssetSponsoredTx2 = sender.sponsorAsset(sponsor, secondSponsorAssetId, fee = issueFee, baseFee = TinyFee, version = TxVersion.V2).id
nodes.waitForHeightAriseAndTxPresent(setAssetSponsoredTx1)
nodes.waitForHeightAriseAndTxPresent(setAssetSponsoredTx2)
for (sponsorAssetId <- Seq(firstSponsorAssetId, secondSponsorAssetId)) {
val assetInfo = sender.assetsBalance(aliceAddress).balances.filter(_.assetId == sponsorAssetId).head
assetInfo.minSponsoredAssetFee shouldBe Some(Token / 2)
assetInfo.sponsorBalance shouldBe Some(sender.accountBalances(sponsorAddress)._2)
}
}
"make transfer with new min sponsored fee" in {
val sponsoredBalance = sender.accountBalances(sponsorAddress)
val sponsorFirstAssetBalance = sender.assetBalance(sponsorAddress, firstSponsorAssetId).balance
val sponsorSecondAssetBalance = sender.assetBalance(sponsorAddress, secondSponsorAssetId).balance
val aliceFirstAssetBalance = sender.assetBalance(aliceAddress, firstSponsorAssetId).balance
val aliceSecondAssetBalance = sender.assetBalance(aliceAddress, secondSponsorAssetId).balance
val aliceWavesBalance = sender.accountBalances(aliceAddress)
val bobFirstAssetBalance = sender.assetBalance(bobAddress, firstSponsorAssetId).balance
val bobSecondAssetBalance = sender.assetBalance(bobAddress, secondSponsorAssetId).balance
val bobWavesBalance = sender.accountBalances(bobAddress)
val minerBalance = miner.accountBalances(miner.address)
val minerFirstAssetBalance = miner.assetBalance(miner.address, firstSponsorAssetId).balance
val minerSecondAssetBalance = miner.assetBalance(miner.address, secondSponsorAssetId).balance
val transferTxCustomFeeAlice1 = sender.transfer(alice, bobAddress, 1.waves, TinyFee, None, Some(firstSponsorAssetId)).id
val transferTxCustomFeeAlice2 = sender.transfer(alice, bobAddress, 1.waves, TinyFee, None, Some(secondSponsorAssetId)).id
nodes.waitForHeight(
math.max(
sender.waitForTransaction(transferTxCustomFeeAlice1).height,
sender.waitForTransaction(transferTxCustomFeeAlice2).height
) + 2
)
val wavesFee = FeeValidation.FeeUnit * 2 * TinyFee / TinyFee
sender.assertBalances(sponsorAddress, sponsoredBalance._1 - wavesFee, sponsoredBalance._2 - wavesFee)
sender.assertAssetBalance(sponsorAddress, firstSponsorAssetId, sponsorFirstAssetBalance + TinyFee)
sender.assertAssetBalance(sponsorAddress, secondSponsorAssetId, sponsorSecondAssetBalance + TinyFee)
sender.assertAssetBalance(aliceAddress, firstSponsorAssetId, aliceFirstAssetBalance - TinyFee)
sender.assertAssetBalance(aliceAddress, secondSponsorAssetId, aliceSecondAssetBalance - TinyFee)
sender.assertBalances(aliceAddress, aliceWavesBalance._1 - 2.waves, aliceWavesBalance._2 - 2.waves)
sender.assertBalances(bobAddress, bobWavesBalance._1 + 2.waves, bobWavesBalance._2 + 2.waves)
sender.assertAssetBalance(bobAddress, firstSponsorAssetId, bobFirstAssetBalance)
sender.assertAssetBalance(bobAddress, secondSponsorAssetId, bobSecondAssetBalance)
miner.assertBalances(miner.address, minerBalance._2 + wavesFee)
miner.assertAssetBalance(miner.address, firstSponsorAssetId, minerFirstAssetBalance)
miner.assertAssetBalance(miner.address, secondSponsorAssetId, minerSecondAssetBalance)
}
"change sponsorship fee in active sponsored asset" in {
val setAssetSponsoredTx1 = sender.sponsorAsset(sponsor, firstSponsorAssetId, fee = issueFee, baseFee = LargeFee, version = TxVersion.V1).id
val setAssetSponsoredTx2 = sender.sponsorAsset(sponsor, secondSponsorAssetId, fee = issueFee, baseFee = LargeFee, version = TxVersion.V2).id
nodes.waitForHeightArise()
nodes.waitForHeightAriseAndTxPresent(setAssetSponsoredTx1)
nodes.waitForTransaction(setAssetSponsoredTx2)
for (sponsorAssetId <- Seq(firstSponsorAssetId, secondSponsorAssetId)) {
val assetInfo = sender.assetsBalance(aliceAddress).balances.filter(_.assetId == sponsorAssetId).head
assetInfo.minSponsoredAssetFee shouldBe Some(LargeFee)
assetInfo.sponsorBalance shouldBe Some(sender.accountBalances(sponsorAddress)._2)
}
}
"transfer tx sponsored fee is less then new minimal" in {
assertBadRequestAndResponse(
sender
.transfer(sponsor, aliceAddress, 11 * Token, fee = SmallFee, assetId = Some(firstSponsorAssetId), feeAssetId = Some(firstSponsorAssetId))
.id,
s"Fee for TransferTransaction \\\\($SmallFee in ${Some(firstSponsorAssetId).get}\\\\) does not exceed minimal value of 100000 WAVES or $LargeFee ${Some(firstSponsorAssetId).get}"
)
assertBadRequestAndResponse(
sender
.transfer(
sponsor,
aliceAddress,
11 * Token,
fee = SmallFee,
assetId = Some(secondSponsorAssetId),
feeAssetId = Some(secondSponsorAssetId)
)
.id,
s"Fee for TransferTransaction \\\\($SmallFee in ${Some(secondSponsorAssetId).get}\\\\) does not exceed minimal value of 100000 WAVES or $LargeFee ${Some(secondSponsorAssetId).get}"
)
}
"make transfer with updated min sponsored fee" in {
val sponsoredBalance = sender.accountBalances(sponsorAddress)
val sponsorFirstAssetBalance = sender.assetBalance(sponsorAddress, firstSponsorAssetId).balance
val sponsorSecondAssetBalance = sender.assetBalance(sponsorAddress, secondSponsorAssetId).balance
val aliceFirstAssetBalance = sender.assetBalance(aliceAddress, firstSponsorAssetId).balance
      val aliceSecondAssetBalance = sender.assetBalance(aliceAddress, secondSponsorAssetId).balance
val aliceWavesBalance = sender.accountBalances(aliceAddress)
val bobWavesBalance = sender.accountBalances(bobAddress)
val minerBalance = miner.accountBalances(miner.address)
val transferTxCustomFeeAlice1 = sender.transfer(alice, bobAddress, 1.waves, LargeFee, None, Some(firstSponsorAssetId)).id
val transferTxCustomFeeAlice2 = sender.transfer(alice, bobAddress, 1.waves, LargeFee, None, Some(secondSponsorAssetId)).id
nodes.waitForHeightArise()
nodes.waitForTransaction(transferTxCustomFeeAlice1)
nodes.waitForTransaction(transferTxCustomFeeAlice2)
val wavesFee = FeeValidation.FeeUnit * 2 * LargeFee / LargeFee
nodes.waitForHeightArise()
sender.assertBalances(sponsorAddress, sponsoredBalance._1 - wavesFee, sponsoredBalance._2 - wavesFee)
sender.assertAssetBalance(sponsorAddress, firstSponsorAssetId, sponsorFirstAssetBalance + LargeFee)
sender.assertAssetBalance(sponsorAddress, secondSponsorAssetId, sponsorSecondAssetBalance + LargeFee)
sender.assertAssetBalance(aliceAddress, firstSponsorAssetId, aliceFirstAssetBalance - LargeFee)
sender.assertAssetBalance(aliceAddress, secondSponsorAssetId, aliceSecondAssetBalance - LargeFee)
sender.assertBalances(aliceAddress, aliceWavesBalance._1 - 2.waves, aliceWavesBalance._2 - 2.waves)
sender.assertBalances(bobAddress, bobWavesBalance._1 + 2.waves, bobWavesBalance._2 + 2.waves)
miner.assertBalances(miner.address, minerBalance._1 + wavesFee, minerBalance._2 + wavesFee)
}
}
"issue asset make sponsor and burn and reissue" in {
val sponsorBalance = sender.accountBalances(sponsorAddress)
val minerBalance = miner.accountBalances(miner.address)
val firstSponsorAssetId2 =
sender
.issue(
sponsor,
"Another1",
"Created by Sponsorship Suite",
sponsorAssetTotal,
decimals = 2,
fee = issueFee,
waitForTx = true
)
.id
val secondSponsorAssetId2 =
sender
.issue(
sponsor,
"Another2",
"Created by Sponsorship Suite",
sponsorAssetTotal,
decimals = 2,
reissuable = true,
fee = issueFee,
waitForTx = true
)
.id
val sponsorTxId1 = sender.sponsorAsset(sponsor, firstSponsorAssetId2, baseFee = Token, fee = sponsorReducedFee, version = TxVersion.V1).id
val sponsorTxId2 = sender.sponsorAsset(sponsor, secondSponsorAssetId2, baseFee = Token, fee = sponsorReducedFee, version = TxVersion.V2).id
sender.transfer(sponsor, aliceAddress, sponsorAssetTotal / 2, minFee, Some(firstSponsorAssetId2), None, waitForTx = true).id
sender.transfer(sponsor, aliceAddress, sponsorAssetTotal / 2, minFee, Some(secondSponsorAssetId2), None, waitForTx = true).id
nodes.waitForHeightAriseAndTxPresent(sponsorTxId1)
nodes.waitForTransaction(sponsorTxId2)
sender.burn(sponsor, firstSponsorAssetId2, sponsorAssetTotal / 2, burnFee, waitForTx = true).id
sender.burn(sponsor, secondSponsorAssetId2, sponsorAssetTotal / 2, burnFee, waitForTx = true).id
for (sponsorAssetId2 <- Seq(firstSponsorAssetId2, secondSponsorAssetId2)) {
val assetInfo = sender.assetsDetails(sponsorAssetId2)
assetInfo.minSponsoredAssetFee shouldBe Some(Token)
assetInfo.quantity shouldBe sponsorAssetTotal / 2
}
sender.reissue(sponsor, firstSponsorAssetId2, sponsorAssetTotal, reissuable = true, issueFee, waitForTx = true).id
sender.reissue(sponsor, secondSponsorAssetId2, sponsorAssetTotal, reissuable = true, issueFee, waitForTx = true).id
for (sponsorAssetId2 <- Seq(firstSponsorAssetId2, secondSponsorAssetId2)) {
val assetInfoAfterReissue = sender.assetsDetails(sponsorAssetId2)
assetInfoAfterReissue.minSponsoredAssetFee shouldBe Some(Token)
assetInfoAfterReissue.quantity shouldBe sponsorAssetTotal / 2 + sponsorAssetTotal
assetInfoAfterReissue.reissuable shouldBe true
}
val aliceTransferWaves1 = sender.transfer(alice, bobAddress, transferAmount, SmallFee, None, Some(firstSponsorAssetId2), waitForTx = true).id
val aliceTransferWaves2 = sender.transfer(alice, bobAddress, transferAmount, SmallFee, None, Some(secondSponsorAssetId2), waitForTx = true).id
nodes.waitForHeightAriseAndTxPresent(aliceTransferWaves1)
nodes.waitForHeightAriseAndTxPresent(aliceTransferWaves2)
val totalWavesFee = FeeValidation.FeeUnit * 2 * SmallFee / Token + 2 * issueFee + 2 * sponsorReducedFee + 2 * burnFee + 2 * minFee + 2 * issueFee
miner.assertBalances(miner.address, minerBalance._1 + totalWavesFee, minerBalance._2 + totalWavesFee)
sender.assertBalances(sponsorAddress, sponsorBalance._1 - totalWavesFee, sponsorBalance._2 - totalWavesFee)
sender.assertAssetBalance(sponsorAddress, firstSponsorAssetId2, SmallFee + sponsorAssetTotal)
sender.assertAssetBalance(sponsorAddress, secondSponsorAssetId2, SmallFee + sponsorAssetTotal)
}
"miner is sponsor" in {
val minerBalance = miner.accountBalances(miner.address)
val firstMinersAsset =
miner
.issue(
miner.keyPair,
"MinersAsset1",
"Created by Sponsorship Suite",
sponsorAssetTotal,
decimals = 8,
fee = issueFee,
waitForTx = true
)
.id
val secondMinersAsset =
miner
.issue(
miner.keyPair,
"MinersAsset2",
"Created by Sponsorship Suite",
sponsorAssetTotal,
decimals = 8,
reissuable = true,
fee = issueFee,
waitForTx = true
)
.id
val firstSponsorshipTxId =
miner.sponsorAsset(miner.keyPair, firstMinersAsset, baseFee = Token, fee = sponsorReducedFee, version = TxVersion.V1).id
val secondSponsorshipTxId =
miner.sponsorAsset(miner.keyPair, secondMinersAsset, baseFee = Token, fee = sponsorReducedFee, version = TxVersion.V2).id
nodes.waitForHeightAriseAndTxPresent(firstSponsorshipTxId)
nodes.waitForTransaction(secondSponsorshipTxId)
val minerFirstTransferTxId =
miner.transfer(miner.keyPair, aliceAddress, sponsorAssetTotal / 2, SmallFee, Some(firstMinersAsset), Some(firstMinersAsset)).id
val minerSecondTransferTxId =
miner.transfer(miner.keyPair, aliceAddress, sponsorAssetTotal / 2, SmallFee, Some(secondMinersAsset), Some(secondMinersAsset)).id
nodes.waitForHeightAriseAndTxPresent(minerFirstTransferTxId)
nodes.waitForHeightAriseAndTxPresent(minerSecondTransferTxId)
miner.assertBalances(miner.address, minerBalance._1)
val aliceFirstTransferWavesId = sender.transfer(alice, bobAddress, transferAmount, SmallFee, None, Some(firstMinersAsset)).id
val aliceSecondTransferWavesId = sender.transfer(alice, bobAddress, transferAmount, SmallFee, None, Some(secondMinersAsset)).id
nodes.waitForHeightAriseAndTxPresent(aliceFirstTransferWavesId)
nodes.waitForHeightAriseAndTxPresent(aliceSecondTransferWavesId)
miner.assertBalances(miner.address, minerBalance._1)
miner.assertAssetBalance(miner.address, firstMinersAsset, sponsorAssetTotal / 2 + SmallFee)
miner.assertAssetBalance(miner.address, secondMinersAsset, sponsorAssetTotal / 2 + SmallFee)
}
"tx is declined if sponsor has not enough effective balance to pay fee" in {
val sponsorEffectiveBalance = sender.accountBalances(sponsorAddress)._2
sender.lease(sponsor, bobAddress, sponsorEffectiveBalance - leasingFee, leasingFee, waitForTx = true).id
assertBadRequestAndMessage(
sender.transfer(alice, bobAddress, 10 * Token, LargeFee, Some(firstSponsorAssetId), Some(firstSponsorAssetId)),
"unavailable funds"
)
assertBadRequestAndMessage(
sender.transfer(alice, bobAddress, 10 * Token, LargeFee, Some(secondSponsorAssetId), Some(secondSponsorAssetId)),
"unavailable funds"
)
}
}
}
| wavesplatform/Waves | node-it/src/test/scala/com/wavesplatform/it/sync/transactions/SponsorshipSuite.scala | Scala | mit | 29,282 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.keras.nn
import com.intel.analytics.bigdl.keras.KerasBaseSpec
import com.intel.analytics.bigdl.nn.abstractnn.AbstractModule
import com.intel.analytics.bigdl.nn.keras.{Convolution1D, Conv1D, Sequential => KSequential}
import com.intel.analytics.bigdl.tensor.Tensor
import com.intel.analytics.bigdl.utils.Shape
import com.intel.analytics.bigdl.utils.serializer.ModuleSerializationTest
import scala.util.Random
class Convolution1DSpec extends KerasBaseSpec {
def weightConverter(in: Array[Tensor[Float]]): Array[Tensor[Float]] =
if (in.length == 1) in // without bias
else Array(in(0).resize(Array(1) ++ in(0).size()), in(1)) // with bias
"Convolution1D" should "be the same as Keras" in {
val kerasCode =
"""
|input_tensor = Input(shape=[12, 20])
|input = np.random.random([2, 12, 20])
|output_tensor = Convolution1D(64, 3)(input_tensor)
|model = Model(input=input_tensor, output=output_tensor)
""".stripMargin
val seq = KSequential[Float]()
val layer = Convolution1D[Float](64, 3, inputShape = Shape(12, 20))
seq.add(layer)
seq.getOutputShape().toSingle().toArray should be (Array(-1, 10, 64))
checkOutputAndGrad(seq.asInstanceOf[AbstractModule[Tensor[Float], Tensor[Float], Float]],
kerasCode, weightConverter)
}
"Convolution1D without bias" should "be the same as Keras" in {
val kerasCode =
"""
|input_tensor = Input(shape=[20, 32])
|input = np.random.random([2, 20, 32])
|output_tensor = Convolution1D(32, 4, activation="relu", bias=False,
| subsample_length=2)(input_tensor)
|model = Model(input=input_tensor, output=output_tensor)
""".stripMargin
val seq = KSequential[Float]()
val layer = Conv1D[Float](32, 4, activation = "relu", subsampleLength = 2,
bias = false, inputShape = Shape(20, 32))
seq.add(layer)
seq.getOutputShape().toSingle().toArray should be (Array(-1, 9, 32))
checkOutputAndGrad(seq.asInstanceOf[AbstractModule[Tensor[Float], Tensor[Float], Float]],
kerasCode, weightConverter)
}
}
class Convolution1DSerialTest extends ModuleSerializationTest {
override def test(): Unit = {
val layer = Convolution1D[Float](64, 3, inputShape = Shape(12, 20))
layer.build(Shape(2, 12, 20))
val input = Tensor[Float](2, 12, 20).apply1(_ => Random.nextFloat())
runSerializationTest(layer, input)
}
}
| yiheng/BigDL | spark/dl/src/test/scala/com/intel/analytics/bigdl/keras/nn/Convolution1DSpec.scala | Scala | apache-2.0 | 3,080 |
package in.lambda_hc.furious_cyclist.rest.undertow.handlers.auth
import java.util.Date
import in.lambda_hc.furious_cyclist.utils.SecurityUtils
import io.undertow.server.{HttpHandler, HttpServerExchange}
import spray.json.{JsObject, JsString}
/**
* Created by vishnu on 12/6/16.
*/
class LogoutHandler extends HttpHandler {
override def handleRequest(exchange: HttpServerExchange): Unit = {
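    // Log out by overwriting the "ssid" session cookie with an empty value and an
    // already-expired date, prompting the browser to discard it.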
val cookie = SecurityUtils.createCookie("ssid", "").setExpires(new Date(System.currentTimeMillis() - 10000))
exchange.setResponseCookie(cookie)
exchange.getResponseSender.send(JsObject(
"status" -> JsString("ok"),
"message" -> JsString("User has been logged out")
).prettyPrint)
}
}
| lambda-hc/furious-cyclist | src/main/scala/in/lambda_hc/furious_cyclist/rest/undertow/handlers/auth/LogoutHandler.scala | Scala | apache-2.0 | 718 |
package lila.common
import akka.actor._
import scala.concurrent.duration._
// do NOT embed me in an actor
// for it would likely create a memory leak
final class Debouncer[A: Manifest](length: FiniteDuration, function: A => Unit) extends Actor {
private case object DelayEnd
private var delayed: Option[A] = none
def ready: Receive = {
case a: A =>
function(a)
context.system.scheduler.scheduleOnce(length, self, DelayEnd)
context become delay
}
def delay: Receive = {
case a: A => delayed = a.some
case DelayEnd =>
context become ready
delayed foreach { a =>
self ! a
delayed = none
}
}
def receive = ready
}
object Debouncer {
sealed trait Nothing
object Nothing extends Nothing
}
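// Illustrative usage (a sketch, not part of the original source): coalesce rapid
// "save" events so the function runs at most once per 500ms window. `system` and
// `persist` are assumed to exist in the caller's scope.
//
//   import akka.actor.Props
//   import scala.concurrent.duration._
//
//   val saver = system.actorOf(Props(new Debouncer[String](500.millis, doc => persist(doc))))
//   saver ! "v1" // applied immediately; starts the delay window
//   saver ! "v2" // received during the window; replayed once the window ends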
| r0k3/lila | modules/common/src/main/Debouncer.scala | Scala | mit | 777 |
trait IStringPair[T] {
def a : String
def b : String
def build(a : String, b : String) : T
def cat(that : IStringPair[T]) = build(this.a + that.a, this.b + that.b)
override def toString = a + b
}
class StringPair(val a : String, val b : String) extends IStringPair[StringPair] {
def build(a : String, b : String) = new StringPair(a, b)
def len = a.length + b.length
}
object Test {
def main(args: Array[String]): Unit = {
val a = new StringPair("A", "B")
val b = new StringPair("1", "2")
val c = a cat b
println(c.len)
}
}
| yusuke2255/dotty | tests/run/t3452.scala | Scala | bsd-3-clause | 559 |
/*
* Copyright 2015 Webtrends (http://www.webtrends.com)
*
* See the LICENCE.txt file distributed with this work for additional
* information regarding copyright ownership.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.webtrends.harness.service
import java.io.{File, FilenameFilter}
import java.nio.file._
import akka.actor.SupervisorStrategy.{Escalate, Restart, Stop}
import akka.actor._
import com.typesafe.config.{Config, ConfigFactory}
import com.webtrends.harness.HarnessConstants
import com.webtrends.harness.app.HarnessActor.{ConfigChange, SystemReady}
import com.webtrends.harness.app.PrepareForShutdown
import com.webtrends.harness.health.{ComponentState, HealthComponent}
import com.webtrends.harness.logging.LoggingAdapter
import com.webtrends.harness.service.ServiceManager.{GetMetaDataByName, RestartService, ServicesReady}
import com.webtrends.harness.service.messages.{GetMetaData, LoadService, Ready}
import com.webtrends.harness.service.meta.ServiceMetaData
import scala.Predef._
import scala.concurrent.Future
import scala.concurrent.duration._
import scala.util.control.Exception._
/**
 * Manages loading, supervision, and health reporting of the deployed harness services.
 */
class ServiceManager extends PrepareForShutdown with ServiceLoader {
import context.dispatcher
override val supervisorStrategy =
OneForOneStrategy(maxNrOfRetries = 3, withinTimeRange = 1 minute) {
case _: ActorInitializationException => Stop
case _: DeathPactException => Stop
case _: ActorKilledException => Restart
case _: Exception => Restart
case _: Throwable => Escalate
}
override def preStart {
load(context)
// Tell the harness that the services are loaded. The parent will then
// tell us when it is ready so that the services can be notified
context.parent ! ServicesReady
log.info("Service manager started: {}", context.self.path)
}
override def postStop(): Unit = {
// Kill any custom class loaders
services.values foreach {
p =>
if (p._2.isDefined) p._2.get.close()
}
services.clear
log.info("Service manager stopped: {}", context.self.path)
}
override def receive = super.receive orElse {
case SystemReady =>
log.info("Notifying Services that we are completely ready.")
context.children foreach {
p => p ! Ready(getServiceMeta(Some(p.path)).head)
}
case GetMetaData(path) =>
log.info("We have received a message to get service meta data")
val meta = getServiceMeta(path)
path match {
case None => sender() ! meta // Asked for all services
case _ => sender() ! meta.head // Asked for only a specific service
}
case LoadService(name, clazz) =>
log.info(s"We have received a message to load service $name")
sender ! context.child(name).orElse(context.child(clazz.getSimpleName)).orElse(loadService(name, clazz))
case GetMetaDataByName(service) =>
log.info("We have received a message to get service meta data")
services.filter(_._1.name.equalsIgnoreCase(service)).keys.headOption match {
case Some(meta) => sender() ! meta
case None => sender() ! Status.Failure(new NoSuchElementException(s"Could not locate the meta information for service $service"))
}
case RestartService(service) =>
log.info(s"We have received a message to restart the service $service")
services.filter(_._1.name.equalsIgnoreCase(service)).keys.headOption match {
        case Some(m) => services(m)._1 ! Kill
case None =>
}
case c: ConfigChange =>
log.info("Sending config change message to all services...")
context.children foreach {
p => p ! c
}
case Terminated(service) =>
log.info("Service {} terminated", service.path.name)
// Find and nuke the classloader
services.filter(_._1.akkaPath == service.path.toString) foreach {
p =>
if (p._2._2.isDefined) {
p._2._2.get.close()
}
services.remove(p._1)
}
}
private def getServiceMeta(servicePath: Option[ActorPath]): Seq[ServiceMetaData] = {
try {
log.debug("Service meta requested")
servicePath match {
case None =>
services.keys.toSeq
case _ =>
Seq(services.filter(p => ActorPath.fromString(p._1.akkaPath).equals(servicePath.get)).keys.headOption.get)
}
} catch {
case e: Throwable =>
log.error("Error fetching service meta information", e)
Nil
}
}
/**
* This is the health of the current object, by default will be NORMAL
* In general this should be overridden to define the health of the current object
* For objects that simply manage other objects you shouldn't need to do anything
* else, as the health of the children components would be handled by their own
* CheckHealth function
*
* @return
*/
override protected def getHealth: Future[HealthComponent] = {
log.debug("Service health requested")
Future {
if (services.isEmpty) {
HealthComponent(ServiceManager.ServiceManagerName, ComponentState.CRITICAL, "There are no services currently installed")
} else if (context.children.size != services.size) {
HealthComponent(ServiceManager.ServiceManagerName, ComponentState.CRITICAL, s"There are ${services.size} installed, but only ${context.children.size} that were successfully loaded")
} else {
HealthComponent(ServiceManager.ServiceManagerName, ComponentState.NORMAL, s"Currently managing ${services.size} service(s)")
}
}
}
}
object ServiceManager extends LoggingAdapter {
val ServiceManagerName = "service-manager"
@SerialVersionUID(1L) case class ServicesReady()
@SerialVersionUID(1L) case class GetMetaDataByName(name: String)
@SerialVersionUID(1L) case class RestartService(name: String)
def props: Props = Props[ServiceManager]
/**
* Load the configuration files for the deployed services
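   *
   * An illustrative layout that is picked up (each service directory's `conf` folder is
   * scanned for `*.conf` files):
   * {{{
   *   <service-path>/
   *     my-service/
   *       conf/
   *         my-service.conf
   * }}}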
* @param sysConfig System level config for wookiee
* @return
*/
def loadConfigs(sysConfig: Config): Seq[Config] = {
serviceDir(sysConfig) match {
case Some(s) =>
val dirs = s.listFiles.filter(_.isDirectory)
val configs = dirs flatMap {
dir =>
val path = dir.getPath.concat("/conf")
log.info("Checking the directory {} for any *.conf files to load", path)
for {
file <- getConfigFiles(path)
conf = allCatch either ConfigFactory.parseFile(file) match {
case Left(fail) => log.error(s"Could not load the config file ${file.getAbsolutePath}", fail); None
case Right(value) => Some(value)
}
if conf.isDefined
} yield conf.get
}
configs
case None => Seq()
}
}
/**
* Get the services directory
* @param config The systems main config
* @return The service root path, this is option, so if none then not found
*/
def serviceDir(config: Config): Option[File] = {
val file = FileSystems.getDefault.getPath(config.getString(HarnessConstants.KeyServicePath)).toFile
if (file.exists()) {
Some(file)
} else {
None
}
}
private def getConfigFiles(path: String): Seq[File] = {
val root = new File(path)
    if (root.exists) {
      root.listFiles(new FilenameFilter {
        def accept(dir: File, name: String): Boolean = name.endsWith(".conf")
      })
    } else {
      Seq.empty
    }
}
}
| mjwallin1/wookiee | wookiee-core/src/main/scala/com/webtrends/harness/service/ServiceManager.scala | Scala | apache-2.0 | 8,107 |
package com.kubukoz.scala99
/**
* P06 (*) Find out whether a list is a palindrome.
* Example:
* scala> isPalindrome(List(1, 2, 3, 2, 1))
* res0: Boolean = true
*/
object P06 {
/**
* Simple and doesn't reverse the whole list but half of it.
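   * e.g. for List(1, 2, 3, 2, 1): len = 5 and splitAt(2) yields (List(1, 2), List(3, 2, 1));
   * the odd-length branch then compares List(1, 2) with List(3, 2, 1).tail.reverse, a match.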
**/
def isPalindrome(l: List[Any]): Boolean = {
val len = l.length
//@formatter: off
l.splitAt(len / 2) match {
case (first, second) if len % 2 == 0 => first == second.reverse
case (first, second) => first == second.tail.reverse
}
//@formatter: on
}
}
| kubukoz/scala-99 | src/main/scala/com/kubukoz/scala99/P06.scala | Scala | apache-2.0 | 568 |
/*
* Copyright 2012 Eike Kettner
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.eknet.publet.web.shiro
import javax.servlet._
import org.apache.shiro.authz.{UnauthenticatedException, UnauthorizedException}
import http.{HttpServletRequest, HttpServletResponse}
import grizzled.slf4j.Logging
import org.eknet.publet.web.{PageWriter, PubletRequestWrapper}
/**
* @author Eike Kettner [email protected]
* @since 27.09.12 16:36
*/
abstract class AuthzFilter(val redirectToLoginPage: Boolean = true) extends Filter with PubletRequestWrapper with Logging {
def doFilter(req: ServletRequest, resp: ServletResponse, chain: FilterChain) {
try {
if (AuthcFilter.authenticationEnabled(req)) {
checkAccess(req)
}
chain.doFilter(req, resp)
} catch {
case uae: UnauthorizedException => onUnauthorized(uae, req, resp)
case se: ServletException if (se.getCause.isInstanceOf[UnauthorizedException]) => {
onUnauthorized(se.getCause.asInstanceOf[UnauthorizedException], req, resp)
}
case ue: UnauthenticatedException => onUnauthenticated(ue, req, resp)
}
}
/**
* Called if request is not authorized to access the current resource.
*
* @param ex
* @param req
* @param res
*/
def onUnauthorized(ex: UnauthorizedException, req: HttpServletRequest, res: HttpServletResponse)
/**
* Called if the request is not authenticated yet and not authorized to access
* the current resource.
*
* @param ex
* @param req
* @param res
*/
def onUnauthenticated(ex: UnauthenticatedException, req: HttpServletRequest, res: HttpServletResponse)
/**
* This is called before delegating to the filter chain. This method should do
   * access checks and throw either [[org.apache.shiro.authz.UnauthenticatedException]]
* or [[org.apache.shiro.authz.UnauthorizedException]] that is then routed to
* the two methods `onUnauthenticated` and `onUnauthorized`.
*
* This method is only called if the shiro filter has been executed and
   * shiro is set up properly.
*
* @param req
*/
def checkAccess(req: HttpServletRequest) {}
def init(filterConfig: FilterConfig) {}
def destroy() {}
}
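// Illustrative subclass (a sketch, not part of the original source): a concrete
// AuthzFilter that answers 403 for authorized-but-denied requests and redirects
// unauthenticated users to a hypothetical "/login" page.
//
//   class RedirectingAuthzFilter extends AuthzFilter(redirectToLoginPage = true) {
//     def onUnauthorized(ex: UnauthorizedException, req: HttpServletRequest, res: HttpServletResponse) {
//       res.setStatus(HttpServletResponse.SC_FORBIDDEN)
//     }
//     def onUnauthenticated(ex: UnauthenticatedException, req: HttpServletRequest, res: HttpServletResponse) {
//       res.sendRedirect("/login") // hypothetical login path
//     }
//   }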
| eikek/publet | web/src/main/scala/org/eknet/publet/web/shiro/AuthzFilter.scala | Scala | apache-2.0 | 2,732 |
package org.apache.spark.ml.bundle.ops.classification
import ml.combust.bundle.BundleContext
import ml.combust.bundle.dsl._
import ml.combust.bundle.op.{OpModel, OpNode}
import org.apache.spark.ml.bundle.{ParamSpec, SimpleParamSpec, SimpleSparkOp, SparkBundleContext}
import org.apache.spark.ml.classification.MultilayerPerceptronClassificationModel
import org.apache.spark.ml.linalg.Vectors
/**
* Created by hollinwilkins on 12/25/16.
*/
class MultiLayerPerceptronClassifierOp extends SimpleSparkOp[MultilayerPerceptronClassificationModel] {
override val Model: OpModel[SparkBundleContext, MultilayerPerceptronClassificationModel] = new OpModel[SparkBundleContext, MultilayerPerceptronClassificationModel] {
override def opName: String = Bundle.BuiltinOps.classification.multi_layer_perceptron_classifier
override val klazz: Class[MultilayerPerceptronClassificationModel] = classOf[MultilayerPerceptronClassificationModel]
override def store(model: Model, obj: MultilayerPerceptronClassificationModel)
(implicit context: BundleContext[SparkBundleContext]): Model = {
val thresholds = if(obj.isSet(obj.thresholds)) {
Some(obj.getThresholds)
} else None
model.withValue("layers", Value.longList(obj.getLayers.map(_.toLong))).
withValue("weights", Value.vector(obj.weights.toArray)).
withValue("thresholds", thresholds.map(_.toSeq).map(Value.doubleList))
}
override def load(model: Model)
(implicit context: BundleContext[SparkBundleContext]): MultilayerPerceptronClassificationModel = {
val m = new MultilayerPerceptronClassificationModel(uid = "",
weights = Vectors.dense(model.value("weights").getTensor[Double].toArray))
m.set(m.layers, model.value("layers").getLongList.map(_.toInt).toArray)
model.getValue("thresholds").
map(t => m.setThresholds(t.getDoubleList.toArray)).
getOrElse(m)
}
}
override def sparkLoad(uid: String, shape: NodeShape, model: MultilayerPerceptronClassificationModel): MultilayerPerceptronClassificationModel = {
val m = new MultilayerPerceptronClassificationModel(uid = uid, weights = model.weights)
m.set(m.layers, model.getLayers)
if (model.isSet(model.thresholds)) m.setThresholds(model.getThresholds)
m
}
override def sparkInputs(obj: MultilayerPerceptronClassificationModel): Seq[ParamSpec] = {
Seq("features" -> obj.featuresCol)
}
override def sparkOutputs(obj: MultilayerPerceptronClassificationModel): Seq[SimpleParamSpec] = {
Seq("raw_prediction" -> obj.rawPredictionCol,
"probability" -> obj.probabilityCol,
"prediction" -> obj.predictionCol)
}
}
| combust/mleap | mleap-spark/src/main/scala/org/apache/spark/ml/bundle/ops/classification/MultiLayerPerceptronClassifierOp.scala | Scala | apache-2.0 | 2,704 |
package chapter.fourteen
import ExerciseEight.{ Node, Leaf, eval }
import org.scalatest._
import org.scalatest.junit.JUnitRunner
import org.junit.runner.RunWith
@RunWith(classOf[JUnitRunner])
class ExerciseEightSpec extends FlatSpec with Matchers {
"eval" should "the value of a tree" in {
val tree = Node('+', Node('*', Leaf(3), Leaf(8)), Leaf(2), Node('-', Leaf(5)))
eval(tree) should be (21)
val anotherTree = Node('/', Leaf(252), Node('+', Leaf(2), Leaf(1)), Node('*', Leaf(2), Leaf(1)), Leaf(1))
eval(anotherTree) should be (42)
}
}
| deekim/impatient-scala | src/test/scala/chapter/fourteen/ExerciseEightSpec.scala | Scala | apache-2.0 | 564 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.mllib.fpm
import scala.collection.JavaConverters._
import scala.reflect.ClassTag
import org.apache.spark.annotation.Since
import org.apache.spark.api.java.JavaRDD
import org.apache.spark.api.java.JavaSparkContext.fakeClassTag
import org.apache.spark.internal.Logging
import org.apache.spark.mllib.fpm.AssociationRules.Rule
import org.apache.spark.mllib.fpm.FPGrowth.FreqItemset
import org.apache.spark.rdd.RDD
/**
* Generates association rules from a `RDD[FreqItemset[Item]]`. This method only generates
* association rules which have a single item as the consequent.
*
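 * For example (an illustrative sketch; `sc` is an assumed `SparkContext`):
 * {{{
 *   import org.apache.spark.mllib.fpm.FPGrowth.FreqItemset
 *
 *   val freqItemsets = sc.parallelize(Seq(
 *     new FreqItemset(Array("a"), 15L),
 *     new FreqItemset(Array("b"), 35L),
 *     new FreqItemset(Array("a", "b"), 12L)))
 *   val ar = new AssociationRules().setMinConfidence(0.8)
 *   ar.run(freqItemsets).collect().foreach(println) // keeps {a} => {b} (confidence 0.8)
 * }}}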
*/
@Since("1.5.0")
class AssociationRules private[fpm] (
private var minConfidence: Double) extends Logging with Serializable {
/**
* Constructs a default instance with default parameters {minConfidence = 0.8}.
*/
@Since("1.5.0")
def this() = this(0.8)
/**
* Sets the minimal confidence (default: `0.8`).
*/
@Since("1.5.0")
def setMinConfidence(minConfidence: Double): this.type = {
require(minConfidence >= 0.0 && minConfidence <= 1.0,
s"Minimal confidence must be in range [0, 1] but got ${minConfidence}")
this.minConfidence = minConfidence
this
}
/**
* Computes the association rules with confidence above `minConfidence`.
* @param freqItemsets frequent itemset model obtained from [[FPGrowth]]
* @return a `RDD[Rule[Item]]` containing the association rules.
*
*/
@Since("1.5.0")
def run[Item: ClassTag](freqItemsets: RDD[FreqItemset[Item]]): RDD[Rule[Item]] = {
run(freqItemsets, Map.empty[Item, Double])
}
/**
* Computes the association rules with confidence above `minConfidence`.
* @param freqItemsets frequent itemset model obtained from [[FPGrowth]]
* @param itemSupport map containing an item and its support
* @return a `RDD[Rule[Item]]` containing the association rules. The rules will be able to
* compute also the lift metric.
*/
@Since("2.4.0")
def run[Item: ClassTag](freqItemsets: RDD[FreqItemset[Item]],
itemSupport: scala.collection.Map[Item, Double]): RDD[Rule[Item]] = {
// For candidate rule X => Y, generate (X, (Y, freq(X union Y)))
val candidates = freqItemsets.flatMap { itemset =>
val items = itemset.items
items.flatMap { item =>
items.partition(_ == item) match {
case (consequent, antecedent) if !antecedent.isEmpty =>
Some((antecedent.toSeq, (consequent.toSeq, itemset.freq)))
case _ => None
}
}
}
// Join to get (X, ((Y, freq(X union Y)), freq(X))), generate rules, and filter by confidence
candidates.join(freqItemsets.map(x => (x.items.toSeq, x.freq)))
      .map { case (antecedent, ((consequent, freqUnion), freqAntecedent)) =>
        new Rule(antecedent.toArray,
consequent.toArray,
freqUnion,
freqAntecedent,
// the consequent contains always only one element
itemSupport.get(consequent.head))
}.filter(_.confidence >= minConfidence)
}
/**
* Java-friendly version of `run`.
*/
@Since("1.5.0")
def run[Item](freqItemsets: JavaRDD[FreqItemset[Item]]): JavaRDD[Rule[Item]] = {
val tag = fakeClassTag[Item]
run(freqItemsets.rdd)(tag)
}
}
@Since("1.5.0")
object AssociationRules {
/**
* An association rule between sets of items.
* @param antecedent hypotheses of the rule. Java users should call [[Rule#javaAntecedent]]
* instead.
* @param consequent conclusion of the rule. Java users should call [[Rule#javaConsequent]]
* instead.
* @tparam Item item type
*
*/
@Since("1.5.0")
class Rule[Item] private[fpm] (
@Since("1.5.0") val antecedent: Array[Item],
@Since("1.5.0") val consequent: Array[Item],
freqUnion: Double,
freqAntecedent: Double,
freqConsequent: Option[Double]) extends Serializable {
/**
   * Returns the confidence of the rule:
   * confidence(X => Y) = freq(X union Y) / freq(X).
   */
@Since("1.5.0")
def confidence: Double = freqUnion / freqAntecedent
/**
   * Returns the lift of the rule:
   * lift(X => Y) = confidence(X => Y) / support(Y).
   * It is defined only when the consequent's support is known; a value above 1
   * means the antecedent and consequent co-occur more often than expected under
   * independence.
*/
@Since("2.4.0")
def lift: Option[Double] = freqConsequent.map(fCons => confidence / fCons)
require(antecedent.toSet.intersect(consequent.toSet).isEmpty, {
val sharedItems = antecedent.toSet.intersect(consequent.toSet)
s"A valid association rule must have disjoint antecedent and " +
s"consequent but ${sharedItems} is present in both."
})
/**
* Returns antecedent in a Java List.
*
*/
@Since("1.5.0")
def javaAntecedent: java.util.List[Item] = {
antecedent.toList.asJava
}
/**
* Returns consequent in a Java List.
*
*/
@Since("1.5.0")
def javaConsequent: java.util.List[Item] = {
consequent.toList.asJava
}
override def toString: String = {
s"${antecedent.mkString("{", ",", "}")} => " +
s"${consequent.mkString("{", ",", "}")}: (confidence: $confidence; lift: $lift)"
}
}
}
| pgandhi999/spark | mllib/src/main/scala/org/apache/spark/mllib/fpm/AssociationRules.scala | Scala | apache-2.0 | 5,833 |
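// --- Hedged usage sketch (not part of the file above) ---
// A minimal example of driving AssociationRules from FPGrowth. The RDD of
// transactions and the threshold values are assumptions for illustration.
object AssociationRulesSketch {
  import org.apache.spark.mllib.fpm.{AssociationRules, FPGrowth}
  import org.apache.spark.rdd.RDD

  def mine(transactions: RDD[Array[String]]): Unit = {
    // Mine frequent itemsets first; minSupport = 0.2 is an arbitrary choice.
    val model = new FPGrowth().setMinSupport(0.2).run(transactions)
    // Keep only rules whose confidence is at least 0.8.
    val rules = new AssociationRules()
      .setMinConfidence(0.8)
      .run(model.freqItemsets)
    rules.collect().foreach { rule =>
      println(s"${rule.antecedent.mkString(",")} => ${rule.consequent.mkString(",")}" +
        s" (confidence: ${rule.confidence})")
    }
  }
}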
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.api.stream.sql.validation
import org.apache.flink.api.scala._
import org.apache.flink.table.api.scala._
import org.apache.flink.table.api.{TableException, ValidationException}
import org.apache.flink.table.planner.runtime.utils.JavaUserDefinedScalarFunctions.OverAgg0
import org.apache.flink.table.planner.utils.TableTestBase
import org.apache.flink.types.Row
import org.junit.Test
class OverWindowValidationTest extends TableTestBase {
private val streamUtil = scalaStreamTestUtil()
streamUtil.addDataStream[(Int, String, Long)]("T1", 'a, 'b, 'c, 'proctime.proctime)
/**
* All aggregates must be computed on the same window.
*/
@Test(expected = classOf[TableException])
def testMultiWindow(): Unit = {
val sqlQuery = "SELECT " +
"c, " +
"count(a) OVER (PARTITION BY c ORDER BY proctime RANGE UNBOUNDED preceding), " +
"sum(a) OVER (PARTITION BY b ORDER BY proctime RANGE UNBOUNDED preceding) " +
"from T1"
streamUtil.tableEnv.sqlQuery(sqlQuery).toAppendStream[Row]
}
/**
    * An OVER clause is required for the [[OverAgg0]] window function.
*/
@Test(expected = classOf[ValidationException])
def testInvalidOverAggregation(): Unit = {
streamUtil.addFunction("overAgg", new OverAgg0)
val sqlQuery = "SELECT overAgg(c, a) FROM MyTable"
streamUtil.tableEnv.sqlQuery(sqlQuery)
}
}
| fhueske/flink | flink-table/flink-table-planner-blink/src/test/scala/org/apache/flink/table/api/stream/sql/validation/OverWindowValidationTest.scala | Scala | apache-2.0 | 2,196 |
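// --- Hedged counterpart (not part of the file above) ---
// The valid form of the query rejected by testMultiWindow: when both
// aggregates share the same OVER window (same PARTITION BY / ORDER BY),
// no TableException is raised.
//
//   SELECT c,
//          count(a) OVER (PARTITION BY c ORDER BY proctime RANGE UNBOUNDED preceding),
//          sum(a)   OVER (PARTITION BY c ORDER BY proctime RANGE UNBOUNDED preceding)
//   FROM T1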
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.command
import org.apache.spark.SparkException
import org.apache.spark.sql._
import org.apache.spark.sql.catalyst.catalog._
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
import org.apache.spark.sql.execution.datasources._
/**
* A command used to write the result of a query to a directory.
*
* The syntax of using this command in SQL is:
* {{{
* INSERT OVERWRITE DIRECTORY (path=STRING)?
* USING format OPTIONS ([option1_name "option1_value", option2_name "option2_value", ...])
* SELECT ...
* }}}
*
* @param storage storage format used to describe how the query result is stored.
* @param provider the data source type to be used
 * @param query the logical plan representing the data to write
 * @param overwrite whether to overwrite the existing directory
*/
case class InsertIntoDataSourceDirCommand(
storage: CatalogStorageFormat,
provider: String,
query: LogicalPlan,
overwrite: Boolean) extends RunnableCommand {
override def innerChildren: Seq[LogicalPlan] = query :: Nil
override def run(sparkSession: SparkSession): Seq[Row] = {
assert(storage.locationUri.nonEmpty, "Directory path is required")
assert(provider.nonEmpty, "Data source is required")
// Create the relation based on the input logical plan: `query`.
val pathOption = storage.locationUri.map("path" -> CatalogUtils.URIToString(_))
val dataSource = DataSource(
sparkSession,
className = provider,
options = storage.properties ++ pathOption,
catalogTable = None)
val isFileFormat = classOf[FileFormat].isAssignableFrom(dataSource.providingClass)
if (!isFileFormat) {
throw new SparkException(
"Only Data Sources providing FileFormat are supported: " + dataSource.providingClass)
}
val saveMode = if (overwrite) SaveMode.Overwrite else SaveMode.ErrorIfExists
try {
sparkSession.sessionState.executePlan(dataSource.planForWriting(saveMode, query)).toRdd
} catch {
case ex: AnalysisException =>
logError(s"Failed to write to directory " + storage.locationUri.toString, ex)
throw ex
}
Seq.empty[Row]
}
}
| bdrillard/spark | sql/core/src/main/scala/org/apache/spark/sql/execution/command/InsertIntoDataSourceDirCommand.scala | Scala | apache-2.0 | 2,989 |
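// --- Hedged usage sketch (not part of the file above) ---
// The command above backs the INSERT OVERWRITE DIRECTORY ... USING syntax.
// The output path and source table name are assumptions for illustration.
object InsertIntoDirectorySketch {
  import org.apache.spark.sql.SparkSession

  def writeToDirectory(spark: SparkSession): Unit = {
    spark.sql(
      """INSERT OVERWRITE DIRECTORY '/tmp/output'
        |USING parquet
        |SELECT * FROM source_table""".stripMargin)
  }
}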
package org.jetbrains.plugins.scala.annotator.gutter
/**
* Pavel.Fatin, 21.01.2010
*/
class GroupMultilineControlTest extends LineMarkerTestBase {
protected override def getBasePath = super.getBasePath + "/group/multiline/control/"
def testDo = doTest
def testFor = doTest
def testIf = doTest
def testIterator = doTest
def testMatch = doTest
def testTry = doTest
def testWhile = doTest
} | LPTK/intellij-scala | test/org/jetbrains/plugins/scala/annotator/gutter/GroupMultilineControlTest.scala | Scala | apache-2.0 | 409 |
package spray.oauth.endpoints
/**
* Created by hasanozgan on 06/06/14.
*/
trait OAuth2Services extends TokenService with AuthorizeService {
val defaultOAuth2Routes = defaultTokenRoutes ~ defaultAuthorizeRoutes
}
| hasanozgan/spray-oauth | core/src/main/scala/spray/oauth/endpoints/OAuth2Services.scala | Scala | apache-2.0 | 219 |
/*
* This software is licensed under the GNU Affero General Public License, quoted below.
*
* This file is a part of PowerAPI.
*
* Copyright (C) 2011-2016 Inria, University of Lille 1.
*
* PowerAPI is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of
* the License, or (at your option) any later version.
*
* PowerAPI is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with PowerAPI.
*
* If not, please consult http://www.gnu.org/licenses/agpl-3.0.html.
*/
package org.powerapi.sampling.cpu
import java.util.concurrent.TimeUnit
import scala.collection.JavaConverters._
import scala.concurrent.duration.{DurationLong, FiniteDuration}
import com.typesafe.config.Config
import org.joda.time.format.PeriodFormatterBuilder
import org.powerapi.core.{ConfigValue, Configuration, LinuxHelper}
/**
* Main configuration.
*
* @author <a href="mailto:[email protected]">Maxime Colmant</a>
*/
class SamplingConfiguration extends Configuration(None) {
lazy val samplingInterval: FiniteDuration = load {
_.getDuration("powerapi.sampling.interval", TimeUnit.NANOSECONDS)
} match {
case ConfigValue(value) => value.nanoseconds
case _ => 1l.seconds
}
lazy val nbSamples: Int = load {
_.getInt("powerapi.sampling.nb-samples")
} match {
case ConfigValue(value) => value
case _ => 1
}
lazy val dvfs: Boolean = load {
_.getBoolean("powerapi.sampling.dvfs")
} match {
case ConfigValue(value) => value
case _ => false
}
lazy val turbo: Boolean = load {
_.getBoolean("powerapi.sampling.turbo")
} match {
case ConfigValue(value) => value
case _ => false
}
lazy val steps: List[Int] = load {
_.getIntList("powerapi.sampling.steps")
} match {
case ConfigValue(values) => values.asScala.map(_.toInt).toList.sortWith(_ > _)
case _ => List(100, 25)
}
lazy val stepDuration: Int = load {
_.getInt("powerapi.sampling.step-duration")
} match {
case ConfigValue(value) => value
case _ => 2
}
lazy val topology: Map[Int, Set[Int]] = load { conf =>
(for (item: Config <- conf.getConfigList("powerapi.cpu.topology").asScala)
yield (item.getInt("core"), item.getIntList("indexes").asScala.map(_.toInt).toSet)).toMap
} match {
case ConfigValue(values) => values
case _ => Map()
}
lazy val events: Set[String] = load {
_.getStringList("powerapi.sampling.events")
} match {
case ConfigValue(values) => values.asScala.toSet
case _ => Set()
}
lazy val baseOutput = "output-"
lazy val powers = "powers"
lazy val outputPowers = s"$baseOutput${powers.toLowerCase.replace('_', '-').replace(':', '-')}.dat"
lazy val separator = "="
lazy val formatter = new PeriodFormatterBuilder().appendHours()
.appendSuffix("H ")
.appendMinutes()
.appendSuffix("m ")
.appendSeconds()
.appendSuffix("s ")
.appendMillis()
.appendSuffix("ms ")
.toFormatter
lazy val osHelper = new LinuxHelper
}
| Spirals-Team/powerapi | powerapi-sampling-cpu/src/main/scala/org/powerapi/sampling/cpu/SamplingConfiguration.scala | Scala | agpl-3.0 | 3,371 |
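// --- Hedged configuration sketch (not part of the file above) ---
// The keys below mirror what SamplingConfiguration reads; the concrete values
// (and the perf event name) are assumptions for illustration. Parsing through
// Typesafe Config shows the expected shape of application.conf.
object SamplingConfigSketch {
  import com.typesafe.config.ConfigFactory

  val sample = ConfigFactory.parseString(
    """powerapi.sampling.interval = 1s
      |powerapi.sampling.nb-samples = 3
      |powerapi.sampling.dvfs = true
      |powerapi.sampling.turbo = false
      |powerapi.sampling.steps = [100, 50, 25]
      |powerapi.sampling.step-duration = 2
      |powerapi.sampling.events = ["CPU_CLK_UNHALTED:THREAD_P"]
      |powerapi.cpu.topology = [
      |  { core = 0, indexes = [0, 4] },
      |  { core = 1, indexes = [1, 5] }
      |]""".stripMargin)
}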
/***********************************************************************
* Copyright (c) 2013-2018 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.accumulo.util
import org.junit.runner.RunWith
import org.locationtech.geomesa.index.conf.splitter.DefaultSplitter
import org.locationtech.geomesa.utils.text.KVPairParser
import org.opengis.feature.simple.SimpleFeatureType
import org.specs2.mutable.Specification
import org.specs2.runner.JUnitRunner
import scala.collection.JavaConversions._
@RunWith(classOf[JUnitRunner])
class AccumuloSftBuilderTest extends Specification {
sequential
"SpecBuilder" >> {
"configure table splitters as strings" >> {
      val config = Map("id.type" -> "digit", "fmt" -> "%02d", "min" -> "0", "max" -> "99")
val sft1 = new AccumuloSftBuilder()
.intType("i")
.longType("l")
.recordSplitter(classOf[DefaultSplitter].getName, config)
.build("test")
// better - uses class directly (or at least less annoying)
val sft2 = new AccumuloSftBuilder()
.recordSplitter(classOf[DefaultSplitter], config)
.intType("i")
.longType("l")
.build("test")
def test(sft: SimpleFeatureType) = {
import org.locationtech.geomesa.utils.geotools.RichSimpleFeatureType.RichSimpleFeatureType
sft.getAttributeCount mustEqual 2
sft.getAttributeDescriptors.map(_.getLocalName) must containAllOf(List("i", "l"))
sft.getTableSplitter must beSome(classOf[DefaultSplitter])
val opts = KVPairParser.parse(sft.getTableSplitterOptions)
opts.toSeq must containTheSameElementsAs(config.toSeq)
}
List(sft1, sft2) forall test
}
}
}
| ddseapy/geomesa | geomesa-accumulo/geomesa-accumulo-datastore/src/test/scala/org/locationtech/geomesa/accumulo/util/AccumuloSftBuilderTest.scala | Scala | apache-2.0 | 2,041 |
import scala.io.StdIn
object Enclosure {
def main(args: Array[String]): Unit = {
val n = StdIn.readInt()
val ls = StdIn.readLine().split(" ").map(_.toDouble)
val r = solve(ls)
val head = ls.head
val ls2 = ls.tail.init
println("%.6f\\n%.6f\\n".format(0.0,0.0))
println("%.6f\\n%.6f\\n".format(0.0,head))
val xDiff: Double = math.tan(math.acos((head/2.0)/r)) * (head / 2.0)
val yDiff: Double = head / 2.0
ls2
.scanLeft((-xDiff, head-yDiff))((prev, l) => {
val angle = math.asin((l/2)/r) * 2.0
(math.cos(-angle) * prev._1 - math.sin(-angle) * prev._2, math.sin(-angle) * prev._1 + math.cos(-angle) * prev._2)
})
.tail
      .foreach(p => println("%.6f\n%.6f\n".format(p._1 + xDiff, p._2 + yDiff)))
}
def solve(ls: Array[Double]): Double = {
var min: Double = 0
var max: Double = 1.6777216E7
var diff: Double = Double.MaxValue
var r: Double = 0
var count = 0
    while (math.abs(diff) > 1e-15) {
r = (min + max) / 2
val myPi = ls.map(l => math.asin((l/2)/r)).sum
diff = myPi - math.Pi
if (diff < 0)
max = r
      else if (diff > 0)
min = r
count += 1
}
r
}
}
| everyevery/programming_study | hackerrank/contest/2016w23/Enclosure/Enclosure.scala | Scala | mit | 1,232 |
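// --- Hedged sanity check (not part of the file above) ---
// solve() binary-searches for the radius r at which the chords of the given
// lengths subtend half-angles summing to pi, i.e. the circumradius of the
// polygon. For an equilateral triangle of side L that radius is L / sqrt(3),
// so solve should recover it.
object EnclosureCheck extends App {
  val side = 2.0
  val r = Enclosure.solve(Array(side, side, side))
  val expected = side / math.sqrt(3) // circumradius of an equilateral triangle
  assert(math.abs(r - expected) < 1e-6, s"got $r, expected $expected")
  println(s"r = $r (expected $expected)")
}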
package scrupal.utils
import org.specs2.mutable.Specification
class DomainNamesSpec extends Specification {
"DomainNames" should {
"recognize top level domain names" in {
val result = DomainNames.tldPattern.findPrefixMatchOf("com")
result.isDefined must beTrue
result.get.source must beEqualTo("com")
}
"fail to recognize bogus top level domain names" in {
val result = DomainNames.tldPattern.findPrefixMatchOf("fubar")
result.isDefined must beFalse
}
"recognize simple domain names" in {
val result = DomainNames.simpleDomainName.findPrefixMatchOf("foo.net")
result.isDefined must beTrue
result.get.source must beEqualTo("foo.net")
}
"fail to recognize non-domain names" in {
DomainNames.simpleDomainName.findPrefixMatchOf("").isDefined must beFalse
DomainNames.simpleDomainName.findPrefixMatchOf("foo net").isDefined must beFalse
DomainNames.simpleDomainName.findPrefixMatchOf("#$%.net").isDefined must beFalse
DomainNames.simpleDomainName.findPrefixMatchOf("ab_c.org").isDefined must beFalse
}
"match simple domain names" in {
"foo.net" match {
case DomainNames.simpleDomainName(top,tld) =>
top must beEqualTo("foo.net")
tld must beEqualTo("net")
case _ =>
failure("oops")
}
success("good")
}
"recognize 'reactific.com'" in {
val result = DomainNames.matchDomainName("reactific.com")
result._1 must beEqualTo(Some("reactific.com"))
result._2 must beEqualTo(None)
}
"recognize 'admin.reactific.com'" in {
DomainNames.matchDomainName("admin.reactific.com") must beEqualTo( Some("reactific.com") -> Some("admin") )
}
"recognize 'localhost'" in {
DomainNames.matchDomainName("localhost") must beEqualTo (Some("localhost") -> None )
DomainNames.matchDomainName("127.0.0.1") must beEqualTo (Some("localhost") -> None )
DomainNames.matchDomainName("::1") must beEqualTo (Some("localhost") -> None )
DomainNames.matchDomainName("fe80::1%lo0") must beEqualTo (Some("localhost") -> None )
}
"fail un unrecognized host name" in {
DomainNames.matchDomainName("bad") must beEqualTo ( None → None )
}
}
}
| scrupal/scrupal-core | scrupal-server/src/test/scala/scrupal/utils/DomainNamesSpec.scala | Scala | apache-2.0 | 2,268 |
/** MACHINE-GENERATED FROM AVRO SCHEMA. DO NOT EDIT DIRECTLY */
package example.idl
import shapeless.{:+:, CNil}
final case class Event1()
final case class Event2()
final case class Event3()
final case class Event4()
final case class ShouldRenderAsOption1(value: Option[Event1])
final case class ShouldRenderAsOption2(value: Option[Event1])
final case class ShouldRenderAsCoproduct3(value: Option[Event1 :+: Event2 :+: CNil])
final case class ShouldRenderAsCoproduct4(value: Option[Event1 :+: Event2 :+: CNil])
final case class ShouldRenderAsCoproduct5(value: Option[Event1 :+: Event2 :+: CNil])
final case class ShouldRenderAsCoproduct6(value: Option[Event1 :+: Event2 :+: Event3 :+: CNil])
final case class ShouldRenderAsCoproduct7(value: Option[Event1 :+: Event2 :+: Event3 :+: CNil])
final case class ShouldRenderAsCoproduct8(value: Option[Event1 :+: Event2 :+: Event3 :+: CNil])
final case class ShouldRenderAsCoproduct9(value: Event1 :+: Event2 :+: CNil)
final case class ShouldRenderAsCoproduct10(value: Event1 :+: Event2 :+: Event3 :+: Event4 :+: CNil)
final case class ShouldRenderAsCoproduct11(value: Event1 :+: Event2 :+: Event3 :+: Event4 :+: CNil) | julianpeeters/avrohugger | avrohugger-core/src/test/expected/standard/example/idl/AllUnionsWithOptionShapelessCoproduct.scala | Scala | apache-2.0 | 1,176 |
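// --- Hedged usage sketch (not part of the file above) ---
// Constructing and inspecting one of the generated coproduct wrappers; this
// is plain shapeless usage, nothing beyond the case classes emitted above.
object CoproductSketch {
  import example.idl._
  import shapeless._

  val event: Event1 :+: Event2 :+: CNil =
    Coproduct[Event1 :+: Event2 :+: CNil](Event1())
  val wrapped = ShouldRenderAsCoproduct9(event)
  // select yields Some(...) only when the coproduct currently holds that type.
  assert(wrapped.value.select[Event1].contains(Event1()))
  assert(wrapped.value.select[Event2].isEmpty)
}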
package com.basrikahveci
package cardgame.messaging.request
import cardgame.messaging.Request
import cardgame.core.{OnlineUsers, Session}
import cardgame.domain.User
class InviteUserRequest(val invitedUserId: Long) extends Request with OnlineUsers {
def handle(session: Session, user: User) = inviteFriend(user, invitedUserId)
}
| metanet/cardgame-server-scala | src/main/scala/com/basrikahveci/cardgame/messaging/request/InviteUserRequest.scala | Scala | mit | 333 |
// Copyright (C) 2011-2012 the original author or authors.
// See the LICENCE.txt file distributed with this work for additional
// information regarding copyright ownership.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package controllerTraits
import connectors.APIConnector
import models.UserRegister
import forms.UserRegisterForm._
import play.api.i18n.Messages
import play.api.i18n.Messages.Implicits._
import play.api.Play.current
import play.api.mvc.{Action, Controller}
import play.api.libs.ws.WSResponse
import scala.concurrent.Future
import scala.concurrent.ExecutionContext.Implicits.global
trait RegisterCtrl extends Controller {
val apiConnector : APIConnector
val register = Action.async { implicit request =>
val urform = RegisterUserForm.fill(new UserRegister("","","","","",""))
Future.successful(Ok(views.html.register(urform)))
}
val submit = Action.async { implicit request =>
RegisterUserForm.bindFromRequest().fold(
formWithErrors => {
Future.successful(BadRequest(views.html.register(formWithErrors)))
},
accData => {
apiConnector.createAccount(accData).flatMap {
        case ws: WSResponse => ws.status match {
case INTERNAL_SERVER_ERROR => Future.successful(Ok(views.html.error_template(Messages("cjww.auth.error.generic"))))
case CREATED => Future.successful(Ok(views.html.success("individual")))
case _ => Future.successful(Ok(views.html.error_template(Messages("cjww.auth.error.generic"))))
}
case _ => Future.successful(Ok(views.html.error_template(Messages("cjww.auth.error.generic"))))
}
}
)
}
}
| chrisjwwalker/cjww-auth-service | app/controllerTraits/RegisterCtrl.scala | Scala | apache-2.0 | 2,175 |
/*
* Copyright (C) 2012 Mikołaj Sochacki mikolajsochacki AT gmail.com
* This file is part of VRegister (Virtual Register)
* Apache License Version 2.0, January 2004 http://www.apache.org/licenses/
*/
package eu.brosbit.opos.lib
import javax.mail._
import javax.mail.internet._
import _root_.net.liftweb.util._
import _root_.net.liftweb.common._
import _root_.eu.brosbit.opos.model.MapExtraData
class MailConfig {
  /** Configures the mailer from inside a snippet. */
def configureMailer(host: String, port: String, user: String, password: String) {
this.host = host
this.user = user
this.port = port
this.password = password
mkConfig()
}
  /** Configures the mailer at bootstrap when the database contains configuration data. */
def autoConfigure(){
loadConfig()
mkConfig()
}
def getConfig() = {
loadConfig()
(host, port, user,password)
}
private var host = ""
private var user = ""
private var password = ""
private var port = ""
private def loadConfig() = {
host = ConfigLoader.emailSMTP
port = ConfigLoader.emailPort
user = ConfigLoader.emailAddr
password = ConfigLoader.emailPassw
}
private def mkConfig(){
/*
println("[App Info]: mkConfig in Mailer user %s password %s host %s port %s".format(user, password, host, port))
// Enable TLS support
System.setProperty("mail.smtp.starttls.enable", "true");
// Set the host name
System.setProperty("mail.smtp.host", this.host) // Enable authentication
System.setProperty("mail.smtp.starttls.enable", "true");
System.setProperty("mail.smtp.port", this.port)
println("HOST NAME ::::: " + System.getProperty("mail.smtp.host") + " PORT ::: " + System.getProperty("mail.smtp.port"))
*/
Mailer.customProperties = Map(
"mail.smtp.host" -> "smtp.gmail.com",
"mail.smtp.port" -> "587",
"mail.smtp.auth" -> "true",
"mail.smtp.starttls.enable" -> "true")
    // Provide a means for authentication. Pass it a Box, which can either be Full or Empty
Mailer.authenticator = Full(new Authenticator {
override def getPasswordAuthentication = new PasswordAuthentication( user, password)
})
}
}
| mikolajs/osp | src/main/scala/eu/brosbit/opos/lib/MailConfig.scala | Scala | agpl-3.0 | 2,292 |
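// --- Hedged usage sketch (not part of the file above) ---
// Sending mail through Lift's Mailer once MailConfig has set it up. The
// addresses and body text are assumptions for illustration.
object MailSketch {
  import eu.brosbit.opos.lib.MailConfig
  import net.liftweb.util.Mailer
  import net.liftweb.util.Mailer._

  def sendWelcome(): Unit = {
    val config = new MailConfig
    config.autoConfigure()
    Mailer.sendMail(
      From("noreply@example.org"),
      Subject("Welcome to VRegister"),
      To("student@example.org"),
      PlainMailBodyType("Your account has been created."))
  }
}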
// Copyright: 2010 - 2016 https://github.com/ensime/ensime-server/graphs
// License: http://www.gnu.org/licenses/gpl-3.0.en.html
package org.ensime.core
import akka.actor._
import com.google.common.io.ByteStreams
import java.io.{ File, IOException }
import java.util.jar.JarFile
import org.ensime.api._
class DocResolver(
prefix: String,
forceJavaVersion: Option[String] // for testing
)(
implicit
config: EnsimeConfig
) extends Actor with ActorLogging with DocUsecaseHandling {
var htmlToJar = Map.empty[String, File]
var jarNameToJar = Map.empty[String, File]
var docTypes = Map.empty[String, DocType]
sealed trait DocType
case object Javadoc extends DocType
case object Javadoc8 extends DocType
case object Scaladoc extends DocType
// In javadoc docs, index.html has a comment that reads 'Generated by javadoc'
  private val JavadocComment = """Generated by javadoc (?:\(([0-9\.]+))?""".r.unanchored
override def preStart(): Unit = {
// On initialisation, do a fast scan (< 1s for 50 jars) to determine
// the package contents of each jar, and whether it's a javadoc or
// scaladoc.
for (
jarFile <- config.allDocJars if jarFile.exists()
) {
try {
val jar = new JarFile(jarFile)
val jarFileName = jarFile.getName
jarNameToJar += jarFileName -> jarFile
docTypes += (jarFileName -> Scaladoc)
val enumEntries = jar.entries()
while (enumEntries.hasMoreElements) {
val entry = enumEntries.nextElement()
if (!entry.isDirectory) {
val f = new File(entry.getName)
val dir = f.getParent
if (dir != null) {
htmlToJar += entry.getName -> jarFile
}
// Check for javadocs
if (entry.getName == "index.html") {
val bytes = ByteStreams.toByteArray(jar.getInputStream(entry))
new String(bytes) match {
case JavadocComment(version: String) if version.startsWith("1.8") =>
docTypes += jarFileName -> Javadoc8
case JavadocComment(_*) =>
docTypes += jarFileName -> Javadoc
case _ =>
}
}
}
}
} catch {
case e: IOException =>
// continue regardless
log.error(e, "Failed to process doc jar: " + jarFile.getName)
}
}
}
private def javaFqnToPath(fqn: DocFqn): String = {
if (fqn.typeName == "package") {
fqn.pack.replace(".", "/") + "/package-summary.html"
} else {
fqn.pack.replace(".", "/") + "/" + fqn.typeName + ".html"
}
}
def scalaFqnToPath(fqn: DocFqn): String = {
if (fqn.typeName == "package") {
fqn.pack.replace(".", "/") + "/package.html"
} else fqn.pack.replace(".", "/") + "/" + fqn.typeName + ".html"
}
private def makeLocalUri(jar: File, sig: DocSigPair): String = {
val jarName = jar.getName
val docType = docTypes(jarName)
val java = docType == Javadoc || docType == Javadoc8
if (java) {
val path = javaFqnToPath(sig.java.fqn)
val anchor = sig.java.member.map { s =>
"#" + { if (docType == Javadoc8) toJava8Anchor(s) else s }
}.getOrElse("")
s"$prefix/$jarName/$path$anchor"
} else {
val scalaSig = maybeReplaceWithUsecase(jar, sig.scala)
val anchor = scalaSig.fqn.mkString +
scalaSig.member.map("@" + _).getOrElse("")
s"$prefix/$jarName/index.html#$anchor"
}
}
  private val PackRegexp = """^((?:[a-z0-9]+\.)+)""".r
private def guessJar(sig: DocSigPair): Option[(File, DocSigPair)] = {
val scalafqn = scalaFqnToPath(sig.scala.fqn)
val javafqn = javaFqnToPath(sig.java.fqn)
val scala = htmlToJar.get(scalafqn).map((_, sig))
val scala2 = scala.orElse(
htmlToJar.get(scalafqn.replace("$.html", ".html")).map({ file =>
// Documentation for Object doesn't exists but documentation for Class does
        val typeName = sig.scala.fqn.typeName.replaceFirst("\\$$", "")
val sigOfClass = sig.copy(scala = sig.scala.copy(fqn = sig.scala.fqn.copy(typeName = typeName)))
(file, sigOfClass)
})
)
scala2.orElse(htmlToJar.get(javafqn).map((_, sig)))
}
private def resolveLocalUri(sig: DocSigPair): Option[String] = {
guessJar(sig) match {
case Some((jar, sig)) =>
Some(makeLocalUri(jar, sig))
case _ =>
log.debug(s"Failed to resolve doc jar for: $sig")
None
}
}
// Javadoc 8 changed the anchor format to remove illegal
// url characters: parens, commas, brackets.
// See https://bugs.eclipse.org/bugs/show_bug.cgi?id=432056
// and https://bugs.openjdk.java.net/browse/JDK-8025633
  private val Java8Chars = """(?:,|\(|\)|\[\])""".r
private def toJava8Anchor(anchor: String): String = {
Java8Chars.replaceAllIn(anchor, { m =>
anchor(m.start) match {
case ',' => "-"
case '(' => "-"
case ')' => "-"
case '[' => ":A"
}
})
}
private def toAndroidAnchor(anchor: String): String = anchor.replace(",", ", ")
private def resolveWellKnownUri(sig: DocSigPair): Option[String] = {
if (sig.java.fqn.javaStdLib) {
val path = javaFqnToPath(sig.java.fqn)
val rawVersion = forceJavaVersion.getOrElse(scala.util.Properties.javaVersion)
val version =
if (rawVersion.startsWith("1.8")) "8" else if (rawVersion.startsWith("1.7")) "7" else "6"
val anchor = sig.java.member.map {
m => "#" + { if (version == "8") toJava8Anchor(m) else m }
}.getOrElse("")
Some(s"http://docs.oracle.com/javase/$version/docs/api/$path$anchor")
} else if (sig.java.fqn.androidStdLib) {
val path = javaFqnToPath(sig.java.fqn)
val anchor = sig.java.member.map { m => "#" + toAndroidAnchor(m) }.getOrElse("")
Some(s"http://developer.android.com/reference/$path$anchor")
} else None
}
def resolve(sig: DocSigPair): Option[String] = resolveLocalUri(sig) orElse resolveWellKnownUri(sig)
// for java stuff, really
def resolve(sig: DocSig): Option[String] = resolve(DocSigPair(sig, sig))
def receive: Receive = {
case p: DocSigPair =>
val response = resolve(p) match {
case Some(path) => StringResponse(path)
case None => FalseResponse
}
sender() ! response
}
}
object DocResolver {
def apply(
prefix: String = "docs",
java: Option[String] = None
)(
implicit
config: EnsimeConfig
): Props = Props(classOf[DocResolver], prefix, java, config)
}
| sugakandrey/ensime-server | core/src/main/scala/org/ensime/core/DocResolver.scala | Scala | gpl-3.0 | 6,584 |
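// --- Hedged illustration (not part of the file above) ---
// What the Javadoc 8 anchor mangling amounts to: parens and commas become '-',
// '[]' becomes ':A'. toJava8Anchor is private, so the same substitution is
// reproduced here for illustration.
object AnchorSketch extends App {
  private val java8Chars = """(?:,|\(|\)|\[\])""".r

  def mangle(anchor: String): String =
    java8Chars.replaceAllIn(anchor, m =>
      anchor(m.start) match {
        case ',' => "-"
        case '(' => "-"
        case ')' => "-"
        case '[' => ":A"
      })

  assert(mangle("charAt(int)") == "charAt-int-")
  assert(mangle("copyOf(int[],int)") == "copyOf-int:A-int-")
}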
package org.jetbrains.plugins.scala.worksheet.cell
import com.intellij.openapi.components.AbstractProjectComponent
import com.intellij.openapi.project.Project
import com.intellij.openapi.util.text.StringUtil
import com.intellij.psi.{PsiComment, PsiElement, PsiFile, PsiWhiteSpace}
import org.jetbrains.plugins.scala.lang.psi.api.ScalaFile
import org.jetbrains.plugins.scala.worksheet.settings.WorksheetFileSettings
import scala.collection.{mutable, _}
/**
* User: Dmitry.Naydanov
*/
class BasicCellManager(project: Project) extends AbstractProjectComponent(project) with CellManager {
private var cells = mutable.WeakHashMap.empty[PsiFile, mutable.TreeMap[Int, CellDescriptor]]
override def canHaveCells(file: PsiFile): Boolean = file match {
case scalaFile: ScalaFile if scalaFile.isWorksheetFile =>
refreshMarkers(file)
WorksheetFileSettings.getRunType(scalaFile).isUsesCell
case _ => false
}
/**
    * A cell-start comment must be at the top level and must start on a new line.
    *
    * @return true if the given element is a comment that starts a cell
*/
override def isStartCell(element: PsiElement): Boolean = element match { //todo
case comment: PsiComment =>
refreshMarkers(element.getContainingFile)
cells.get(comment.getContainingFile).exists(_.get(comment.getTextOffset).exists(compare(element)))
case _ => false
}
override def getCellFor(startElement: PsiElement): Option[CellDescriptor] = {
refreshMarkers(startElement.getContainingFile)
cells.get(startElement.getContainingFile).flatMap(_.get(startElement.getTextOffset).filter(compare(startElement)))
}
override def canStartCell(element: PsiElement): Boolean = processRawElement(element, check)
override def processProbablyStartElement(element: PsiElement): Boolean = processRawElement(element, checkAndAdd)
override def getCells(file: PsiFile): Iterable[CellDescriptor] = cells.get(file).map(_.values).getOrElse(Seq.empty)
override def getCell(file: PsiFile, offset: Int): Option[CellDescriptor] = {
cells.get(file).flatMap(
fileCells => fileCells.rangeImpl(None, Some(offset - 1)).lastOption.map(_._2)
)
}
override def getNextCell(cellDescriptor: CellDescriptor): Option[CellDescriptor] = {
getForRange(
cellDescriptor,
cellDescriptor.getElement.map(_.getTextOffset + CellManager.CELL_START_MARKUP.length),
None
).flatMap(_.headOption).map(_._2)
}
override def getPrevCell(cellDescriptor: CellDescriptor): Option[CellDescriptor] = {
//todo
getForRange(
cellDescriptor,
None,
cellDescriptor.getElement.map(_.getTextOffset - 1)
).flatMap(_.lastOption).map(_._2)
}
override def clearAll(): Unit = {
cells.clear()
}
override def clear(file: PsiFile): Unit = {
cells.remove(file)
}
override def projectClosed(): Unit = {
cells = null
}
private def compare(element: PsiElement)(descriptor: CellDescriptor): Boolean = descriptor.getElement.contains(element)
private def processRawElement(element: PsiElement, processor: (PsiComment, PsiFile) => Boolean): Boolean =
(element, element.getParent) match {
case (comment: PsiComment, file: PsiFile) => processor(comment, file)
case (comment: PsiComment, owner: PsiElement)
if owner.getParent.isInstanceOf[PsiFile] && owner.getTextOffset == comment.getTextOffset =>
processor(comment, owner.getContainingFile)
case _ => false
}
private def refreshMarkers(file: PsiFile): Unit = {
if (!WorksheetFileSettings.getRunType(file).isUsesCell) cells.remove(file)
}
private def getSameFileCells(cellDescriptor: CellDescriptor): Option[mutable.TreeMap[Int, CellDescriptor]] =
cellDescriptor.getElement.flatMap { element => cells.get(element.getContainingFile) }
private def getForRange(cellDescriptor: CellDescriptor, start: Option[Int], end: Option[Int]) = {
getSameFileCells(cellDescriptor).map(_.rangeImpl(start, end))
}
private def checkComment(comment: PsiComment): Boolean = comment.getText.startsWith(CellManager.CELL_START_MARKUP)
private def isStartsNewLine(comment: PsiComment, file: PsiFile): Boolean = {
val offset = comment.getTextRange.getStartOffset
(offset == 0) || (file.findElementAt(offset - 1) match {
case ws: PsiWhiteSpace => ws.getTextOffset == 0 || StringUtil.containsLineBreak(ws.getText)
case _ => false
})
}
private def check(comment: PsiComment, file: PsiFile): Boolean =
canHaveCells(file) && checkComment(comment) && isStartsNewLine(comment, file)
private def checkAndAdd(comment: PsiComment, file: PsiFile): Boolean = canHaveCells(file) && checkComment(comment) && {
def store(): Boolean = {
val offset = comment.getTextOffset
val runType = WorksheetFileSettings.getRunType(file)
cells.get(file) match {
case Some(fileCells) =>
if (offset < fileCells.last._1) {
fileCells.clear()
}
fileCells.put(offset, createCellDescriptor(comment, runType))
case _ =>
cells.put(file, mutable.TreeMap((offset, createCellDescriptor(comment, runType))))
}
true
}
isStartsNewLine(comment, file) && store()
}
} | jastice/intellij-scala | scala/scala-impl/src/org/jetbrains/plugins/scala/worksheet/cell/BasicCellManager.scala | Scala | apache-2.0 | 5,188 |
/*
* Copyright 2001-2008 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest
import java.awt.AWTError
import java.lang.annotation._
import java.io.Serializable
import java.lang.reflect.Constructor
import java.lang.reflect.InvocationTargetException
import java.lang.reflect.Method
import java.lang.reflect.Modifier
import java.nio.charset.CoderMalfunctionError
import javax.xml.parsers.FactoryConfigurationError
import javax.xml.transform.TransformerFactoryConfigurationError
import Suite.simpleNameForTest
import Suite.parseSimpleName
import Suite.stripDollars
import Suite.formatterForSuiteStarting
import Suite.formatterForSuiteCompleted
import Suite.formatterForSuiteAborted
import Suite.anErrorThatShouldCauseAnAbort
import Suite.getSimpleNameOfAnObjectsClass
import scala.collection.immutable.TreeSet
import org.scalatest.events._
import org.scalatest.tools.StandardOutReporter
/**
* A suite of tests. A <code>Suite</code> instance encapsulates a conceptual
* suite (<em>i.e.</em>, a collection) of tests.
*
* <p>
* This trait provides an interface that allows suites of tests to be run.
* Its implementation enables a default way of writing and executing tests. Subtraits and subclasses can
* override <code>Suite</code>'s methods to enable other ways of writing and executing tests.
* This trait's default approach allows tests to be defined as methods whose name starts with "<code>test</code>."
* This approach is easy to understand, and a good way for Scala beginners to start writing tests.
* More advanced Scala programmers may prefer to mix together other <code>Suite</code> subtraits defined in ScalaTest,
* or create their own, to write tests in the way they feel makes them most productive. Here's a quick overview
* of some of the options to help you get started:
* </p>
*
* <p>
* <em>For JUnit 3 users</em>
* </p>
*
* <p>
* If you are using JUnit 3 (version 3.8 or earlier releases) and you want to write JUnit 3 tests in Scala, look at
* <a href="junit/AssertionsForJUnit.html"><code>AssertionsForJUnit</code></a>,
* <a href="junit/ShouldMatchersForJUnit.html"><code>ShouldMatchersForJUnit</code></a>, and
* <a href="junit/JUnit3Suite.html"><code>JUnit3Suite</code></a>.
* </p>
*
* <p>
* <em>For JUnit 4 users</em>
* </p>
*
* <p>
* If you are using JUnit 4 and you want to write JUnit 4 tests in Scala, look at
* <a href="junit/JUnitSuite.html"><code>JUnitSuite</code></a>, and
* <a href="junit/JUnitRunner.html"><code>JUnitRunner</code></a>. With <code>JUnitRunner</code>,
* you can use any of the traits described here and still run your tests with JUnit 4.
* </p>
*
* <p>
* <em>For TestNG users</em>
* </p>
*
* <p>
* If you are using TestNG and you want to write TestNG tests in Scala, look at
* <a href="testng/TestNGSuite.html"><code>TestNGSuite</code></a>.
* </p>
*
* <p>
* <em>For high-level testing</em>
* </p>
*
* <p>
* If you want to write tests at a higher level than unit tests, such as integration tests, acceptance tests,
* or functional tests, check out <a href="FeatureSpec.html"><code>FeatureSpec</code></a>.
* </p>
*
* <p>
* <em>For unit testing</em>
* </p>
*
* <p>
* If you prefer a behavior-driven development (BDD) style, in which tests are combined with text that
* specifies the behavior being tested, look at
* <a href="Spec.html"><code>Spec</code></a>,
* <a href="FlatSpec.html"><code>FlatSpec</code></a>, and
* <a href="WordSpec.html"><code>WordSpec</code></a>. Otherwise, if you just want to write tests
* and don't want to combine testing with specifying, look at
* <a href="FunSuite.html"><code>FunSuite</code></a> or read on to learn how to write
* tests using this base trait, <code>Suite</code>.
* </p>
*
* <p>
* To use this trait's approach to writing tests, simply create classes that
* extend <code>Suite</code> and define test methods. Test methods have names of the form <code>testX</code>,
* where <code>X</code> is some unique, hopefully meaningful, string. A test method must be public and
* can have any result type, but the most common result type is <code>Unit</code>. Here's an example:
* </p>
*
* <pre>
* import org.scalatest.Suite
*
* class MySuite extends Suite {
*
* def testAddition() {
* val sum = 1 + 1
* assert(sum === 2)
* assert(sum + 2 === 4)
* }
*
* def testSubtraction() {
* val diff = 4 - 1
* assert(diff === 3)
* assert(diff - 2 === 1)
* }
* }
* </pre>
*
* <p>
* You can run a <code>Suite</code> by invoking on it one of four overloaded <code>execute</code>
* methods. These methods, which print test results to the
* standard output, are intended to serve as a
* convenient way to run tests from within the Scala interpreter. For example,
* to run <code>MySuite</code> from within the Scala interpreter, you could write:
* </p>
*
* <pre>
* scala> (new MySuite).execute()
* </pre>
*
* <p>
* And you would see:
* </p>
*
* <pre>
* Test Starting - MySuite: testAddition
* Test Succeeded - MySuite: testAddition
* Test Starting - MySuite: testSubtraction
* Test Succeeded - MySuite: testSubtraction
* </pre>
*
* <p>
* Or, to run just the <code>testAddition</code> method, you could write:
* </p>
*
* <pre>
* scala> (new MySuite).execute("testAddition")
* </pre>
*
* <p>
* And you would see:
* </p>
*
* <pre>
* Test Starting - MySuite: testAddition
* Test Succeeded - MySuite: testAddition
* </pre>
*
* <p>
* Two other <code>execute</code> methods that are intended to be run from the interpreter accept a "config" map of key-value
 * pairs (see <a href="#configMapSection">Config map</a>, below). Each of these <code>execute</code> methods invokes a <code>run</code> method that takes seven
* parameters. This <code>run</code> method, which actually executes the suite, will usually be invoked by a test runner, such
* as <code>org.scalatest.tools.Runner</code> or an IDE. See the <a href="tools/Runner$.html">documentation
* for <code>Runner</code></a> for more detail.
* </p>
*
* <p>
* <strong>Assertions and ===</strong>
* </p>
*
* <p>
* Inside test methods in a <code>Suite</code>, you can write assertions by invoking <code>assert</code> and passing in a <code>Boolean</code> expression,
* such as:
* </p>
*
* <pre>
* val left = 2
* val right = 1
* assert(left == right)
* </pre>
*
* <p>
* If the passed expression is <code>true</code>, <code>assert</code> will return normally. If <code>false</code>,
* <code>assert</code> will complete abruptly with a <code>TestFailedException</code>. This exception is usually not caught
* by the test method, which means the test method itself will complete abruptly by throwing the <code>TestFailedException</code>. Any
* test method that completes abruptly with a <code>TestFailedException</code> or any <code>Exception</code> is considered a failed
* test. A test method that returns normally is considered a successful test.
* </p>
*
* <p>
* If you pass a <code>Boolean</code> expression to <code>assert</code>, a failed assertion will be reported, but without
* reporting the left and right values. You can alternatively encode these values in a <code>String</code> passed as
* a second argument to <code>assert</code>, as in:
* </p>
*
* <pre>
* val left = 2
* val right = 1
* assert(left == right, left + " did not equal " + right)
* </pre>
*
* <p>
* Using this form of <code>assert</code>, the failure report will include the left and right values, thereby
* helping you debug the problem. However, ScalaTest provides the <code>===</code> operator to make this easier.
* (The <code>===</code> operator is defined in trait <a href="Assertions.html"><code>Assertions</code></a> which trait <code>Suite</code> extends.)
* You use it like this:
* </p>
*
* <pre>
* val left = 2
* val right = 1
* assert(left === right)
* </pre>
*
* <p>
* Because you use <code>===</code> here instead of <code>==</code>, the failure report will include the left
* and right values. For example, the detail message in the thrown <code>TestFailedException</code> from the <code>assert</code>
* shown previously will include, "2 did not equal 1".
* From this message you will know that the operand on the left had the value 2, and the operand on the right had the value 1.
* </p>
*
* <p>
* If you're familiar with JUnit, you would use <code>===</code>
* in a ScalaTest <code>Suite</code> where you'd use <code>assertEquals</code> in a JUnit <code>TestCase</code>.
* The <code>===</code> operator is made possible by an implicit conversion from <code>Any</code>
* to <code>Equalizer</code>. If you're curious to understand the mechanics, see the <a href="Assertions$Equalizer.html">documentation for
* <code>Equalizer</code></a> and the <code>convertToEqualizer</code> method.
* </p>
*
* <p>
* <strong>Expected results</strong>
* </p>
*
 * <p>
 * Although <code>===</code> provides a natural, readable extension to Scala's <code>assert</code> mechanism,
* as the operands become lengthy, the code becomes less readable. In addition, the <code>===</code> comparison
* doesn't distinguish between actual and expected values. The operands are just called <code>left</code> and <code>right</code>,
* because if one were named <code>expected</code> and the other <code>actual</code>, it would be difficult for people to
* remember which was which. To help with these limitations of assertions, <code>Suite</code> includes a method called <code>expect</code> that
* can be used as an alternative to <code>assert</code> with <code>===</code>. To use <code>expect</code>, you place
* the expected value in parentheses after <code>expect</code>, followed by curly braces containing code
 * that should result in the expected value. For example:
 * </p>
*
* <pre>
* val a = 5
* val b = 2
* expect(2) {
* a - b
* }
* </pre>
*
* <p>
* In this case, the expected value is <code>2</code>, and the code being tested is <code>a - b</code>. This expectation will fail, and
* the detail message in the <code>TestFailedException</code> will read, "Expected 2, but got 3."
* </p>
*
* <p>
* <strong>Intercepted exceptions</strong>
* </p>
*
* <p>
* Sometimes you need to test whether a method throws an expected exception under certain circumstances, such
* as when invalid arguments are passed to the method. You can do this in the JUnit style, like this:
* </p>
*
* <pre>
* val s = "hi"
* try {
* s.charAt(-1)
* fail()
* }
* catch {
* case _: IndexOutOfBoundsException => // Expected, so continue
* }
* </pre>
*
* <p>
* If <code>charAt</code> throws <code>IndexOutOfBoundsException</code> as expected, control will transfer
* to the catch case, which does nothing. If, however, <code>charAt</code> fails to throw an exception,
* the next statement, <code>fail()</code>, will be executed. The <code>fail</code> method always completes abruptly with
* a <code>TestFailedException</code>, thereby signaling a failed test.
* </p>
*
* <p>
* To make this common use case easier to express and read, ScalaTest provides an <code>intercept</code>
* method. You use it like this:
* </p>
*
* <pre>
* val s = "hi"
* intercept[IndexOutOfBoundsException] {
* s.charAt(-1)
* }
* </pre>
*
* <p>
* This code behaves much like the previous example. If <code>charAt</code> throws an instance of <code>IndexOutOfBoundsException</code>,
* <code>intercept</code> will return that exception. But if <code>charAt</code> completes normally, or throws a different
* exception, <code>intercept</code> will complete abruptly with a <code>TestFailedException</code>. The <code>intercept</code> method returns the
* caught exception so that you can inspect it further if you wish, for example, to ensure that data contained inside
* the exception has the expected values. Here's an example:
* </p>
*
* <pre>
* val s = "hi"
* val caught =
* intercept[IndexOutOfBoundsException] {
* s.charAt(-1)
* }
* assert(caught.getMessage === "String index out of range: -1")
* </pre>
*
* <p>
* <strong>Using other assertions</strong>
* </p>
*
* <p>
* ScalaTest also supports another style of assertions via its matchers DSL. By mixing in
* trait <a href="matchers/ShouldMatchers.html"><code>ShouldMatchers</code></a>, you can
* write suites that look like:
* </p>
*
* <pre>
* import org.scalatest.Suite
* import org.scalatest.matchers.ShouldMatchers
*
* class MySuite extends Suite with ShouldMatchers {
*
* def testAddition() {
* val sum = 1 + 1
* sum should equal (2)
* sum + 2 should equal (4)
* }
*
* def testSubtraction() {
* val diff = 4 - 1
* diff should equal (3)
* diff - 2 should equal (1)
* }
* }
* </pre>
*
* <p>If you prefer the word "<code>must</code>" to the word "<code>should</code>," you can alternatively mix in
* trait <a href="matchers/MustMatchers.html"><code>MustMatchers</code></a>.
* </p>
*
* <p>
* If you are comfortable with assertion mechanisms from other test frameworks, chances
* are you can use them with ScalaTest. Any assertion mechanism that indicates a failure with an exception
* can be used as is with ScalaTest. For example, to use the <code>assertEquals</code>
* methods provided by JUnit or TestNG, simply import them and use them. (You will of course need
* to include the relevant JAR file for the framework whose assertions you want to use on either the
* classpath or runpath when you run your tests.) Here's an example in which JUnit's assertions are
* imported, then used within a ScalaTest suite:
* </p>
*
* <pre>
* import org.scalatest.Suite
* import org.junit.Assert._
*
* class MySuite extends Suite {
*
* def testAddition() {
* val sum = 1 + 1
* assertEquals(2, sum)
* assertEquals(4, sum + 2)
* }
*
* def testSubtraction() {
* val diff = 4 - 1
* assertEquals(3, diff)
* assertEquals(1, diff - 2)
* }
* }
* </pre>
*
* <p>
* <strong>Nested suites</strong>
* </p>
*
* <p>
* A <code>Suite</code> can refer to a collection of other <code>Suite</code>s,
* which are called <em>nested</em> <code>Suite</code>s. Those nested <code>Suite</code>s can in turn have
* their own nested <code>Suite</code>s, and so on. Large test suites can be organized, therefore, as a tree of
* nested <code>Suite</code>s.
* This trait's <code>run</code> method, in addition to invoking its
* test methods, invokes <code>run</code> on each of its nested <code>Suite</code>s.
* </p>
*
* <p>
* A <code>List</code> of a <code>Suite</code>'s nested <code>Suite</code>s can be obtained by invoking its
* <code>nestedSuites</code> method. If you wish to create a <code>Suite</code> that serves as a
* container for nested <code>Suite</code>s, whether or not it has test methods of its own, simply override <code>nestedSuites</code>
* to return a <code>List</code> of the nested <code>Suite</code>s. Because this is a common use case, ScalaTest provides
* a convenience <code>SuperSuite</code> class, which takes a <code>List</code> of nested <code>Suite</code>s as a constructor
* parameter. Here's an example:
* </p>
*
* <pre>
* import org.scalatest.Suite
*
* class ASuite extends Suite
* class BSuite extends Suite
* class CSuite extends Suite
*
* class AlphabetSuite extends SuperSuite(
* List(
* new ASuite,
* new BSuite,
* new CSuite
* )
* )
* </pre>
*
* <p>
* If you now run <code>AlphabetSuite</code>, for example from the interpreter:
* </p>
*
* <pre>
 * scala> (new AlphabetSuite).execute()
* </pre>
*
* <p>
* You will see reports printed to the standard output that indicate nested
* suites—<code>ASuite</code>, <code>BSuite</code>, and
* <code>CSuite</code>—were run.
* </p>
*
* <p>
* Note that <code>Runner</code> can discover <code>Suite</code>s automatically, so you need not
* necessarily specify <code>SuperSuite</code>s explicitly. See the <a href="tools/Runner$.html">documentation
* for <code>Runner</code></a> for more information.
* </p>
*
* <p>
* <strong>Shared fixtures</strong>
* </p>
*
* <p>
 * A test <em>fixture</em> is composed of the objects and other artifacts (such as files, sockets, database
* connections, etc.) used by tests to do their work.
* If a fixture is used by only one test method, then the definitions of the fixture objects can
* be local to the method, such as the objects assigned to <code>sum</code> and <code>diff</code> in the
* previous <code>MySuite</code> examples. If multiple methods need to share an immutable fixture, one approach
* is to assign them to instance variables. Here's a (very contrived) example, in which the object assigned
* to <code>shared</code> is used by multiple test methods:
* </p>
*
* <pre>
* import org.scalatest.Suite
*
* class MySuite extends Suite {
*
* // Sharing immutable fixture objects via instance variables
* val shared = 5
*
* def testAddition() {
* val sum = 2 + 3
* assert(sum === shared)
* }
*
* def testSubtraction() {
* val diff = 7 - 2
* assert(diff === shared)
* }
* }
* </pre>
*
* <p>
* In some cases, however, shared <em>mutable</em> fixture objects may be changed by test methods such that
* they need to be recreated or reinitialized before each test. Shared resources such
* as files or database connections may also need to
* be created and initialized before, and cleaned up after, each test. JUnit 3 offers methods <code>setUp</code> and
* <code>tearDown</code> for this purpose. In ScalaTest, you can use the <code>BeforeAndAfterEach</code> trait,
* which will be described later, to implement an approach similar to JUnit's <code>setUp</code>
* and <code>tearDown</code>, however, this approach usually involves reassigning <code>var</code>s
* between tests. Before going that route, you may wish to consider some approaches that
* avoid <code>var</code>s. One approach is to write one or more <em>create-fixture</em> methods
* that return a new instance of a needed object (or a tuple or case class holding new instances of
* multiple objects) each time it is called. You can then call a create-fixture method at the beginning of each
* test method that needs the fixture, storing the fixture object or objects in local variables. Here's an example:
* </p>
*
* <pre>
* import org.scalatest.Suite
* import scala.collection.mutable.ListBuffer
*
* class MySuite extends Suite {
*
* // create objects needed by tests and return as a tuple
* def createFixture = (
* new StringBuilder("ScalaTest is "),
* new ListBuffer[String]
* )
*
* def testEasy() {
* val (builder, lbuf) = createFixture
* builder.append("easy!")
* assert(builder.toString === "ScalaTest is easy!")
* assert(lbuf.isEmpty)
* lbuf += "sweet"
* }
*
* def testFun() {
* val (builder, lbuf) = createFixture
* builder.append("fun!")
* assert(builder.toString === "ScalaTest is fun!")
* assert(lbuf.isEmpty)
* }
* }
* </pre>
*
* <p>
* If different tests in the same <code>Suite</code> require different fixtures, you can create multiple create-fixture methods and
 * call the method (or methods) needed by each test at the beginning of the test. If every test method requires the same set of
* mutable fixture objects, one other approach you can take is make them simply <code>val</code>s and mix in trait
* <a href="OneInstancePerTest.html"><code>OneInstancePerTest</code></a>. If you mix in <code>OneInstancePerTest</code>, each test
* will be run in its own instance of the <code>Suite</code>, similar to the way JUnit tests are executed.
* </p>
*
* <p>
* Although the create-fixture and <code>OneInstancePerTest</code> approaches take care of setting up a fixture before each
* test, they don't address the problem of cleaning up a fixture after the test completes. In this situation,
* one option is to mix in the <a href="BeforeAndAfterEach.html"><code>BeforeAndAfterEach</code></a> trait.
* <code>BeforeAndAfterEach</code>'s <code>beforeEach</code> method will be run before, and its <code>afterEach</code>
* method after, each test (like JUnit's <code>setUp</code> and <code>tearDown</code>
* methods, respectively).
 * For example, you could create a temporary file before each test, and delete it afterwards, like this:
* </p>
*
* <pre>
* import org.scalatest.Suite
* import org.scalatest.BeforeAndAfterEach
* import java.io.FileReader
* import java.io.FileWriter
* import java.io.File
*
* class MySuite extends Suite with BeforeAndAfterEach {
*
* private val FileName = "TempFile.txt"
* private var reader: FileReader = _
*
* // Set up the temp file needed by the test
* override def beforeEach() {
* val writer = new FileWriter(FileName)
* try {
* writer.write("Hello, test!")
* }
* finally {
* writer.close()
* }
*
* // Create the reader needed by the test
* reader = new FileReader(FileName)
* }
*
* // Close and delete the temp file
* override def afterEach() {
* reader.close()
* val file = new File(FileName)
* file.delete()
* }
*
* def testReadingFromTheTempFile() {
* var builder = new StringBuilder
* var c = reader.read()
* while (c != -1) {
* builder.append(c.toChar)
* c = reader.read()
* }
* assert(builder.toString === "Hello, test!")
* }
*
* def testFirstCharOfTheTempFile() {
* assert(reader.read() === 'H')
* }
*
* def testWithoutAFixture() {
* assert(1 + 1 === 2)
* }
* }
* </pre>
*
* <p>
* In this example, the instance variable <code>reader</code> is a <code>var</code>, so
* it can be reinitialized between tests by the <code>beforeEach</code> method.
* </p>
*
* <p>
 * Although the <code>BeforeAndAfterEach</code> approach should be familiar to users of most
 * other test frameworks, ScalaTest provides another alternative that also allows you to perform cleanup
* after each test: overriding <code>withFixture(NoArgTest)</code>.
* To execute each test, <code>Suite</code>'s implementation of the <code>runTest</code> method wraps an invocation
* of the appropriate test method in a no-arg function. <code>runTest</code> passes that test function to the <code>withFixture(NoArgTest)</code>
* method, which is responsible for actually running the test by invoking the function. <code>Suite</code>'s
* implementation of <code>withFixture(NoArgTest)</code> simply invokes the function, like this:
* </p>
*
* <pre>
* // Default implementation
* protected def withFixture(test: NoArgTest) {
* test()
* }
* </pre>
*
* <p>
* The <code>withFixture(NoArgTest)</code> method exists so that you can override it and set a fixture up before, and clean it up after, each test.
* Thus, the previous temp file example could also be implemented without mixing in <code>BeforeAndAfterEach</code>, like this:
* </p>
*
* <pre>
* import org.scalatest.Suite
* import java.io.FileReader
* import java.io.FileWriter
* import java.io.File
*
* class MySuite extends Suite {
*
* private var reader: FileReader = _
*
* override def withFixture(test: NoArgTest) {
*
* val FileName = "TempFile.txt"
*
* // Set up the temp file needed by the test
* val writer = new FileWriter(FileName)
* try {
* writer.write("Hello, test!")
* }
* finally {
* writer.close()
* }
*
* // Create the reader needed by the test
* reader = new FileReader(FileName)
*
* try {
* test() // Invoke the test function
* }
* finally {
* // Close and delete the temp file
* reader.close()
* val file = new File(FileName)
* file.delete()
* }
* }
*
* def testReadingFromTheTempFile() {
* var builder = new StringBuilder
* var c = reader.read()
* while (c != -1) {
* builder.append(c.toChar)
* c = reader.read()
* }
* assert(builder.toString === "Hello, test!")
* }
*
* def testFirstCharOfTheTempFile() {
* assert(reader.read() === 'H')
* }
*
* def testWithoutAFixture() {
* assert(1 + 1 === 2)
* }
* }
* </pre>
*
* <p>
* If you prefer to keep your test classes immutable, one final variation is to use the
* <a href="fixture/FixtureSuite.html"><code>FixtureSuite</code></a> trait from the
* <code>org.scalatest.fixture</code> package. Tests in an <code>org.scalatest.fixture.FixtureSuite</code> can have a fixture
* object passed in as a parameter. You must indicate the type of the fixture object
 * by defining the <code>FixtureParam</code> type member and define a <code>withFixture</code> method that takes a <em>one-arg</em> test function.
* (A <code>FixtureSuite</code> has two overloaded <code>withFixture</code> methods, therefore, one that takes a <code>OneArgTest</code>
* and the other, inherited from <code>Suite</code>, that takes a <code>NoArgTest</code>.)
* Inside the <code>withFixture(OneArgTest)</code> method, you create the fixture, pass it into the test function, then perform any
* necessary cleanup after the test function returns. Instead of invoking each test directly, a <code>FixtureSuite</code> will
* pass a function that invokes the code of a test to <code>withFixture(OneArgTest)</code>. Your <code>withFixture(OneArgTest)</code> method, therefore,
* is responsible for actually running the code of the test by invoking the test function.
* For example, you could pass the temp file reader fixture to each test that needs it
* by overriding the <code>withFixture(OneArgTest)</code> method of a <code>FixtureSuite</code>, like this:
* </p>
*
* <pre>
* import org.scalatest.fixture.FixtureSuite
* import java.io.FileReader
* import java.io.FileWriter
* import java.io.File
*
* class MySuite extends FixtureSuite {
*
* // No vars needed in this one
*
* type FixtureParam = FileReader
*
* def withFixture(test: OneArgTest) {
*
* val FileName = "TempFile.txt"
*
* // Set up the temp file needed by the test
* val writer = new FileWriter(FileName)
* try {
* writer.write("Hello, test!")
* }
* finally {
* writer.close()
* }
*
* // Create the reader needed by the test
* val reader = new FileReader(FileName)
*
* try {
* // Run the test, passing in the temp file reader
* test(reader)
* }
* finally {
* // Close and delete the temp file
* reader.close()
* val file = new File(FileName)
* file.delete()
* }
* }
*
* def testReadingFromTheTempFile(reader: FileReader) {
* var builder = new StringBuilder
* var c = reader.read()
* while (c != -1) {
* builder.append(c.toChar)
* c = reader.read()
* }
* assert(builder.toString === "Hello, test!")
* }
*
* def testFirstCharOfTheTempFile(reader: FileReader) {
* assert(reader.read() === 'H')
* }
*
* def testWithoutAFixture() {
* assert(1 + 1 === 2)
* }
* }
* </pre>
*
* <p>
* It is worth noting that the only difference in the test code between the mutable
* <code>BeforeAndAfterEach</code> approach shown previously and the immutable <code>FixtureSuite</code>
* approach shown here is that two of the <code>FixtureSuite</code>'s test methods take a <code>FileReader</code> as
* a parameter. Otherwise the test code is identical. One benefit of the explicit parameter is that, as demonstrated
* by the <code>testWithoutAFixture</code> method, a <code>FixtureSuite</code>
* test method need not take the fixture. (Tests that don't take a fixture as a parameter are passed to the <code>withFixture</code>
* that takes a <code>NoArgTest</code>, shown previously.) So you can have some tests that take a fixture, and others that don't.
* In this case, the <code>FixtureSuite</code> provides documentation indicating which
* test methods use the fixture and which don't, whereas the <code>BeforeAndAfterEach</code> approach does not.
* </p>
*
* <p>
* If you want to execute code before and after all tests (and nested suites) in a suite, such
* as you could do with <code>@BeforeClass</code> and <code>@AfterClass</code>
* annotations in JUnit 4, you can use the <code>beforeAll</code> and <code>afterAll</code>
* methods of <code>BeforeAndAfterAll</code>. See the documentation for <code>BeforeAndAfterAll</code> for
* an example.
* </p>
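 *
 * <p>
 * As a quick sketch (see that documentation for the full story; the shared resource here is
 * purely illustrative), such a suite might look like this:
 * </p>
 *
 * <pre>
 * import org.scalatest._
 *
 * class MySuite extends Suite with BeforeAndAfterAll {
 *
 *   override def beforeAll() {
 *     // Set up a resource shared by every test in this suite
 *   }
 *
 *   override def afterAll() {
 *     // Tear the shared resource back down after all tests have run
 *   }
 *
 *   def testSomething() {
 *     assert(1 + 1 === 2)
 *   }
 * }
 * </pre>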
*
* <p>
* <a name="configMapSection"><strong>The config map</strong></a>
* </p>
*
* <p>
* In some cases you may need to pass information to a suite of tests.
* For example, perhaps a suite of tests needs to grab information from a file, and you want
* to be able to specify a different filename during different runs. You can accomplish this in ScalaTest by passing
* the filename in the <em>config</em> map of key-value pairs, which is passed to <code>run</code> as a <code>Map[String, Any]</code>.
* The values in the config map are called "config objects," because they can be used to <em>configure</em>
* suites, reporters, and tests.
* </p>
*
* <p>
 * You can specify a string config object via the ScalaTest <code>Runner</code>, either via the command line
* or ScalaTest's ant task.
* (See the <a href="tools/Runner$.html#configMapSection">documentation for Runner</a> for information on how to specify
* config objects on the command line.)
* The config map is passed to <code>run</code>, <code>runNestedSuites</code>, <code>runTests</code>, and <code>runTest</code>,
* so one way to access it in your suite is to override one of those methods. If you need to use the config map inside your tests, you
* can use one of the traits in the <code>org.scalatest.fixture</code> package. (See the
* <a href="fixture/FixtureSuite.html">documentation for <code>FixtureSuite</code></a>
* for instructions on how to access the config map in tests.)
* </p>
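 *
 * <p>
 * As a sketch (the key name <code>"tempFileName"</code> is purely illustrative), a suite could
 * grab a filename from the config map by overriding <code>runTest</code>, stashing the value in
 * an instance variable before invoking the superclass implementation:
 * </p>
 *
 * <pre>
 * import org.scalatest._
 *
 * class MySuite extends Suite {
 *
 *   private var tempFileName: String = _
 *
 *   // Grab the config object before running each test
 *   override protected def runTest(testName: String, reporter: Reporter, stopper: Stopper,
 *       configMap: Map[String, Any], tracker: Tracker) {
 *     tempFileName = configMap("tempFileName").toString
 *     super.runTest(testName, reporter, stopper, configMap, tracker)
 *   }
 *
 *   def testWithConfiguredFile() {
 *     assert(tempFileName != null)
 *   }
 * }
 * </pre>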
*
* <p>
* <strong>Tagging tests</strong>
* </p>
*
* <p>
* A <code>Suite</code>'s tests may be classified into groups by <em>tagging</em> them with string names. When executing
* a <code>Suite</code>, groups of tests can optionally be included and/or excluded. In this
* trait's implementation, tags are indicated by annotations attached to the test method. To
* create a new tag type to use in <code>Suite</code>s, simply define a new Java annotation that itself is annotated with the <code>org.scalatest.TagAnnotation</code> annotation.
* (Currently, for annotations to be
* visible in Scala programs via Java reflection, the annotations themselves must be written in Java.) For example,
* to create a tag named <code>SlowAsMolasses</code>, to use to mark slow tests, you would
* write in Java:
* </p>
*
* <pre>
* import java.lang.annotation.*;
 * import org.scalatest.TagAnnotation;
*
* @TagAnnotation
* @Retention(RetentionPolicy.RUNTIME)
* @Target({ElementType.METHOD, ElementType.TYPE})
* public @interface SlowAsMolasses {}
* </pre>
*
* <p>
* Given this new annotation, you could place a <code>Suite</code> test method into the <code>SlowAsMolasses</code> group
* (<em>i.e.</em>, tag it as being <code>SlowAsMolasses</code>) like this:
* </p>
*
* <pre>
* @SlowAsMolasses
* def testSleeping() = sleep(1000000)
* </pre>
*
* <p>
* The primary <code>run</code> method takes a <code>Filter</code>, whose constructor takes an optional
 * <code>Set[String]</code> called <code>tagsToInclude</code> and a <code>Set[String]</code> called
 * <code>tagsToExclude</code>. If <code>tagsToInclude</code> is <code>None</code>, all tests will be run
 * except those belonging to tags listed in the
* <code>tagsToExclude</code> <code>Set</code>. If <code>tagsToInclude</code> is defined, only tests
* belonging to tags mentioned in the <code>tagsToInclude</code> set, and not mentioned in <code>tagsToExclude</code>,
* will be run.
* </p>
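 *
 * <p>
 * For example, to run a suite's tests while excluding those tagged <code>SlowAsMolasses</code>,
 * you could pass a <code>Filter</code> to <code>run</code> like this (a sketch; tag names are
 * the fully qualified annotation names, so a default package is assumed here):
 * </p>
 *
 * <pre>
 * (new MySuite).run(None, new StandardOutReporter, new Stopper {},
 *     Filter(None, Set("SlowAsMolasses")), Map(), None, new Tracker)
 * </pre>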
*
* <p>
 * <strong>Note: the <code>TagAnnotation</code> annotation was introduced in ScalaTest 1.0, when "groups" were renamed
 * to "tags." In 1.0 and 1.1, <code>TagAnnotation</code> is not yet required on annotations used to tag <code>Suite</code>
 * methods. Any annotation on a <code>Suite</code> method will be considered a tag until 1.2, to give users time to add
 * <code>TagAnnotation</code> to any tag annotations they created prior to the 1.0 release. From 1.2 onward, only annotations
 * themselves annotated with <code>TagAnnotation</code> will be considered tag annotations.</strong>
* </p>
*
* <p>
* <strong>Ignored tests</strong>
* </p>
*
* <p>
* Another common use case is that tests must be “temporarily” disabled, with the
* good intention of resurrecting the test at a later time. ScalaTest provides an <code>Ignore</code>
* annotation for this purpose. You use it like this:
* </p>
*
* <pre>
* import org.scalatest.Suite
* import org.scalatest.Ignore
*
* class MySuite extends Suite {
*
* def testAddition() {
* val sum = 1 + 1
* assert(sum === 2)
* assert(sum + 2 === 4)
* }
*
* @Ignore
* def testSubtraction() {
* val diff = 4 - 1
* assert(diff === 3)
* assert(diff - 2 === 1)
* }
* }
* </pre>
*
* <p>
* If you run this version of <code>MySuite</code> with:
* </p>
*
* <pre>
* scala> (new MySuite).run()
* </pre>
*
* <p>
* It will run only <code>testAddition</code> and report that <code>testSubtraction</code> was ignored. You'll see:
* </p>
*
* <pre>
* Test Starting - MySuite: testAddition
* Test Succeeded - MySuite: testAddition
* Test Ignored - MySuite: testSubtraction
* </pre>
*
* <p>
* <code>Ignore</code> is implemented as a tag. The <code>Filter</code> class effectively
 * adds <code>org.scalatest.Ignore</code> to the <code>tagsToExclude</code> <code>Set</code> if it is not already
* in the <code>tagsToExclude</code> set passed to its primary constructor. The only difference between
* <code>org.scalatest.Ignore</code> and the tags you may define and exclude is that ScalaTest reports
* ignored tests to the <code>Reporter</code>. The reason ScalaTest reports ignored tests is as a feeble
* attempt to encourage ignored tests to be eventually fixed and added back into the active suite of tests.
* </p>
*
* <p>
* <strong>Pending tests</strong>
* </p>
*
* <p>
* A <em>pending test</em> is one that has been given a name but is not yet implemented. The purpose of
* pending tests is to facilitate a style of testing in which documentation of behavior is sketched
 * out before tests are written to verify that behavior (and often, before the behavior of
* the system being tested is itself implemented). Such sketches form a kind of specification of
* what tests and functionality to implement later.
* </p>
*
* <p>
* To support this style of testing, a test can be given a name that specifies one
* bit of behavior required by the system being tested. The test can also include some code that
* sends more information about the behavior to the reporter when the tests run. At the end of the test,
* it can call method <code>pending</code>, which will cause it to complete abruptly with <code>TestPendingException</code>.
* Because tests in ScalaTest can be designated as pending with <code>TestPendingException</code>, both the test name and any information
* sent to the reporter when running the test can appear in the report of a test run. (In other words,
* the code of a pending test is executed just like any other test.) However, because the test completes abruptly
* with <code>TestPendingException</code>, the test will be reported as pending, to indicate
* the actual test, and possibly the functionality it is intended to test, has not yet been implemented.
* </p>
*
* <p>
* Although pending tests may be used more often in specification-style suites, such as
 * <code>org.scalatest.Spec</code>, you can also use them in <code>Suite</code>, like this:
* </p>
*
* <pre>
* import org.scalatest.Suite
*
* class MySuite extends Suite {
*
* def testAddition() {
* val sum = 1 + 1
* assert(sum === 2)
* assert(sum + 2 === 4)
* }
*
* def testSubtraction() { pending }
* }
* </pre>
*
* <p>
* If you run this version of <code>MySuite</code> with:
* </p>
*
* <pre>
* scala> (new MySuite).run()
* </pre>
*
* <p>
* It will run both tests but report that <code>testSubtraction</code> is pending. You'll see:
* </p>
*
* <pre>
* Test Starting - MySuite: testAddition
* Test Succeeded - MySuite: testAddition
* Test Starting - MySuite: testSubtraction
* Test Pending - MySuite: testSubtraction
* </pre>
*
* <p>
* <strong>Informers</strong>
* </p>
*
* <p>
 * One of the parameters to the primary <code>run</code> method is a <code>Reporter</code>, which
* will collect and report information about the running suite of tests.
* Information about suites and tests that were run, whether tests succeeded or failed,
* and tests that were ignored will be passed to the <code>Reporter</code> as the suite runs.
* Most often the reporting done by default by <code>Suite</code>'s methods will be sufficient, but
* occasionally you may wish to provide custom information to the <code>Reporter</code> from a test method.
* For this purpose, you can optionally include an <code>Informer</code> parameter in a test method, and then
* pass the extra information to the <code>Informer</code> via its <code>apply</code> method. The <code>Informer</code>
* will then pass the information to the <code>Reporter</code> by sending an <code>InfoProvided</code> event.
* Here's an example:
* </p>
*
* <pre>
* import org.scalatest._
*
* class MySuite extends Suite {
* def testAddition(info: Informer) {
* assert(1 + 1 === 2)
* info("Addition seems to work")
* }
* }
* </pre>
*
 * <p>
 * If you run this <code>Suite</code> from the interpreter, you will see the message
 * included in the printed report:
 * </p>
*
* <pre>
* scala> (new MySuite).run()
* Test Starting - MySuite: testAddition(Reporter)
* Info Provided - MySuite: testAddition(Reporter)
* Addition seems to work
* Test Succeeded - MySuite: testAddition(Reporter)
* </pre>
*
* <p>
* <strong>Executing suites in parallel</strong>
* </p>
*
* <p>
* The primary <code>run</code> method takes as its last parameter an optional <code>Distributor</code>. If
* a <code>Distributor</code> is passed in, this trait's implementation of <code>run</code> puts its nested
* <code>Suite</code>s into the distributor rather than executing them directly. The caller of <code>run</code>
* is responsible for ensuring that some entity runs the <code>Suite</code>s placed into the
* distributor. The <code>-c</code> command line parameter to <code>Runner</code>, for example, will cause
* <code>Suite</code>s put into the <code>Distributor</code> to be run in parallel via a pool of threads.
* </p>
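 *
 * <p>
 * For illustration only, here is a minimal sketch of a <code>Distributor</code> (the class name
 * is made up) that, rather than distributing work, simply runs each <code>Suite</code> it is
 * handed on the calling thread:
 * </p>
 *
 * <pre>
 * class RunOnCallingThreadDistributor(reporter: Reporter, stopper: Stopper) extends Distributor {
 *   def apply(suite: Suite, tracker: Tracker) {
 *     // A real Distributor would likely queue the suite for a worker thread, and
 *     // would forward the caller's Filter and config map rather than defaults
 *     suite.run(None, reporter, stopper, Filter(), Map(), None, tracker)
 *   }
 * }
 * </pre>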
*
* <p>
 * <strong>Treatment of <code>java.lang.Error</code>s</strong>
* </p>
*
* <p>
* The Javadoc documentation for <code>java.lang.Error</code> states:
* </p>
*
* <blockquote>
* An <code>Error</code> is a subclass of <code>Throwable</code> that indicates serious problems that a reasonable application should not try to catch. Most
* such errors are abnormal conditions.
* </blockquote>
*
* <p>
* Because <code>Error</code>s are used to denote serious errors, trait <code>Suite</code> and its subtypes in the ScalaTest API do not always treat a test
* that completes abruptly with an <code>Error</code> as a test failure, but sometimes as an indication that serious problems
* have arisen that should cause the run to abort. For example, if a test completes abruptly with an <code>OutOfMemoryError</code>,
* it will not be reported as a test failure, but will instead cause the run to abort. Because not everyone uses <code>Error</code>s only to represent serious
* problems, however, ScalaTest only behaves this way for the following exception types (and their subclasses):
 * </p>
*
* <ul>
* <li><code>java.lang.annotation.AnnotationFormatError</code></li>
* <li><code>java.awt.AWTError</code></li>
* <li><code>java.nio.charset.CoderMalfunctionError</code></li>
* <li><code>javax.xml.parsers.FactoryConfigurationError</code></li>
* <li><code>java.lang.LinkageError</code></li>
* <li><code>java.lang.ThreadDeath</code></li>
* <li><code>javax.xml.transform.TransformerFactoryConfigurationError</code></li>
* <li><code>java.lang.VirtualMachineError</code></li>
* </ul>
*
* <p>
 * The previous list includes all <code>Error</code>s that exist as part of the Java 1.5 API, excluding <code>java.lang.AssertionError</code>. ScalaTest
* does treat a thrown <code>AssertionError</code> as an indication of a test failure. In addition, any other <code>Error</code> that is not an instance of a
* type mentioned in the previous list will be caught by the <code>Suite</code> traits in the ScalaTest API and reported as the cause of a test failure.
* </p>
*
* <p>
* Although trait <code>Suite</code> and all its subtypes in the ScalaTest API consistently behave this way with regard to <code>Error</code>s,
* this behavior is not required by the contract of <code>Suite</code>. Subclasses and subtraits that you define, for example, may treat all
* <code>Error</code>s as test failures, or indicate errors in some other way that has nothing to do with exceptions.
* </p>
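 *
 * <p>
 * For instance, a subclass taking such an alternative approach might classify thrown
 * <code>Error</code>s itself, along these lines (a sketch; the method name is illustrative):
 * </p>
 *
 * <pre>
 * // Treat only VirtualMachineErrors (such as OutOfMemoryError) as fatal to the run;
 * // count every other Throwable, including other Errors, as a test failure
 * def shouldAbortRun(t: Throwable): Boolean =
 *   t match {
 *     case _: VirtualMachineError => true
 *     case _ => false
 *   }
 * </pre>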
*
* <p>
* <strong>Extensibility</strong>
* </p>
*
* <p>
* Trait <code>Suite</code> provides default implementations of its methods that should
* be sufficient for most applications, but many methods can be overridden when desired. Here's
* a summary of the methods that are intended to be overridden:
* </p>
*
* <ul>
* <li><code>run</code> - override this method to define custom ways to run suites of
* tests.</li>
* <li><code>runNestedSuites</code> - override this method to define custom ways to run nested suites.</li>
* <li><code>runTests</code> - override this method to define custom ways to run a suite's tests.</li>
* <li><code>runTest</code> - override this method to define custom ways to run a single named test.</li>
* <li><code>testNames</code> - override this method to specify the <code>Suite</code>'s test names in a custom way.</li>
* <li><code>tags</code> - override this method to specify the <code>Suite</code>'s test tags in a custom way.</li>
* <li><code>nestedSuites</code> - override this method to specify the <code>Suite</code>'s nested <code>Suite</code>s in a custom way.</li>
* <li><code>suiteName</code> - override this method to specify the <code>Suite</code>'s name in a custom way.</li>
* <li><code>expectedTestCount</code> - override this method to count this <code>Suite</code>'s expected tests in a custom way.</li>
* </ul>
*
* <p>
* For example, this trait's implementation of <code>testNames</code> performs reflection to discover methods starting with <code>test</code>,
* and places these in a <code>Set</code> whose iterator returns the names in alphabetical order. If you wish to run tests in a different
* order in a particular <code>Suite</code>, perhaps because a test named <code>testAlpha</code> can only succeed after a test named
* <code>testBeta</code> has run, you can override <code>testNames</code> so that it returns a <code>Set</code> whose iterator returns
* <code>testBeta</code> <em>before</em> <code>testAlpha</code>. (This trait's implementation of <code>run</code> will invoke tests
* in the order they come out of the <code>testNames</code> <code>Set</code> iterator.)
* </p>
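 *
 * <p>
 * Here is a sketch of such an override (assuming Scala 2.8's <code>Ordering</code>; reversing
 * the natural <code>String</code> order makes <code>testBeta</code> come out of the iterator
 * before <code>testAlpha</code>):
 * </p>
 *
 * <pre>
 * import scala.collection.immutable.TreeSet
 *
 * // Assumes this Suite defines exactly two tests, testAlpha and testBeta
 * override def testNames: Set[String] =
 *   TreeSet("testAlpha", "testBeta")(Ordering[String].reverse)
 * </pre>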
*
* <p>
* Alternatively, you may not like starting your test methods with <code>test</code>, and prefer using <code>@Test</code> annotations in
 * the style of Java's JUnit 4 or TestNG. If so, you can override <code>testNames</code> to discover tests using either of these two APIs'
 * <code>@Test</code> annotations, or an annotation of your own invention. (This is in fact
* how <code>org.scalatest.junit.JUnitSuite</code> and <code>org.scalatest.testng.TestNGSuite</code> work.)
* </p>
*
* <p>
* Moreover, <em>test</em> in ScalaTest does not necessarily mean <em>test method</em>. A test can be anything that can be given a name,
* that starts and either succeeds or fails, and can be ignored. In <code>org.scalatest.FunSuite</code>, for example, tests are represented
* as function values. This
* approach might look foreign to JUnit users, but may feel more natural to programmers with a functional programming background.
* To facilitate this style of writing tests, <code>FunSuite</code> overrides <code>testNames</code>, <code>runTest</code>, and <code>run</code> such that you can
* define tests as function values.
* </p>
*
* <p>
* You can also model existing JUnit 3, JUnit 4, or TestNG tests as suites of tests, thereby incorporating tests written in Java into a ScalaTest suite.
* The "wrapper" classes in packages <code>org.scalatest.junit</code> and <code>org.scalatest.testng</code> exist to make this easy.
* No matter what legacy tests you may have, it is likely you can create or use an existing <code>Suite</code> subclass that allows you to model those tests
* as ScalaTest suites and tests and incorporate them into a ScalaTest suite. You can then write new tests in Scala and continue supporting
* older tests in Java.
* </p>
*
* @author Bill Venners
*/
@serializable
trait Suite extends Assertions with AbstractSuite { thisSuite =>
import Suite.TestMethodPrefix, Suite.InformerInParens, Suite.IgnoreAnnotation
/*
* @param nestedSuites A <CODE>List</CODE> of <CODE>Suite</CODE>
* objects. The specified <code>List</code> must be non-empty. Each element must be non-<code>null</code> and an instance
* of <CODE>org.scalatest.Suite</CODE>.
*
* @throws NullPointerException if <CODE>nestedSuites</CODE>
* is <CODE>null</CODE> or any element of <CODE>nestedSuites</CODE>
* set is <CODE>null</CODE>.
*/
/**
* A test function taking no arguments, which also provides a test name and config map.
*
* <p>
* <code>Suite</code>'s implementation of <code>runTest</code> passes instances of this trait
* to <code>withFixture</code> for every test method it executes. It invokes <code>withFixture</code>
* for every test, including test methods that take an <code>Informer</code>. For the latter case,
* the <code>Informer</code> to pass to the test method is already contained inside the
* <code>NoArgTest</code> instance passed to <code>withFixture</code>.
* </p>
*/
protected trait NoArgTest extends (() => Unit) {
/**
* The name of this test.
*/
def name: String
/**
* Runs the code of the test.
*/
def apply()
/**
* A <code>Map[String, Any]</code> containing objects that can be used
* to configure the fixture and test.
*/
def configMap: Map[String, Any]
}
// should nestedSuites return a Set[String] instead?
/**
* A <code>List</code> of this <code>Suite</code> object's nested <code>Suite</code>s. If this <code>Suite</code> contains no nested <code>Suite</code>s,
* this method returns an empty <code>List</code>. This trait's implementation of this method returns an empty <code>List</code>.
*/
def nestedSuites: List[Suite] = Nil
/**
* Executes this <code>Suite</code>, printing results to the standard output.
*
* <p>
* This method implementation calls <code>run</code> on this <code>Suite</code>, passing in:
* </p>
*
* <ul>
* <li><code>testName</code> - <code>None</code></li>
* <li><code>reporter</code> - a reporter that prints to the standard output</li>
* <li><code>stopper</code> - a <code>Stopper</code> whose <code>apply</code> method always returns <code>false</code></li>
* <li><code>filter</code> - a <code>Filter</code> constructed with <code>None</code> for <code>tagsToInclude</code> and <code>Set()</code>
* for <code>tagsToExclude</code></li>
* <li><code>configMap</code> - an empty <code>Map[String, Any]</code></li>
* <li><code>distributor</code> - <code>None</code></li>
* <li><code>tracker</code> - a new <code>Tracker</code></li>
* </ul>
*
* <p>
* This method serves as a convenient way to execute a <code>Suite</code>, especially from
* within the Scala interpreter.
* </p>
*
* <p>
* Note: In ScalaTest, the terms "execute" and "run" basically mean the same thing and
 * can be used interchangeably. The reason this convenience method and its three overloaded forms
* aren't named <code>run</code>
* is because <code>junit.framework.TestCase</code> declares a <code>run</code> method
* that takes no arguments but returns a <code>junit.framework.TestResult</code>. That
* <code>run</code> method would not overload with this method if it were named <code>run</code>,
* because it would have the same parameters but a different return type than the one
* defined in <code>TestCase</code>. To facilitate integration with JUnit 3, therefore,
* these convenience "run" methods are named <code>execute</code>. In particular, this allows trait
* <code>org.scalatest.junit.JUnit3Suite</code> to extend both <code>org.scalatest.Suite</code> and
 * <code>junit.framework.TestCase</code>, which enables the creation of classes that
* can be run with either ScalaTest or JUnit 3.
* </p>
*/
final def execute() {
run(None, new StandardOutReporter, new Stopper {}, Filter(), Map(), None, new Tracker)
}
/**
* Executes this <code>Suite</code> with the specified <code>configMap</code>, printing results to the standard output.
*
* <p>
* This method implementation calls <code>run</code> on this <code>Suite</code>, passing in:
* </p>
*
* <ul>
* <li><code>testName</code> - <code>None</code></li>
* <li><code>reporter</code> - a reporter that prints to the standard output</li>
* <li><code>stopper</code> - a <code>Stopper</code> whose <code>apply</code> method always returns <code>false</code></li>
* <li><code>filter</code> - a <code>Filter</code> constructed with <code>None</code> for <code>tagsToInclude</code> and <code>Set()</code>
* for <code>tagsToExclude</code></li>
* <li><code>configMap</code> - the specified <code>configMap</code> <code>Map[String, Any]</code></li>
* <li><code>distributor</code> - <code>None</code></li>
* <li><code>tracker</code> - a new <code>Tracker</code></li>
* </ul>
*
* <p>
* This method serves as a convenient way to execute a <code>Suite</code>, passing in some objects via the <code>configMap</code>, especially from within the Scala interpreter.
* </p>
*
* <p>
* Note: In ScalaTest, the terms "execute" and "run" basically mean the same thing and
 * can be used interchangeably. The reason this convenience method and its three overloaded forms
 * aren't named <code>run</code> is described in the documentation of the overloaded form that
* takes no parameters: <a href="#execute%28%29">execute()</a>.
* </p>
*
* @param configMap a <code>Map</code> of key-value pairs that can be used by the executing <code>Suite</code> of tests.
*
* @throws NullPointerException if the passed <code>configMap</code> parameter is <code>null</code>.
*/
final def execute(configMap: Map[String, Any]) {
run(None, new StandardOutReporter, new Stopper {}, Filter(), configMap, None, new Tracker)
}
/**
* Executes the test specified as <code>testName</code> in this <code>Suite</code>, printing results to the standard output.
*
* <p>
* This method implementation calls <code>run</code> on this <code>Suite</code>, passing in:
* </p>
*
* <ul>
* <li><code>testName</code> - <code>Some(testName)</code></li>
* <li><code>reporter</code> - a reporter that prints to the standard output</li>
* <li><code>stopper</code> - a <code>Stopper</code> whose <code>apply</code> method always returns <code>false</code></li>
* <li><code>filter</code> - a <code>Filter</code> constructed with <code>None</code> for <code>tagsToInclude</code> and <code>Set()</code>
* for <code>tagsToExclude</code></li>
* <li><code>configMap</code> - an empty <code>Map[String, Any]</code></li>
* <li><code>distributor</code> - <code>None</code></li>
* <li><code>tracker</code> - a new <code>Tracker</code></li>
* </ul>
*
* <p>
* This method serves as a convenient way to run a single test, especially from within the Scala interpreter.
* </p>
*
* <p>
* Note: In ScalaTest, the terms "execute" and "run" basically mean the same thing and
 * can be used interchangeably. The reason this convenience method and its three overloaded forms
 * aren't named <code>run</code> is described in the documentation of the overloaded form that
* takes no parameters: <a href="#execute%28%29">execute()</a>.
* </p>
*
* @param testName the name of one test to run.
*
* @throws NullPointerException if the passed <code>testName</code> parameter is <code>null</code>.
* @throws IllegalArgumentException if <code>testName</code> is defined, but no test with the specified test name
* exists in this <code>Suite</code>
*/
final def execute(testName: String) {
run(Some(testName), new StandardOutReporter, new Stopper {}, Filter(), Map(), None, new Tracker)
}
/**
* Executes the test specified as <code>testName</code> in this <code>Suite</code> with the specified <code>configMap</code>, printing
* results to the standard output.
*
* <p>
* This method implementation calls <code>run</code> on this <code>Suite</code>, passing in:
* </p>
*
* <ul>
* <li><code>testName</code> - <code>Some(testName)</code></li>
* <li><code>reporter</code> - a reporter that prints to the standard output</li>
* <li><code>stopper</code> - a <code>Stopper</code> whose <code>apply</code> method always returns <code>false</code></li>
* <li><code>filter</code> - a <code>Filter</code> constructed with <code>None</code> for <code>tagsToInclude</code> and <code>Set()</code>
* for <code>tagsToExclude</code></li>
* <li><code>configMap</code> - the specified <code>configMap</code> <code>Map[String, Any]</code></li>
* <li><code>distributor</code> - <code>None</code></li>
* <li><code>tracker</code> - a new <code>Tracker</code></li>
* </ul>
*
* <p>
* This method serves as a convenient way to execute a single test, passing in some objects via the <code>configMap</code>, especially from
* within the Scala interpreter.
* </p>
*
* <p>
* Note: In ScalaTest, the terms "execute" and "run" basically mean the same thing and
 * can be used interchangeably. The reason this convenience method and its three overloaded forms
 * aren't named <code>run</code> is described in the documentation of the overloaded form that
* takes no parameters: <a href="#execute%28%29">execute()</a>.
* </p>
*
* @param testName the name of one test to run.
* @param configMap a <code>Map</code> of key-value pairs that can be used by the executing <code>Suite</code> of tests.
*
* @throws NullPointerException if either of the passed <code>testName</code> or <code>configMap</code> parameters is <code>null</code>.
* @throws IllegalArgumentException if <code>testName</code> is defined, but no test with the specified test name
* exists in this <code>Suite</code>
*/
final def execute(testName: String, configMap: Map[String, Any]) {
run(Some(testName), new StandardOutReporter, new Stopper {}, Filter(), configMap, None, new Tracker)
}
/**
* A <code>Map</code> whose keys are <code>String</code> tag names with which tests in this <code>Suite</code> are marked, and
* whose values are the <code>Set</code> of test names marked with each tag. If this <code>Suite</code> contains no tags, this
* method returns an empty <code>Map</code>.
*
* <p>
* This trait's implementation of this method uses Java reflection to discover any Java annotations attached to its test methods. The
* fully qualified name of each unique annotation that extends <code>TagAnnotation</code> is considered a tag. This trait's
 * implementation of this method, therefore, places one key/value pair into the
 * <code>Map</code> for each unique tag annotation name discovered through reflection. The mapped value for each tag name key will contain
 * the names of the test methods marked with that tag, as provided via the <code>testNames</code> method.
* </p>
*
* <p>
* Subclasses may override this method to define and/or discover tags in a custom manner, but overriding method implementations
* should never return an empty <code>Set</code> as a value. If a tag has no tests, its name should not appear as a key in the
* returned <code>Map</code>.
* </p>
*
* <p>
 * <strong>Note: the <code>TagAnnotation</code> annotation was introduced in ScalaTest 1.0, when "groups" were renamed
 * to "tags." In 1.0 and 1.1, <code>TagAnnotation</code> is not yet required on annotations used to tag <code>Suite</code>
 * methods. Any annotation on a <code>Suite</code> method will be considered a tag until 1.2, to give users time to add
 * <code>TagAnnotation</code> to any tag annotations they created prior to the 1.0 release. From 1.2 onward, only annotations
 * themselves annotated with <code>TagAnnotation</code> will be considered tag annotations.</strong>
* </p>
*/
def tags: Map[String, Set[String]] = {
def getTags(testName: String) =
/* AFTER THE DEPRECATION CYCLE FOR GROUPS TO TAGS (1.2), REPLACE THE FOLLOWING FOR LOOP WITH THIS COMMENTED OUT ONE
THAT MAKES SURE ANNOTATIONS ARE TAGGED WITH TagAnnotation.
for {
a <- getMethodForTestName(testName).getDeclaredAnnotations
annotationClass = a.annotationType
if annotationClass.isAnnotationPresent(classOf[TagAnnotation])
} yield annotationClass.getName
*/
for (a <- getMethodForTestName(testName).getDeclaredAnnotations)
yield a.annotationType.getName
val elements =
for (testName <- testNames; if !getTags(testName).isEmpty)
yield testName -> (Set() ++ getTags(testName))
Map() ++ elements
}
/**
* <strong>The <code>groups</code> methods has been deprecated and will be removed in a future version of ScalaTest.
* Please call (and override) <code>tags</code> instead.</strong>
*/
@deprecated
final def groups: Map[String, Set[String]] = tags
/**
 * A <code>Set</code> of test names. If this <code>Suite</code> contains no tests, this method returns an empty <code>Set</code>.
*
* <p>
* This trait's implementation of this method uses Java reflection to discover all public methods whose name starts with <code>"test"</code>,
* which take either nothing or a single <code>Informer</code> as parameters. For each discovered test method, it assigns a test name
* comprised of just the method name if the method takes no parameters, or the method name plus <code>(Informer)</code> if the
 * method takes an <code>Informer</code>. Here are a few method signatures and the names that this trait's implementation assigns them:
* </p>
*
* <pre>
* def testCat() {} // test name: "testCat"
* def testCat(Informer) {} // test name: "testCat(Informer)"
* def testDog() {} // test name: "testDog"
* def testDog(Informer) {} // test name: "testDog(Informer)"
* def test() {} // test name: "test"
* def test(Informer) {} // test name: "test(Informer)"
* </pre>
*
* <p>
* This trait's implementation of this method returns an immutable <code>Set</code> of all such names, excluding the name
* <code>testNames</code>. The iterator obtained by invoking <code>elements</code> on this
* returned <code>Set</code> will produce the test names in their <em>natural order</em>, as determined by <code>String</code>'s
* <code>compareTo</code> method.
* </p>
*
* <p>
* This trait's implementation of <code>runTests</code> invokes this method
* and calls <code>runTest</code> for each test name in the order they appear in the returned <code>Set</code>'s iterator.
* Although this trait's implementation of this method returns a <code>Set</code> whose iterator produces <code>String</code>
 * test names in a well-defined order, the contract of this method does not require a defined order. Subclasses are free to
* override this method and return test names in an undefined order, or in a defined order that's different from <code>String</code>'s
* natural order.
* </p>
*
* <p>
* Subclasses may override this method to produce test names in a custom manner. One potential reason to override <code>testNames</code> is
* to run tests in a different order, for example, to ensure that tests that depend on other tests are run after those other tests.
 * Another potential reason to override is to allow tests to be defined in a different manner, such as methods marked with <code>@Test</code> annotations
* (as is done in <code>JUnitSuite</code> and <code>TestNGSuite</code>) or test functions registered during construction (as is
* done in <code>FunSuite</code> and <code>Spec</code>).
* </p>
*/
def testNames: Set[String] = {
def takesInformer(m: Method) = {
val paramTypes = m.getParameterTypes
paramTypes.length == 1 && classOf[Informer].isAssignableFrom(paramTypes(0))
}
def isTestMethod(m: Method) = {
val isInstanceMethod = !Modifier.isStatic(m.getModifiers())
// name must have at least 4 chars (minimum is "test")
val simpleName = m.getName
val firstFour = if (simpleName.length >= 4) simpleName.substring(0, 4) else ""
val paramTypes = m.getParameterTypes
val hasNoParams = paramTypes.length == 0
// Discover testNames(Informer) because if we didn't it might be confusing when someone
// actually wrote a testNames(Informer) method and it was silently ignored.
val isTestNames = simpleName == "testNames"
isInstanceMethod && (firstFour == "test") && ((hasNoParams && !isTestNames) || takesInformer(m))
}
val testNameArray =
for (m <- getClass.getMethods; if isTestMethod(m))
yield if (takesInformer(m)) m.getName + InformerInParens else m.getName
TreeSet[String]() ++ testNameArray
}
private def testMethodTakesInformer(testName: String) = testName.endsWith(InformerInParens)
private def getMethodForTestName(testName: String) =
getClass.getMethod(
simpleNameForTest(testName),
(if (testMethodTakesInformer(testName)) Array(classOf[Informer]) else new Array[Class[_]](0)): _*
)
/**
* Run the passed test function in the context of a fixture established by this method.
*
* <p>
* This method should set up the fixture needed by the tests of the
* current suite, invoke the test function, and if needed, perform any clean
* up needed after the test completes. Because the <code>NoArgTest</code> function
* passed to this method takes no parameters, preparing the fixture will require
* side effects, such as reassigning instance <code>var</code>s in this <code>Suite</code> or initializing
* a globally accessible external database. If you want to avoid reassigning instance <code>var</code>s
* you can use <a href="fixture/FixtureSuite.html">FixtureSuite</a>.
* </p>
*
* <p>
* This trait's implementation of <code>runTest</code> invokes this method for each test, passing
* in a <code>NoArgTest</code> whose <code>apply</code> method will execute the code of the test.
* </p>
*
* <p>
* This trait's implementation of this method simply invokes the passed <code>NoArgTest</code> function.
* </p>
*
* @param test the no-arg test function to run with a fixture
*/
protected def withFixture(test: NoArgTest) {
test()
}
/**
* Run a test.
*
* <p>
* This trait's implementation uses Java reflection to invoke on this object the test method identified by the passed <code>testName</code>.
* </p>
*
* <p>
* Implementations of this method are responsible for ensuring a <code>TestStarting</code> event
 * is fired to the <code>Reporter</code> before executing any test, and either <code>TestSucceeded</code>,
 * <code>TestFailed</code>, or <code>TestPending</code> after executing any
 * test. (If a test is marked with the <code>org.scalatest.Ignore</code> tag, the
* <code>runTests</code> method is responsible for ensuring a <code>TestIgnored</code> event is fired and that
* this <code>runTest</code> method is not invoked for that ignored test.)
* </p>
*
* @param testName the name of one test to run.
* @param reporter the <code>Reporter</code> to which results will be reported
* @param stopper the <code>Stopper</code> that will be consulted to determine whether to stop execution early.
* @param configMap a <code>Map</code> of key-value pairs that can be used by the executing <code>Suite</code> of tests.
* @param tracker a <code>Tracker</code> tracking <code>Ordinal</code>s being fired by the current thread.
* @throws NullPointerException if any of <code>testName</code>, <code>reporter</code>, <code>stopper</code>, <code>configMap</code>
* or <code>tracker</code> is <code>null</code>.
* @throws IllegalArgumentException if <code>testName</code> is defined, but no test with the specified test name
* exists in this <code>Suite</code>
*/
protected def runTest(testName: String, reporter: Reporter, stopper: Stopper, configMap: Map[String, Any], tracker: Tracker) {
if (testName == null || reporter == null || stopper == null || configMap == null || tracker == null)
throw new NullPointerException
val stopRequested = stopper
val report = wrapReporterIfNecessary(reporter)
val method =
try {
getMethodForTestName(testName)
}
catch {
case e: NoSuchMethodException =>
throw new IllegalArgumentException(Resources("testNotFound", testName))
case e =>
throw e
}
// Create a Rerunner if the Suite has a no-arg constructor
val hasPublicNoArgConstructor = Suite.checkForPublicNoArgConstructor(getClass)
val rerunnable =
if (hasPublicNoArgConstructor)
Some(new TestRerunner(getClass.getName, testName))
else
None
val testStartTime = System.currentTimeMillis
report(TestStarting(tracker.nextOrdinal(), thisSuite.suiteName, Some(thisSuite.getClass.getName), testName, None, rerunnable))
val args: Array[Object] =
if (testMethodTakesInformer(testName)) {
val informer =
new Informer {
def apply(message: String) {
if (message == null)
throw new NullPointerException
report(InfoProvided(tracker.nextOrdinal(), message, Some(NameInfo(thisSuite.suiteName, Some(thisSuite.getClass.getName), Some(testName)))))
}
}
Array(informer)
}
else Array()
try {
val theConfigMap = configMap
withFixture(
new NoArgTest {
def name = testName
def apply() { method.invoke(thisSuite, args: _*) }
def configMap = theConfigMap
}
)
val duration = System.currentTimeMillis - testStartTime
report(TestSucceeded(tracker.nextOrdinal(), thisSuite.suiteName, Some(thisSuite.getClass.getName), testName, Some(duration), None, rerunnable))
}
catch {
case ite: InvocationTargetException =>
val t = ite.getTargetException
t match {
case _: TestPendingException =>
report(TestPending(tracker.nextOrdinal(), thisSuite.suiteName, Some(thisSuite.getClass.getName), testName))
case e if !anErrorThatShouldCauseAnAbort(e) =>
val duration = System.currentTimeMillis - testStartTime
handleFailedTest(t, hasPublicNoArgConstructor, testName, rerunnable, report, tracker, duration)
case e => throw e
}
case e if !anErrorThatShouldCauseAnAbort(e) =>
val duration = System.currentTimeMillis - testStartTime
handleFailedTest(e, hasPublicNoArgConstructor, testName, rerunnable, report, tracker, duration)
case e => throw e
}
}
/**
* Run zero to many of this <code>Suite</code>'s tests.
*
* <p>
* This method takes a <code>testName</code> parameter that optionally specifies a test to invoke.
* If <code>testName</code> is defined, this trait's implementation of this method
* invokes <code>runTest</code> on this object, passing in:
* </p>
*
* <ul>
* <li><code>testName</code> - the <code>String</code> value of the <code>testName</code> <code>Option</code> passed
* to this method</li>
* <li><code>reporter</code> - the <code>Reporter</code> passed to this method, or one that wraps and delegates to it</li>
* <li><code>stopper</code> - the <code>Stopper</code> passed to this method, or one that wraps and delegates to it</li>
* <li><code>configMap</code> - the <code>configMap</code> <code>Map</code> passed to this method, or one that wraps and delegates to it</li>
* </ul>
*
* <p>
* This method takes a <code>Filter</code>, which encapsulates an optional <code>Set</code> of tag names that should be included
* (<code>tagsToInclude</code>) and a <code>Set</code> that should be excluded (<code>tagsToExclude</code>), when deciding which
* of this <code>Suite</code>'s tests to run.
* If <code>tagsToInclude</code> is <code>None</code>, all tests will be run
 * except those belonging to tags listed in the <code>tagsToExclude</code> <code>Set</code>. If <code>tagsToInclude</code> is defined, only tests
 * belonging to tags mentioned in the <code>tagsToInclude</code> <code>Set</code>, and not mentioned in the <code>tagsToExclude</code> <code>Set</code>,
 * will be run. However, if <code>testName</code> is defined, <code>tagsToInclude</code> and <code>tagsToExclude</code> are essentially ignored.
* Only if <code>testName</code> is <code>None</code> will <code>tagsToInclude</code> and <code>tagsToExclude</code> be consulted to
* determine which of the tests named in the <code>testNames</code> <code>Set</code> should be run. This trait's implementation
* behaves this way, and it is part of the general contract of this method, so all overridden forms of this method should behave
 * this way as well. For more information on test tags, see the main documentation for this trait and for class <a href="Filter.html"><code>Filter</code></a>.
* Note that this means that even if a test is marked as ignored, for example a test method in a <code>Suite</code> annotated with
* <code>org.scalatest.Ignore</code>, if that test name is passed as <code>testName</code> to <code>runTest</code>, it will be invoked
* despite the <code>Ignore</code> annotation.
* </p>
*
* <p>
* If <code>testName</code> is <code>None</code>, this trait's implementation of this method
* invokes <code>testNames</code> on this <code>Suite</code> to get a <code>Set</code> of names of tests to potentially run.
 * (A <code>testName</code> value of <code>None</code> essentially acts as a wildcard that means all tests in
 * this <code>Suite</code> that are selected by <code>tagsToInclude</code> and <code>tagsToExclude</code> should be run.)
 * For each test in the <code>testNames</code> <code>Set</code>, in the order
* they appear in the iterator obtained by invoking the <code>elements</code> method on the <code>Set</code>, this trait's implementation
* of this method checks whether the test should be run based on the <code>Filter</code>.
* If so, this implementation invokes <code>runTest</code>, passing in:
* </p>
*
* <ul>
* <li><code>testName</code> - the <code>String</code> name of the test to run (which will be one of the names in the <code>testNames</code> <code>Set</code>)</li>
* <li><code>reporter</code> - the <code>Reporter</code> passed to this method, or one that wraps and delegates to it</li>
* <li><code>stopper</code> - the <code>Stopper</code> passed to this method, or one that wraps and delegates to it</li>
* <li><code>configMap</code> - the <code>configMap</code> passed to this method, or one that wraps and delegates to it</li>
* </ul>
*
* <p>
* If a test is marked with the <code>org.scalatest.Ignore</code> tag, implementations
* of this method are responsible for ensuring a <code>TestIgnored</code> event is fired for that test
* and that <code>runTest</code> is not called for that test.
* </p>
*
* @param testName an optional name of one test to run. If <code>None</code>, all relevant tests should be run.
* I.e., <code>None</code> acts like a wildcard that means run all relevant tests in this <code>Suite</code>.
* @param reporter the <code>Reporter</code> to which results will be reported
* @param stopper the <code>Stopper</code> that will be consulted to determine whether to stop execution early.
* @param filter a <code>Filter</code> with which to filter tests based on their tags
* @param configMap a <code>Map</code> of key-value pairs that can be used by the executing <code>Suite</code> of tests.
* @param distributor an optional <code>Distributor</code>, into which to put nested <code>Suite</code>s to be run
* by another entity, such as concurrently by a pool of threads. If <code>None</code>, nested <code>Suite</code>s will be run sequentially.
* @param tracker a <code>Tracker</code> tracking <code>Ordinal</code>s being fired by the current thread.
* @throws NullPointerException if any of the passed parameters is <code>null</code>.
* @throws IllegalArgumentException if <code>testName</code> is defined, but no test with the specified test name
* exists in this <code>Suite</code>
*/
protected def runTests(testName: Option[String], reporter: Reporter, stopper: Stopper, filter: Filter,
configMap: Map[String, Any], distributor: Option[Distributor], tracker: Tracker) {
if (testName == null)
throw new NullPointerException("testName was null")
if (reporter == null)
throw new NullPointerException("reporter was null")
if (stopper == null)
throw new NullPointerException("stopper was null")
if (filter == null)
throw new NullPointerException("filter was null")
if (configMap == null)
throw new NullPointerException("configMap was null")
if (distributor == null)
throw new NullPointerException("distributor was null")
if (tracker == null)
throw new NullPointerException("tracker was null")
val stopRequested = stopper
// Wrap any non-DispatchReporter, non-CatchReporter in a CatchReporter,
// so that exceptions are caught and transformed
// into error messages on the standard error stream.
val report = wrapReporterIfNecessary(reporter)
// If a testName is passed to run, just run that, else run the tests returned
// by testNames.
testName match {
case Some(tn) => runTest(tn, report, stopRequested, configMap, tracker)
case None =>
for ((tn, ignoreTest) <- filter(testNames, tags))
if (ignoreTest)
report(TestIgnored(tracker.nextOrdinal(), thisSuite.suiteName, Some(thisSuite.getClass.getName), tn))
else
runTest(tn, report, stopRequested, configMap, tracker)
}
}
/**
* Runs this suite of tests.
*
* <p>If <code>testName</code> is <code>None</code>, this trait's implementation of this method
* calls these two methods on this object in this order:</p>
*
* <ol>
* <li><code>runNestedSuites(report, stopper, tagsToInclude, tagsToExclude, configMap, distributor)</code></li>
* <li><code>runTests(testName, report, stopper, tagsToInclude, tagsToExclude, configMap)</code></li>
* </ol>
*
* <p>
* If <code>testName</code> is defined, then this trait's implementation of this method
* calls <code>runTests</code>, but does not call <code>runNestedSuites</code>. This behavior
* is part of the contract of this method. Subclasses that override <code>run</code> must take
* care not to call <code>runNestedSuites</code> if <code>testName</code> is defined. (The
* <code>OneInstancePerTest</code> trait depends on this behavior, for example.)
* </p>
*
* <p>
* Subclasses and subtraits that override this <code>run</code> method can implement them without
* invoking either the <code>runTests</code> or <code>runNestedSuites</code> methods, which
* are invoked by this trait's implementation of this method. It is recommended, but not required,
* that subclasses and subtraits that override <code>run</code> in a way that does not
* invoke <code>runNestedSuites</code> also override <code>runNestedSuites</code> and make it
* final. Similarly it is recommended, but not required,
* that subclasses and subtraits that override <code>run</code> in a way that does not
* invoke <code>runTests</code> also override <code>runTests</code> (and <code>runTest</code>,
* which this trait's implementation of <code>runTests</code> calls) and make it
* final. The implementation of these final methods can either invoke the superclass implementation
* of the method, or throw an <code>UnsupportedOperationException</code> if appropriate. The
* reason for this recommendation is that ScalaTest includes several traits that override
* these methods to allow behavior to be mixed into a <code>Suite</code>. For example, trait
 * <code>BeforeAndAfterEach</code> overrides <code>runTests</code>. In a <code>Suite</code>
* subclass that no longer invokes <code>runTests</code> from <code>run</code>, the
* <code>BeforeAndAfterEach</code> trait is not applicable. Mixing it in would have no effect.
* By making <code>runTests</code> final in such a <code>Suite</code> subtrait, you make
* the attempt to mix <code>BeforeAndAfterEach</code> into a subclass of your subtrait
* a compiler error. (It would fail to compile with a complaint that <code>BeforeAndAfterEach</code>
* is trying to override <code>runTests</code>, which is a final method in your trait.)
* </p>
*
* @param testName an optional name of one test to run. If <code>None</code>, all relevant tests should be run.
* I.e., <code>None</code> acts like a wildcard that means run all relevant tests in this <code>Suite</code>.
* @param reporter the <code>Reporter</code> to which results will be reported
* @param stopper the <code>Stopper</code> that will be consulted to determine whether to stop execution early.
* @param filter a <code>Filter</code> with which to filter tests based on their tags
* @param configMap a <code>Map</code> of key-value pairs that can be used by the executing <code>Suite</code> of tests.
* @param distributor an optional <code>Distributor</code>, into which to put nested <code>Suite</code>s to be run
* by another entity, such as concurrently by a pool of threads. If <code>None</code>, nested <code>Suite</code>s will be run sequentially.
* @param tracker a <code>Tracker</code> tracking <code>Ordinal</code>s being fired by the current thread.
*
* @throws NullPointerException if any passed parameter is <code>null</code>.
* @throws IllegalArgumentException if <code>testName</code> is defined, but no test with the specified test name
* exists in this <code>Suite</code>
*/
def run(testName: Option[String], reporter: Reporter, stopper: Stopper, filter: Filter,
configMap: Map[String, Any], distributor: Option[Distributor], tracker: Tracker) {
if (testName == null)
throw new NullPointerException("testName was null")
if (reporter == null)
throw new NullPointerException("reporter was null")
if (stopper == null)
throw new NullPointerException("stopper was null")
if (filter == null)
throw new NullPointerException("filter was null")
if (configMap == null)
throw new NullPointerException("configMap was null")
if (distributor == null)
throw new NullPointerException("distributor was null")
if (tracker == null)
throw new NullPointerException("tracker was null")
val stopRequested = stopper
val report = wrapReporterIfNecessary(reporter)
testName match {
case None => runNestedSuites(report, stopRequested, filter, configMap, distributor, tracker)
case Some(_) =>
}
runTests(testName, report, stopRequested, filter, configMap, distributor, tracker)
if (stopRequested()) {
val rawString = Resources("executeStopping")
report(InfoProvided(tracker.nextOrdinal(), rawString, Some(NameInfo(thisSuite.suiteName, Some(thisSuite.getClass.getName), testName))))
}
}
private def handleFailedTest(throwable: Throwable, hasPublicNoArgConstructor: Boolean, testName: String,
rerunnable: Option[Rerunner], report: Reporter, tracker: Tracker, duration: Long) {
val message =
if (throwable.getMessage != null) // [bv: this could be factored out into a helper method]
throwable.getMessage
else
throwable.toString
report(TestFailed(tracker.nextOrdinal(), message, thisSuite.suiteName, Some(thisSuite.getClass.getName), testName, Some(throwable), Some(duration), None, rerunnable))
}
/**
*
* Run zero to many of this <code>Suite</code>'s nested <code>Suite</code>s.
*
* <p>
* If the passed <code>distributor</code> is <code>None</code>, this trait's
* implementation of this method invokes <code>run</code> on each
* nested <code>Suite</code> in the <code>List</code> obtained by invoking <code>nestedSuites</code>.
* If a nested <code>Suite</code>'s <code>run</code>
* method completes abruptly with an exception, this trait's implementation of this
* method reports that the <code>Suite</code> aborted and attempts to run the
* next nested <code>Suite</code>.
* If the passed <code>distributor</code> is defined, this trait's implementation
* puts each nested <code>Suite</code>
* into the <code>Distributor</code> contained in the <code>Some</code>, in the order in which the
* <code>Suite</code>s appear in the <code>List</code> returned by <code>nestedSuites</code>, passing
* in a new <code>Tracker</code> obtained by invoking <code>nextTracker</code> on the <code>Tracker</code>
* passed to this method.
* </p>
*
* <p>
* Implementations of this method are responsible for ensuring <code>SuiteStarting</code> events
* are fired to the <code>Reporter</code> before executing any nested <code>Suite</code>, and either <code>SuiteCompleted</code>
* or <code>SuiteAborted</code> after executing any nested <code>Suite</code>.
* </p>
*
* @param reporter the <code>Reporter</code> to which results will be reported
* @param stopper the <code>Stopper</code> that will be consulted to determine whether to stop execution early.
* @param filter a <code>Filter</code> with which to filter tests based on their tags
* @param configMap a <code>Map</code> of key-value pairs that can be used by the executing <code>Suite</code> of tests.
* @param distributor an optional <code>Distributor</code>, into which to put nested <code>Suite</code>s to be run
* by another entity, such as concurrently by a pool of threads. If <code>None</code>, nested <code>Suite</code>s will be run sequentially.
* @param tracker a <code>Tracker</code> tracking <code>Ordinal</code>s being fired by the current thread.
*
* @throws NullPointerException if any passed parameter is <code>null</code>.
*/
protected def runNestedSuites(reporter: Reporter, stopper: Stopper, filter: Filter,
configMap: Map[String, Any], distributor: Option[Distributor], tracker: Tracker) {
if (reporter == null)
throw new NullPointerException("reporter was null")
if (stopper == null)
throw new NullPointerException("stopper was null")
if (filter == null)
throw new NullPointerException("filter was null")
if (configMap == null)
throw new NullPointerException("configMap was null")
if (distributor == null)
throw new NullPointerException("distributor was null")
if (tracker == null)
throw new NullPointerException("tracker was null")
val stopRequested = stopper
val report = wrapReporterIfNecessary(reporter)
def callExecuteOnSuite(nestedSuite: Suite) {
if (!stopRequested()) {
// Create a Rerunner if the Suite has a no-arg constructor
val hasPublicNoArgConstructor = Suite.checkForPublicNoArgConstructor(nestedSuite.getClass)
val rerunnable =
if (hasPublicNoArgConstructor)
Some(new SuiteRerunner(nestedSuite.getClass.getName))
else
None
val rawString = Resources("suiteExecutionStarting")
val formatter = formatterForSuiteStarting(nestedSuite)
val suiteStartTime = System.currentTimeMillis
report(SuiteStarting(tracker.nextOrdinal(), nestedSuite.suiteName, Some(nestedSuite.getClass.getName), formatter, rerunnable))
try {
// Same thread, so OK to send same tracker
nestedSuite.run(None, report, stopRequested, filter, configMap, distributor, tracker)
val rawString = Resources("suiteCompletedNormally")
val formatter = formatterForSuiteCompleted(nestedSuite)
val duration = System.currentTimeMillis - suiteStartTime
          report(SuiteCompleted(tracker.nextOrdinal(), nestedSuite.suiteName, Some(nestedSuite.getClass.getName), Some(duration), formatter, rerunnable))
}
catch {
case e: RuntimeException => {
val rawString = Resources("executeException")
val formatter = formatterForSuiteAborted(nestedSuite, rawString)
val duration = System.currentTimeMillis - suiteStartTime
            report(SuiteAborted(tracker.nextOrdinal(), rawString, nestedSuite.suiteName, Some(nestedSuite.getClass.getName), Some(e), Some(duration), formatter, rerunnable))
}
}
}
}
distributor match {
case None => nestedSuites.foreach(callExecuteOnSuite)
case Some(distribute) =>
for (nestedSuite <- nestedSuites)
distribute(nestedSuite, tracker.nextTracker())
}
}
/**
* A user-friendly suite name for this <code>Suite</code>.
*
* <p>
* This trait's
* implementation of this method returns the simple name of this object's class. This
* trait's implementation of <code>runNestedSuites</code> calls this method to obtain a
* name for <code>Report</code>s to pass to the <code>suiteStarting</code>, <code>suiteCompleted</code>,
* and <code>suiteAborted</code> methods of the <code>Reporter</code>.
* </p>
*
* @return this <code>Suite</code> object's suite name.
*/
def suiteName = getSimpleNameOfAnObjectsClass(thisSuite)
/**
* Throws <code>TestPendingException</code> to indicate a test is pending.
*
* <p>
* A <em>pending test</em> is one that has been given a name but is not yet implemented. The purpose of
* pending tests is to facilitate a style of testing in which documentation of behavior is sketched
   * out before tests are written to verify that behavior (and often, before the behavior of
* the system being tested is itself implemented). Such sketches form a kind of specification of
* what tests and functionality to implement later.
* </p>
*
* <p>
* To support this style of testing, a test can be given a name that specifies one
* bit of behavior required by the system being tested. The test can also include some code that
* sends more information about the behavior to the reporter when the tests run. At the end of the test,
* it can call method <code>pending</code>, which will cause it to complete abruptly with <code>TestPendingException</code>.
* Because tests in ScalaTest can be designated as pending with <code>TestPendingException</code>, both the test name and any information
* sent to the reporter when running the test can appear in the report of a test run. (In other words,
* the code of a pending test is executed just like any other test.) However, because the test completes abruptly
* with <code>TestPendingException</code>, the test will be reported as pending, to indicate
* the actual test, and possibly the functionality it is intended to test, has not yet been implemented.
* </p>
*
* <p>
* Note: This method always completes abruptly with a <code>TestPendingException</code>. Thus it always has a side
* effect. Methods with side effects are usually invoked with parentheses, as in <code>pending()</code>. This
* method is defined as a parameterless method, in flagrant contradiction to recommended Scala style, because it
* forms a kind of DSL for pending tests. It enables tests in suites such as <code>FunSuite</code> or <code>Spec</code>
* to be denoted by placing "<code>(pending)</code>" after the test name, as in:
* </p>
*
* <pre>
* test("that style rules are not laws") (pending)
* </pre>
*
* <p>
* Readers of the code see "pending" in parentheses, which looks like a little note attached to the test name to indicate
   * it is pending. Whereas "<code>(pending())</code>" looks more like a method call, "<code>(pending)</code>" lets readers
* stay at a higher level, forgetting how it is implemented and just focusing on the intent of the programmer who wrote the code.
* </p>
*/
def pending: PendingNothing = { throw new TestPendingException }
/**
* Execute the passed block of code, and if it completes abruptly, throw <code>TestPendingException</code>, else
* throw <code>TestFailedException</code>.
*
* <p>
* This method can be used to temporarily change a failing test into a pending test in such a way that it will
* automatically turn back into a failing test once the problem originally causing the test to fail has been fixed.
* At that point, you need only remove the <code>pendingUntilFixed</code> call. In other words, a
* <code>pendingUntilFixed</code> surrounding a block of code that isn't broken is treated as a test failure.
* The motivation for this behavior is to encourage people to remove <code>pendingUntilFixed</code> calls when
   * they are no longer needed.
* </p>
*
* <p>
* This method facilitates a style of testing in which tests are written before the code they test. Sometimes you may
* encounter a test failure that requires more functionality than you want to tackle without writing more tests. In this
* case you can mark the bit of test code causing the failure with <code>pendingUntilFixed</code>. You can then write more
* tests and functionality that eventually will get your production code to a point where the original test won't fail anymore.
* At this point the code block marked with <code>pendingUntilFixed</code> will no longer throw an exception (because the
* problem has been fixed). This will in turn cause <code>pendingUntilFixed</code> to throw <code>TestFailedException</code>
   * with a detail message explaining that you need to go back and remove the <code>pendingUntilFixed</code> call as the problem originally
* causing your test code to fail has been fixed.
* </p>
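   *
   * <p>
   * A minimal sketch (the test and method names here are hypothetical):
   * </p>
   *
   * <pre>
   * test("new feature works") {
   *   pendingUntilFixed {
   *     assert(newFeatureWorks()) // fails today, so the test is reported as pending
   *   }
   * }
   * </pre>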
*
* @param f a block of code, which if it completes abruptly, should trigger a <code>TestPendingException</code>
* @throws TestPendingException if the passed block of code completes abruptly with an <code>Exception</code> or <code>AssertionError</code>
*/
def pendingUntilFixed(f: => Unit) {
val isPending =
try {
f
false
}
catch {
case _: Exception => true
case _: AssertionError => true
}
if (isPending)
throw new TestPendingException
else
throw new TestFailedException(Resources("pendingUntilFixed"), 2)
}
/**
* The total number of tests that are expected to run when this <code>Suite</code>'s <code>run</code> method is invoked.
*
* <p>
* This trait's implementation of this method returns the sum of:
* </p>
*
* <ul>
* <li>the size of the <code>testNames</code> <code>List</code>, minus the number of tests marked as ignored
* <li>the sum of the values obtained by invoking
* <code>expectedTestCount</code> on every nested <code>Suite</code> contained in
* <code>nestedSuites</code>
* </ul>
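   *
   * <p>
   * For example (hypothetical numbers), a suite with three test names of which one is
   * ignored, plus one nested suite expecting four tests, reports (3 - 1) + 4 = 6 expected tests.
   * </p>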
*
* @param filter a <code>Filter</code> with which to filter tests to count based on their tags
*/
def expectedTestCount(filter: Filter): Int = {
// [bv: here was another tricky refactor. How to increment a counter in a loop]
def countNestedSuiteTests(nestedSuites: List[Suite], filter: Filter): Int =
nestedSuites match {
case List() => 0
case nestedSuite :: nestedSuites => nestedSuite.expectedTestCount(filter) +
countNestedSuiteTests(nestedSuites, filter)
}
filter.runnableTestCount(testNames, tags) + countNestedSuiteTests(nestedSuites, filter)
}
// Wrap any non-DispatchReporter, non-CatchReporter in a CatchReporter,
// so that exceptions are caught and transformed
// into error messages on the standard error stream.
private[scalatest] def wrapReporterIfNecessary(reporter: Reporter) = reporter match {
case dr: DispatchReporter => dr
case cr: CatchReporter => cr
case _ => new CatchReporter(reporter)
}
}
private[scalatest] object Suite {
private[scalatest] val TestMethodPrefix = "test"
private[scalatest] val InformerInParens = "(Informer)"
private[scalatest] val IgnoreAnnotation = "org.scalatest.Ignore"
private[scalatest] def getSimpleNameOfAnObjectsClass(o: AnyRef) = stripDollars(parseSimpleName(o.getClass().getName()))
// [bv: this is a good example of the expression type refactor. I moved this from SuiteClassNameListCellRenderer]
// this will be needed by the GUI classes, etc.
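  // e.g. parseSimpleName("org.scalatest.Suite") == "Suite" (illustrative input)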
private[scalatest] def parseSimpleName(fullyQualifiedName: String) = {
val dotPos = fullyQualifiedName.lastIndexOf('.')
// [bv: need to check the dotPos != fullyQualifiedName.length]
if (dotPos != -1 && dotPos != fullyQualifiedName.length)
fullyQualifiedName.substring(dotPos + 1)
else
fullyQualifiedName
}
private[scalatest] def checkForPublicNoArgConstructor(clazz: java.lang.Class[_]) = {
try {
val constructor = clazz.getConstructor(new Array[java.lang.Class[T] forSome { type T }](0): _*)
Modifier.isPublic(constructor.getModifiers)
}
catch {
case nsme: NoSuchMethodException => false
}
}
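  // e.g. stripDollars("Suite$") == "Suite" (illustrative input); embedded dollars are
  // kept unless the name is a REPL synthetic starting with "line"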
private[scalatest] def stripDollars(s: String): String = {
val lastDollarIndex = s.lastIndexOf('$')
if (lastDollarIndex < s.length - 1)
if (lastDollarIndex == -1 || !s.startsWith("line")) s else s.substring(lastDollarIndex + 1)
else {
// The last char is a dollar sign
val lastNonDollarChar = s.reverse.find(_ != '$')
lastNonDollarChar match {
case None => s
case Some(c) => {
val lastNonDollarIndex = s.lastIndexOf(c)
if (lastNonDollarIndex == -1) s
else stripDollars(s.substring(0, lastNonDollarIndex + 1))
}
}
}
}
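  // e.g. diffStrings("hello world", "hello scala") == ("hello [world]", "hello [scala]")
  // (illustrative inputs; the differing middles are bracketed, shared context is kept)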
private[scalatest] def diffStrings(s: String, t: String): Tuple2[String, String] = {
def findCommonPrefixLength(s: String, t: String): Int = {
val max = s.length.min(t.length) // the maximum potential size of the prefix
var i = 0
var found = false
      while (i < max && !found) {
found = (s.charAt(i) != t.charAt(i))
if (!found)
i = i + 1
}
i
}
def findCommonSuffixLength(s: String, t: String): Int = {
val max = s.length.min(t.length) // the maximum potential size of the suffix
var i = 0
var found = false
      while (i < max && !found) {
found = (s.charAt(s.length - 1 - i) != t.charAt(t.length - 1 - i))
if (!found)
i = i + 1
}
i
}
val commonPrefixLength = findCommonPrefixLength(s, t)
val commonSuffixLength = findCommonSuffixLength(s.substring(commonPrefixLength), t.substring(commonPrefixLength))
val prefix = s.substring(0, commonPrefixLength)
val suffix = if (s.length - commonSuffixLength < 0) "" else s.substring(s.length - commonSuffixLength)
val sMiddleEnd = s.length - commonSuffixLength
val tMiddleEnd = t.length - commonSuffixLength
val sMiddle = s.substring(commonPrefixLength, sMiddleEnd)
val tMiddle = t.substring(commonPrefixLength, tMiddleEnd)
val MaxContext = 20
val shortPrefix = if (commonPrefixLength > MaxContext) "..." + prefix.substring(prefix.length - MaxContext) else prefix
val shortSuffix = if (commonSuffixLength > MaxContext) suffix.substring(0, MaxContext) + "..." else suffix
(shortPrefix + "[" + sMiddle + "]" + shortSuffix, shortPrefix + "[" + tMiddle + "]" + shortSuffix)
}
// If the objects are two strings, replace them with whatever is returned by diffStrings.
// Otherwise, use the same objects.
private[scalatest] def getObjectsForFailureMessage(a: Any, b: Any) =
a match {
case aStr: String => {
b match {
case bStr: String => {
Suite.diffStrings(aStr, bStr)
}
case _ => (a, b)
}
}
case _ => (a, b)
}
private[scalatest] def formatterForSuiteStarting(suite: Suite): Option[Formatter] =
suite match {
case spec: Spec => Some(IndentedText(suite.suiteName + ":", suite.suiteName, 0))
case spec: FlatSpec => Some(IndentedText(suite.suiteName + ":", suite.suiteName, 0))
case spec: WordSpec => Some(IndentedText(suite.suiteName + ":", suite.suiteName, 0))
case spec: FeatureSpec => Some(IndentedText(suite.suiteName + ":", suite.suiteName, 0))
case _ => None
}
private[scalatest] def formatterForSuiteCompleted(suite: Suite): Option[Formatter] =
suite match {
case spec: Spec => Some(MotionToSuppress)
case spec: FlatSpec => Some(MotionToSuppress)
case spec: WordSpec => Some(MotionToSuppress)
case spec: FeatureSpec => Some(MotionToSuppress)
case _ => None
}
private[scalatest] def formatterForSuiteAborted(suite: Suite, message: String): Option[Formatter] = {
suite match {
case spec: Spec => Some(IndentedText(message, message, 0))
case spec: FlatSpec => Some(IndentedText(message, message, 0))
case spec: WordSpec => Some(IndentedText(message, message, 0))
case spec: FeatureSpec => Some(IndentedText(message, message, 0))
case _ => None
}
}
private def simpleNameForTest(testName: String) =
if (testName.endsWith(InformerInParens))
testName.substring(0, testName.length - InformerInParens.length)
else
testName
private[scalatest] def anErrorThatShouldCauseAnAbort(throwable: Throwable) =
throwable match {
case _: AnnotationFormatError => true
case _: AWTError => true
case _: CoderMalfunctionError => true
case _: FactoryConfigurationError => true
case _: LinkageError => true
case _: ThreadDeath => true
case _: TransformerFactoryConfigurationError => true
case _: VirtualMachineError => true
case _ => false
}
}
| kevinwright/scalatest | src/main/scala/org/scalatest/Suite.scala | Scala | apache-2.0 | 100,196 |
package de.tudarmstadt.lt.flinkdt.tasks
import de.tudarmstadt.lt.flinkdt.types._
import org.apache.flink.api.common.typeinfo.TypeInformation
import org.apache.flink.api.scala._
import scala.reflect.ClassTag
/**
* Created by Steffen Remus
*/
object N11Sum {
def apply[C <: CT2 : ClassTag : TypeInformation, T1 : ClassTag : TypeInformation, T2 : ClassTag : TypeInformation]() = new N11Sum[C, T1, T2]()
}
class N11Sum[C <: CT2 : ClassTag : TypeInformation, T1 : ClassTag : TypeInformation, T2 : ClassTag : TypeInformation] extends DSTask[C, C] {
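  // Collapses duplicate (a, b) observations into a single row by summing their n11 counts.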
override def process(ds: DataSet[C]): DataSet[C] = {
ds.groupBy("a","b")
// .reduce((l,r) => {l.n11 += r.n11; l}) // .sum("n11")
.sum("n11")
}
}
| remstef/flinkfun | src/main/scala/de/tudarmstadt/lt/flinkdt/tasks/N11Sum.scala | Scala | apache-2.0 | 721 |
import Predef.{augmentString => _, wrapString => _, _}
import scala.reflect.ClassTag
object Test {
import colltest5.strawman.collections._
import CollectionStrawMan5._
def seqOps(xs: Seq[Int]) = {
val x1 = xs.foldLeft("")(_ + _)
val y1: String = x1
val x2 = xs.foldRight("")(_ + _)
val y2: String = x2
val x3 = xs.indexWhere(_ % 2 == 0)
val y3: Int = x3
val x4 = xs.head
val y4: Int = x4
val x5 = xs.to(List)
val y5: List[Int] = x5
val (xs6, xs7) = xs.partition(_ % 2 == 0)
val ys6: Seq[Int] = xs6
val ys7: Seq[Int] = xs7
val xs8 = xs.drop(2)
val ys8: Seq[Int] = xs8
val xs9 = xs.map(_ >= 0)
val ys9: Seq[Boolean] = xs9
val xs10 = xs.flatMap(x => Cons(x, Cons(-x, Nil)))
val ys10: Seq[Int] = xs10
val xs11 = xs ++ xs
val ys11: Seq[Int] = xs11
val xs12 = xs ++ Nil
val ys12: Seq[Int] = xs12
val xs13 = Nil ++ xs
val ys13: Seq[Int] = xs13
val xs14 = xs ++ Cons("a", Nil)
val ys14: Seq[Any] = xs14
val xs15 = xs.zip(xs9)
val ys15: Seq[(Int, Boolean)] = xs15
val xs16 = xs.reverse
val ys16: Seq[Int] = xs16
println("-------")
println(x1)
println(x2)
println(x3)
println(x4)
println(x5)
println(xs6)
println(xs7)
println(xs8)
println(xs9)
println(xs10)
println(xs11)
println(xs12)
println(xs13)
println(xs14)
println(xs15)
println(xs16)
}
def viewOps(xs: View[Int]) = {
val x1 = xs.foldLeft("")(_ + _)
val y1: String = x1
val x2 = xs.foldRight("")(_ + _)
val y2: String = x2
val x3 = xs.indexWhere(_ % 2 == 0)
val y3: Int = x3
val x4 = xs.head
val y4: Int = x4
val x5 = xs.to(List)
val y5: List[Int] = x5
val (xs6, xs7) = xs.partition(_ % 2 == 0)
val ys6: View[Int] = xs6
val ys7: View[Int] = xs7
val xs8 = xs.drop(2)
val ys8: View[Int] = xs8
val xs9 = xs.map(_ >= 0)
val ys9: View[Boolean] = xs9
val xs10 = xs.flatMap(x => Cons(x, Cons(-x, Nil)))
val ys10: View[Int] = xs10
val xs11 = xs ++ xs
val ys11: View[Int] = xs11
val xs12 = xs ++ Nil
val ys12: View[Int] = xs12
val xs13 = Nil ++ xs
val ys13: List[Int] = xs13
val xs14 = xs ++ Cons("a", Nil)
val ys14: View[Any] = xs14
val xs15 = xs.zip(xs9)
val ys15: View[(Int, Boolean)] = xs15
println("-------")
println(x1)
println(x2)
println(x3)
println(x4)
println(x5)
println(xs6.to(List))
println(xs7.to(List))
println(xs8.to(List))
println(xs9.to(List))
println(xs10.to(List))
println(xs11.to(List))
println(xs12.to(List))
println(xs13.to(List))
println(xs14.to(List))
println(xs15.to(List))
}
def stringOps(xs: String) = {
val x1 = xs.foldLeft("")(_ + _)
val y1: String = x1
val x2 = xs.foldRight("")(_ + _)
val y2: String = x2
val x3 = xs.indexWhere(_ % 2 == 0)
val y3: Int = x3
val x4 = xs.head
val y4: Int = x4
val x5 = xs.to(List)
val y5: List[Char] = x5
val (xs6, xs7) = xs.partition(_ % 2 == 0)
val ys6: String = xs6
val ys7: String = xs7
val xs8 = xs.drop(2)
val ys8: String = xs8
val xs9 = xs.map(_ + 1) // !!! need a language change to make this work without the : Char
val ys9: Seq[Int] = xs9
val xs9a = xs.map(_.toUpper) // !!! need a language change to make this work without the : Char
val ys9a: String = xs9a
val xs10 = xs.flatMap((x: Char) => s"$x,$x")
val ys10: String = xs10
val xs11 = xs ++ xs
val ys11: String = xs11
val xs11a = xs ++ List('x', 'y') // Cons('x', Cons('y', Nil))
val ys11a: String = xs11a
val xs12 = xs ++ Nil
val ys12: String = xs12
val xs13 = Nil ++ xs.iterator
val ys13: List[Char] = xs13
val xs14 = xs ++ Cons("xyz", Nil)
val ys14: Seq[Any] = xs14
val xs15 = xs.zip(xs9)
val ys15: Seq[(Char, Int)] = xs15
println("-------")
println(x1)
println(x2)
println(x3)
println(x4)
println(x5)
println(xs6)
println(xs7)
println(xs8)
println(xs9)
println(xs9a)
println(xs10)
println(xs11)
println(xs11a)
println(xs12)
println(xs13)
println(xs14)
println(xs15)
}
def main(args: Array[String]) = {
val ints = Cons(1, Cons(2, Cons(3, Nil)))
val intsBuf = ints.to(ArrayBuffer)
val intsListBuf = ints.to(ListBuffer)
val intsView = ints.view
seqOps(ints)
seqOps(intsBuf)
seqOps(intsListBuf)
viewOps(intsView)
stringOps("abc")
}
}
| som-snytt/dotty | tests/run/colltest5/CollectionTests_2.scala | Scala | apache-2.0 | 4,538 |
package pages.common
import org.openqa.selenium.WebDriver
import org.scalatest.selenium.WebBrowser.{find, id}
import views.vrm_assign.Main.BackId
object MainPanel {
  /** The back button has been removed from the panel. This is left here in case of refactoring. */
def back(implicit driver: WebDriver) = find(id(BackId)).get
} | dvla/vrm-assign-online | test/pages/common/MainPanel.scala | Scala | mit | 320 |
/*
* Copyright 2017 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.ct600.v3
import uk.gov.hmrc.ct.box._
import uk.gov.hmrc.ct.ct600.v3.retriever.RepaymentsBoxRetriever
import uk.gov.hmrc.ct.box.ValidatableBox._
case class B960_3(value: Option[String]) extends CtBoxIdentifier("Payee Address Line 3")
with CtOptionalString with Input with ValidatableBox[RepaymentsBoxRetriever] {
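  // Payee address line 3, when present, must be 1-28 characters from the restricted
  // non-foreign character set validated below.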
override def validate(boxRetriever: RepaymentsBoxRetriever): Set[CtValidation] = {
validateOptionalStringByLength("B960_3", this, 1, 28) ++
validateOptionalStringByRegex("B960_3", this, ValidNonForeignMoreRestrictiveCharacters)
}
}
| pncampbell/ct-calculations | src/main/scala/uk/gov/hmrc/ct/ct600/v3/B960_3.scala | Scala | apache-2.0 | 1,191 |
/*
* Copyright 2009-2010 LinkedIn, Inc
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.linkedin.norbert
package network
/**
* Base class from which Norbert's networking related exceptions inherit.
*/
class NetworkingException(message: String, cause: Throwable) extends NorbertException(message, cause) {
def this() = this(null, null)
def this(message: String) = this(message, null)
def this(cause: Throwable) = this(cause.getMessage, cause)
}
/**
* Exception that indicates that a method was called after the network system has been shut down.
*/
class NetworkShutdownException extends NetworkingException
/**
* Exception that indicates that an exception occurred remotely while processing a request.
*/
class RemoteException(className: String, errorMsg: String) extends NetworkingException("The remote end threw an exception [%s]: %s".format(className, errorMsg))
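// e.g. (illustrative values) new RemoteException("java.lang.IllegalStateException", "boom").getMessage
// yields "The remote end threw an exception [java.lang.IllegalStateException]: boom"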
/**
* Exception that indicates that a message was received which was not registered with the <code>MessageRegistry</code>.
*/
class InvalidMessageException(errorMsg: String) extends NetworkingException(errorMsg)
/**
* Exception that indicates that a malformed response was received.
*/
class InvalidResponseException(errorMsg: String) extends NetworkingException(errorMsg)
/**
* Exception that indicates that a method has been called before the networking system has been started.
*/
class NetworkNotStartedException extends NetworkingException
/**
* Exception that indicates that no nodes are available to process the message.
*/
class NoNodesAvailableException(errorMsg: String) extends NetworkingException(errorMsg)
/**
* Exception that indicates that a method has been called before the network server has been bound.
*/
class NetworkServerNotBoundException extends NetworkingException
/**
* Exception that indicates that the message was rejected because the waiting queue is full.
*/
class HeavyLoadException extends NetworkingException
/**
 * Exception that indicates that this machine is currently down for GC and should not receive new requests in this slot.
*/
class GcException extends NetworkingException
| linkedin/norbert | network/src/main/scala/com/linkedin/norbert/network/NetworkingException.scala | Scala | apache-2.0 | 2,639 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.execution.columnar
import org.apache.spark.TaskContext
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.dsl.expressions._
import org.apache.spark.sql.catalyst.expressions._
import org.apache.spark.sql.catalyst.plans.QueryPlan
import org.apache.spark.sql.catalyst.plans.physical.Partitioning
import org.apache.spark.sql.execution.{ColumnarBatchScan, LeafExecNode, SparkPlan, WholeStageCodegenExec}
import org.apache.spark.sql.execution.vectorized._
import org.apache.spark.sql.types._
import org.apache.spark.sql.vectorized.{ColumnarBatch, ColumnVector}
case class InMemoryTableScanExec(
attributes: Seq[Attribute],
predicates: Seq[Expression],
@transient relation: InMemoryRelation)
extends LeafExecNode with ColumnarBatchScan {
override protected def innerChildren: Seq[QueryPlan[_]] = Seq(relation) ++ super.innerChildren
override def doCanonicalize(): SparkPlan =
copy(attributes = attributes.map(QueryPlan.normalizeExprId(_, relation.output)),
predicates = predicates.map(QueryPlan.normalizeExprId(_, relation.output)),
relation = relation.canonicalized.asInstanceOf[InMemoryRelation])
override def vectorTypes: Option[Seq[String]] =
Option(Seq.fill(attributes.length)(
if (!conf.offHeapColumnVectorEnabled) {
classOf[OnHeapColumnVector].getName
} else {
classOf[OffHeapColumnVector].getName
}
))
/**
* If true, get data from ColumnVector in ColumnarBatch, which are generally faster.
   * If false, get data from UnsafeRow built from CachedBatch
*/
override val supportsBatch: Boolean = {
    // In the initial implementation, for ease of review, we support only primitive data types
    // and require that the number of fields be less than wholeStageMaxNumFields.
conf.cacheVectorizedReaderEnabled && relation.schema.fields.forall(f => f.dataType match {
case BooleanType | ByteType | ShortType | IntegerType | LongType |
FloatType | DoubleType => true
case _ => false
}) && !WholeStageCodegenExec.isTooManyFields(conf, relation.schema)
}
// TODO: revisit this. Shall we always turn off whole stage codegen if the output data are rows?
override def supportCodegen: Boolean = supportsBatch
private val columnIndices =
attributes.map(a => relation.output.map(o => o.exprId).indexOf(a.exprId)).toArray
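  // e.g. if relation.output is [id, item] and this scan's attributes are [item], then
  // columnIndices is Array(1) -- indices into the cached relation's column order (illustrative).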
private val relationSchema = relation.schema.toArray
private lazy val columnarBatchSchema = new StructType(columnIndices.map(i => relationSchema(i)))
private def createAndDecompressColumn(
cachedColumnarBatch: CachedBatch,
offHeapColumnVectorEnabled: Boolean): ColumnarBatch = {
val rowCount = cachedColumnarBatch.numRows
val taskContext = Option(TaskContext.get())
val columnVectors = if (!offHeapColumnVectorEnabled || taskContext.isEmpty) {
OnHeapColumnVector.allocateColumns(rowCount, columnarBatchSchema)
} else {
OffHeapColumnVector.allocateColumns(rowCount, columnarBatchSchema)
}
val columnarBatch = new ColumnarBatch(columnVectors.asInstanceOf[Array[ColumnVector]])
columnarBatch.setNumRows(rowCount)
for (i <- attributes.indices) {
ColumnAccessor.decompress(
cachedColumnarBatch.buffers(columnIndices(i)),
columnarBatch.column(i).asInstanceOf[WritableColumnVector],
columnarBatchSchema.fields(i).dataType, rowCount)
}
taskContext.foreach(_.addTaskCompletionListener[Unit](_ => columnarBatch.close()))
columnarBatch
}
private lazy val inputRDD: RDD[InternalRow] = {
val buffers = filteredCachedBatches()
val offHeapColumnVectorEnabled = conf.offHeapColumnVectorEnabled
if (supportsBatch) {
// HACK ALERT: This is actually an RDD[ColumnarBatch].
// We're taking advantage of Scala's type erasure here to pass these batches along.
buffers
.map(createAndDecompressColumn(_, offHeapColumnVectorEnabled))
.asInstanceOf[RDD[InternalRow]]
} else {
val numOutputRows = longMetric("numOutputRows")
if (enableAccumulatorsForTest) {
readPartitions.setValue(0)
readBatches.setValue(0)
}
// Using these variables here to avoid serialization of entire objects (if referenced
// directly) within the map Partitions closure.
val relOutput: AttributeSeq = relation.output
filteredCachedBatches().mapPartitionsInternal { cachedBatchIterator =>
// Find the ordinals and data types of the requested columns.
val (requestedColumnIndices, requestedColumnDataTypes) =
attributes.map { a =>
relOutput.indexOf(a.exprId) -> a.dataType
}.unzip
// update SQL metrics
val withMetrics = cachedBatchIterator.map { batch =>
if (enableAccumulatorsForTest) {
readBatches.add(1)
}
numOutputRows += batch.numRows
batch
}
val columnTypes = requestedColumnDataTypes.map {
case udt: UserDefinedType[_] => udt.sqlType
case other => other
}.toArray
val columnarIterator = GenerateColumnAccessor.generate(columnTypes)
columnarIterator.initialize(withMetrics, columnTypes, requestedColumnIndices.toArray)
if (enableAccumulatorsForTest && columnarIterator.hasNext) {
readPartitions.add(1)
}
columnarIterator
}
}
}
override def inputRDDs(): Seq[RDD[InternalRow]] = Seq(inputRDD)
override def output: Seq[Attribute] = attributes
private def updateAttribute(expr: Expression): Expression = {
// attributes can be pruned so using relation's output.
// E.g., relation.output is [id, item] but this scan's output can be [item] only.
val attrMap = AttributeMap(relation.cachedPlan.output.zip(relation.output))
expr.transform {
case attr: Attribute => attrMap.getOrElse(attr, attr)
}
}
// The cached version does not change the outputPartitioning of the original SparkPlan.
// But the cached version could alias output, so we need to replace output.
override def outputPartitioning: Partitioning = {
relation.cachedPlan.outputPartitioning match {
case e: Expression => updateAttribute(e).asInstanceOf[Partitioning]
case other => other
}
}
// The cached version does not change the outputOrdering of the original SparkPlan.
// But the cached version could alias output, so we need to replace output.
override def outputOrdering: Seq[SortOrder] =
relation.cachedPlan.outputOrdering.map(updateAttribute(_).asInstanceOf[SortOrder])
// Keeps relation's partition statistics because we don't serialize relation.
private val stats = relation.partitionStatistics
private def statsFor(a: Attribute) = stats.forAttribute(a)
  // Currently, we only use statistics from atomic types, excluding binary type.
private object ExtractableLiteral {
def unapply(expr: Expression): Option[Literal] = expr match {
case lit: Literal => lit.dataType match {
case BinaryType => None
case _: AtomicType => Some(lit)
case _ => None
}
case _ => None
}
}
// Returned filter predicate should return false iff it is impossible for the input expression
// to evaluate to `true' based on statistics collected about this partition batch.
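  // For example (illustrative), for the predicate `a = 5` the EqualTo case below builds
  // `statsFor(a).lowerBound <= 5 && 5 <= statsFor(a).upperBound`, which is false only when
  // no value in the batch's [lowerBound, upperBound] range could possibly equal 5.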
@transient lazy val buildFilter: PartialFunction[Expression, Expression] = {
case And(lhs: Expression, rhs: Expression)
if buildFilter.isDefinedAt(lhs) || buildFilter.isDefinedAt(rhs) =>
(buildFilter.lift(lhs) ++ buildFilter.lift(rhs)).reduce(_ && _)
case Or(lhs: Expression, rhs: Expression)
if buildFilter.isDefinedAt(lhs) && buildFilter.isDefinedAt(rhs) =>
buildFilter(lhs) || buildFilter(rhs)
case EqualTo(a: AttributeReference, ExtractableLiteral(l)) =>
statsFor(a).lowerBound <= l && l <= statsFor(a).upperBound
case EqualTo(ExtractableLiteral(l), a: AttributeReference) =>
statsFor(a).lowerBound <= l && l <= statsFor(a).upperBound
case EqualNullSafe(a: AttributeReference, ExtractableLiteral(l)) =>
statsFor(a).lowerBound <= l && l <= statsFor(a).upperBound
case EqualNullSafe(ExtractableLiteral(l), a: AttributeReference) =>
statsFor(a).lowerBound <= l && l <= statsFor(a).upperBound
case LessThan(a: AttributeReference, ExtractableLiteral(l)) => statsFor(a).lowerBound < l
case LessThan(ExtractableLiteral(l), a: AttributeReference) => l < statsFor(a).upperBound
case LessThanOrEqual(a: AttributeReference, ExtractableLiteral(l)) =>
statsFor(a).lowerBound <= l
case LessThanOrEqual(ExtractableLiteral(l), a: AttributeReference) =>
l <= statsFor(a).upperBound
case GreaterThan(a: AttributeReference, ExtractableLiteral(l)) => l < statsFor(a).upperBound
case GreaterThan(ExtractableLiteral(l), a: AttributeReference) => statsFor(a).lowerBound < l
case GreaterThanOrEqual(a: AttributeReference, ExtractableLiteral(l)) =>
l <= statsFor(a).upperBound
case GreaterThanOrEqual(ExtractableLiteral(l), a: AttributeReference) =>
statsFor(a).lowerBound <= l
case IsNull(a: Attribute) => statsFor(a).nullCount > 0
case IsNotNull(a: Attribute) => statsFor(a).count - statsFor(a).nullCount > 0
case In(a: AttributeReference, list: Seq[Expression])
if list.forall(ExtractableLiteral.unapply(_).isDefined) && list.nonEmpty =>
list.map(l => statsFor(a).lowerBound <= l.asInstanceOf[Literal] &&
l.asInstanceOf[Literal] <= statsFor(a).upperBound).reduce(_ || _)
}
lazy val partitionFilters: Seq[Expression] = {
predicates.flatMap { p =>
val filter = buildFilter.lift(p)
val boundFilter =
filter.map(
BindReferences.bindReference(
_,
stats.schema,
allowFailures = true))
boundFilter.foreach(_ =>
filter.foreach(f => logInfo(s"Predicate $p generates partition filter: $f")))
// If the filter can't be resolved then we are missing required statistics.
boundFilter.filter(_.resolved)
}
}
lazy val enableAccumulatorsForTest: Boolean =
sqlContext.getConf("spark.sql.inMemoryTableScanStatistics.enable", "false").toBoolean
// Accumulators used for testing purposes
lazy val readPartitions = sparkContext.longAccumulator
lazy val readBatches = sparkContext.longAccumulator
private val inMemoryPartitionPruningEnabled = sqlContext.conf.inMemoryPartitionPruning
private def filteredCachedBatches(): RDD[CachedBatch] = {
// Using these variables here to avoid serialization of entire objects (if referenced directly)
// within the map Partitions closure.
val schema = stats.schema
val schemaIndex = schema.zipWithIndex
val buffers = relation.cacheBuilder.cachedColumnBuffers
buffers.mapPartitionsWithIndexInternal { (index, cachedBatchIterator) =>
val partitionFilter = newPredicate(
partitionFilters.reduceOption(And).getOrElse(Literal(true)),
schema)
partitionFilter.initialize(index)
// Do partition batch pruning if enabled
if (inMemoryPartitionPruningEnabled) {
cachedBatchIterator.filter { cachedBatch =>
if (!partitionFilter.eval(cachedBatch.stats)) {
logDebug {
val statsString = schemaIndex.map { case (a, i) =>
val value = cachedBatch.stats.get(i, a.dataType)
s"${a.name}: $value"
}.mkString(", ")
s"Skipping partition based on stats $statsString"
}
false
} else {
true
}
}
} else {
cachedBatchIterator
}
}
}
protected override def doExecute(): RDD[InternalRow] = {
if (supportsBatch) {
WholeStageCodegenExec(this)(codegenStageId = 0).execute()
} else {
inputRDD
}
}
}
| WindCanDie/spark | sql/core/src/main/scala/org/apache/spark/sql/execution/columnar/InMemoryTableScanExec.scala | Scala | apache-2.0 | 12,719 |
package latis.reader.tsml
import latis.reader.tsml.ml.Tsml
/**
 * Use column index properties to parse ASCII tabular data.
 * Maps variable names to zero-based column indices.
 * Variable specs are separated by ";", and the column indices
 * of a multi-column variable are separated by ",",
 * e.g. columns="0,1,2;5;3;4".
*/
class ColumnarAdapter(tsml: Tsml) extends AsciiAdapter(tsml) {
lazy val columnIndices: Seq[Array[Int]] = getProperty("columns") match {
case Some(s: String) => s.split(";").map(p => p.split(",").map(_.toInt))
case None => throw new RuntimeException("ColumnarAdapter requires 'columns' definition.")
}
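  // e.g. columns="0,1,2;5" parses to Seq(Array(0, 1, 2), Array(5)) (illustrative value)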
override def extractValues(record: String): Seq[String] = {
//Note, trim record first to deal with leading white space.
val ss = splitAtDelim(record).filterNot(_.isEmpty)
if(ss.length <= columnIndices.flatten.max) List() //Ignore rows with fewer columns than those requested
else columnIndices.map(is => is.map(ss(_)).mkString(" ")) //append with " " for now since delimiter could be a regex
}
} | dlindhol/LaTiS | src/main/scala/latis/reader/tsml/ColumnarAdapter.scala | Scala | epl-1.0 | 1,076 |
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package controllers.renewal
import connectors.DataCacheConnector
import controllers.actions.SuccessfulAuthAction
import models.Country
import models.businessmatching.{BusinessActivities => BMBusinessActivities, _}
import models.registrationprogress.{Completed, Section}
import models.renewal._
import org.mockito.ArgumentCaptor
import org.mockito.Matchers.{eq => eqTo, _}
import org.mockito.Mockito._
import org.scalatestplus.mockito.MockitoSugar
import play.api.mvc.Call
import play.api.test.Helpers._
import services.{ProgressService, RenewalService, SectionsProvider}
import uk.gov.hmrc.http.cache.client.CacheMap
import utils.AmlsSpec
import views.html.renewal.summary
import scala.concurrent.Future
class SummaryControllerSpec extends AmlsSpec with MockitoSugar {
trait Fixture {
self =>
val request = addToken(authRequest)
val mockCacheMap = mock[CacheMap]
val emptyCache = CacheMap("", Map.empty)
lazy val mockDataCacheConnector = mock[DataCacheConnector]
lazy val mockRenewalService = mock[RenewalService]
lazy val mockProgressService = mock[ProgressService]
lazy val mockSectionsProvider = mock[SectionsProvider]
lazy val view = app.injector.instanceOf[summary]
val controller = new SummaryController(
dataCacheConnector = mockDataCacheConnector,
authAction = SuccessfulAuthAction, ds = commonDependencies,
renewalService = mockRenewalService, cc = mockMcc,
progressService = mockProgressService,
sectionsProvider = mockSectionsProvider,
summary = view
)
when {
mockSectionsProvider.sections(any[CacheMap])
} thenReturn Seq.empty[Section]
when {
mockRenewalService.getSection(any())(any(),any())
} thenReturn Future.successful(Section("", Completed, false, mock[Call]))
val renewalModel = Renewal(
Some(models.renewal.InvolvedInOtherYes("test")),
Some(BusinessTurnover.First),
Some(AMLSTurnover.First),
Some(AMPTurnover.First),
Some(CustomersOutsideIsUK(true)),
Some(CustomersOutsideUK(Some(Seq(Country("United Kingdom", "GB"))))),
Some(PercentageOfCashPaymentOver15000.First),
Some(CashPayments(CashPaymentsCustomerNotMet(true), Some(HowCashPaymentsReceived(PaymentMethods(true,true,Some("other")))))),
Some(TotalThroughput("01")),
Some(WhichCurrencies(Seq("EUR"),None,Some(MoneySources(None,None,None)))),
Some(TransactionsInLast12Months("1500")),
Some(SendTheLargestAmountsOfMoney(Seq(Country("us", "US")))),
Some(MostTransactions(Seq(Country("United Kingdom", "GB")))),
Some(CETransactionsInLast12Months("123")),
Some(FXTransactionsInLast12Months("12")),
false,
hasAccepted = true)
}
val mockCacheMap = mock[CacheMap]
val bmBusinessActivities = Some(BMBusinessActivities(Set(MoneyServiceBusiness, TrustAndCompanyServices, TelephonePaymentService)))
"Get" must {
"load the summary page when there is data in the renewal" in new Fixture {
when(mockDataCacheConnector.fetchAll(any())(any()))
.thenReturn(Future.successful(Some(mockCacheMap)))
when(mockCacheMap.getEntry[Renewal](Renewal.key))
.thenReturn(Some(Renewal(Some(models.renewal.InvolvedInOtherYes("test")))))
when(mockCacheMap.getEntry[BusinessMatching](BusinessMatching.key))
.thenReturn(Some(BusinessMatching(activities = bmBusinessActivities)))
val result = controller.get()(request)
status(result) must be(OK)
}
"redirect to the renewal progress page when section data is unavailable" in new Fixture {
when(mockDataCacheConnector.fetchAll(any())(any()))
.thenReturn(Future.successful(Some(emptyCache)))
val result = controller.get()(request)
status(result) must be(SEE_OTHER)
}
}
"POST" must {
"update the hasAccepted flag on the model" in new Fixture {
val cache = mock[CacheMap]
when {
controller.dataCacheConnector.fetch[Renewal](any(), any())(any(), any())
} thenReturn Future.successful(Some(renewalModel.copy(hasAccepted = false)))
when {
controller.dataCacheConnector.save[Renewal](any(), eqTo(Renewal.key), any())(any(), any())
} thenReturn Future.successful(cache)
val result = controller.post()(request)
status(result) mustBe SEE_OTHER
redirectLocation(result) mustBe Some(controllers.renewal.routes.RenewalProgressController.get.url)
val captor = ArgumentCaptor.forClass(classOf[Renewal])
verify(controller.dataCacheConnector).save[Renewal](any(), eqTo(Renewal.key), captor.capture())(any(), any())
captor.getValue.hasAccepted mustBe true
}
}
}
| hmrc/amls-frontend | test/controllers/renewal/SummaryControllerSpec.scala | Scala | apache-2.0 | 5,280 |
package io.github.electricmind
import java.io.ByteArrayInputStream
import io.github.electricmind.autocomplete.{Autocomplete, NGram2Probabilities, NGram2Words, NGrams, Vocabulary}
import org.scalatest.{FlatSpec, Matchers}
class testAutocomplete extends FlatSpec with Matchers {
val ac = new Autocomplete(0.8, 2, _ => true, identity)
val sample = """
Only a few flies was flying around a corpse at the morning.
"""
def n2ws = NGram2Words(vocabulary)
def vocabulary = Vocabulary(samplestream)
def samplestream = new ByteArrayInputStream(sample.getBytes);
def ngs = NGrams(n2ps.keySet)
def n2ps = NGram2Probabilities(NGram2Words(vocabulary))
"An n2ps" should "contain a,was" in {
n2ps("a") shouldEqual 0.032 +- 0.001
n2ps("was") shouldEqual 0.0081 +- 0.001
}
"An autocomplete" should "select words with 80% probability" in {
println(ac.select(
n2ps, ngs).toList)
ac.select(n2ps, ngs) should contain allOf("e", "n", "a", "s", "f")
}
it should "make a map from ngrams" in {
ac.ngram2Words(List("a", "was"), n2ws) should contain only(
("a", Set("around", "a", "was", "at")),
("was", Set("was")))
}
it should "make ngrams" in {
ac.ngrams(n2ps, n2ws, ngs) should contain allOf("ew", "mo")
}
} | electricmind/autocomplete | src/test/scala/autocomplete/testAutocomplete.scala | Scala | apache-2.0 | 1,294 |
// Copyright 2015 Ricardo Gladwell.
// Licensed under the GNU Affero General Public License.
// See the LICENSE file for more information.
package com.is_hosted_by.api
import org.specs2.mutable.{After, Specification}
import dispatch.classic._
import java.net.InetAddress
import unfiltered.specs2.netty.Served
import org.specs2.specification.Scope
import unfiltered.specs2.Hosted
import javax.servlet.Filter
import org.specs2.mock.Mockito
import scala.io.Source
import org.specs2.matcher.XmlMatchers
import scala.xml.XML
import org.ccil.cowan.tagsoup.jaxp.SAXFactoryImpl
import java.net.URLEncoder
import test.TestConfiguration
import test.MockHtmlViews
import scala.concurrent.Future
import io.netty.channel.ChannelHandler
import java.net.UnknownHostException
import net.IpPrefix
import scala.concurrent.ExecutionContext
object ApiSpec extends Specification with Mocks with XmlMatchers {
import dispatch._
val hostedIpAddress = mock[InetAddress]
val unhostedIpAddress = mock[InetAddress]
trait MockNetworks extends Networks {
import scala.concurrent.ExecutionContext.Implicits.global
val networksMock = mock[() => Future[Seq[Network]]]
override def networks()(implicit executor: ExecutionContext) = networksMock()
networksMock() returns Future(Seq(mockNetwork))
object mockNetwork extends Network with MockDns {
case class MockIpPrefix(range: InetAddress) extends IpPrefix {
def inRange(address: InetAddress): Boolean = (address == range)
}
override val name = "mock-network"
override val ipRanges = Seq(MockIpPrefix(hostedIpAddress))
resolve(anyString) returns Future{ throw new RuntimeException("malformed input") }
resolve("hosted") returns Future{ hostedIpAddress }
resolve("unhosted") returns Future{ unhostedIpAddress }
resolve("unknown") returns Future{ throw new UnknownHostException("unknown host") }
}
}
trait ServedScope extends Hosted with Scope with Cors with After {
this: ChannelHandler =>
import unfiltered.netty._
lazy val server = Server.http(port).handler(cors).handler(this)
server.start()
def after = {
server.stop()
server.destroy()
}
}
trait TestApiScope extends Api
with MockHtmlViews
with MockNetworks
with TestConfiguration
with ServedScope {
def endpoint = url(s"http://localhost:$port")
}
"The HTTP API index endpoint" should {
"on GET request" in {
"return OK response" in new TestApiScope {
status(endpoint) must_== 200
}
"allow cross-origin requests" in new TestApiScope {
headers(endpoint <:< Map("Origin" -> "http://localhost")) must havePair("Access-Control-Allow-Origin" -> "*")
}
}
}
"The HTTP API address lookup endpoint" should {
"on network lookup" in {
"return OK response" in new TestApiScope {
status(endpoint / "?address=unhosted") must_== 200
}
"return hosted view for hosted address" in new TestApiScope {
        html(body(endpoint / "?address=hosted")) must \\("span", "id" -> "is-aws") \> "true"
}
"return network name for hosted address" in new TestApiScope {
        html(body(endpoint / "?address=hosted")) must \\("span", "id" -> "network-name") \> "mock-network"
}
"return unhosted view for unhosted address" in new TestApiScope {
        html(body(endpoint / "?address=unhosted")) must \\("span", "id" -> "is-aws") \> "false"
}
"return error for error on DNS lookup" in new TestApiScope {
status(endpoint / "?address=error") must_== 500
}
"return error view for error on DNS lookup" in new TestApiScope {
        html(body(endpoint / "?address=error")) must \\("div", "id" -> "error")
}
"return error view for error on aquiring network IP range" in new TestApiScope {
networksMock() returns Future{ throw new RuntimeException("mock exception") }
        html(body(endpoint / "?address=unhosted")) must \\("div", "id" -> "error")
}
"allow cross-origin requests" in new TestApiScope {
headers(endpoint / "?address=unhosted" <:< Map("Origin" -> "http://localhost")) must havePair("Access-Control-Allow-Origin" -> "*")
}
"handle URLs" in new TestApiScope {
val encoded = URLEncoder.encode("http://hosted:8080/path?name=value", "UTF-8");
        html(body(endpoint / s"?address=$encoded")) must \\("span", "id" -> "is-aws") \> "true"
}
"return not found for unknown DNS lookup" in new TestApiScope {
status(endpoint / "?address=unknown") must_== 404
}
"return DNS record does not exist validation message for non-existant DNS record" in new TestApiScope {
        html(body(endpoint / "?address=unknown")) must \\ ("p", "class" -> "dns-does-not-exist")
}
"return query result for non-existant DNS record" in new TestApiScope {
        html(body(endpoint / s"?address=unknown")) must \\("span", "id" -> "is-aws") \> "false"
}
object negativeMockNetwork extends Network with MockDns {
case class FailingMockIpPrefix() extends IpPrefix {
def inRange(address: InetAddress): Boolean = false
}
override val ipRanges = Seq(FailingMockIpPrefix())
override val name = "FailingMock Network"
resolve("hosted") returns Future{ hostedIpAddress }
}
"return success if only last result is positive" in new TestApiScope {
networksMock() returns Future{ Seq( negativeMockNetwork, mockNetwork ) }
        html(body(endpoint / "?address=hosted")) must \\("span", "id" -> "is-aws") \> "true"
}
}
}
def GET(request: Request): Unit = status(request)
def status(request: Request) = new Http x (request as_str) {
case (code, _, _, _) => code
}
def body(request: Request): String = new Http x (request as_str) {
case (_, response, _, _) => {
val content = response.getEntity.getContent
      Source.fromInputStream(content).getLines().mkString("\n")
}
}
def html(content: String) = {
val parser = XML.withSAXParser(new SAXFactoryImpl().newSAXParser())
parser.loadString(content)
}
def headers(request: Request) = new Http x (request as_str) {
case (_, response, _, _) => {
val headers = response.getAllHeaders.toList
(headers.map { header => (header.getName, header.getValue) }).toMap
}
}
}
| rgladwell/is-aws-api | src/test/scala/com/is_hosted_by/api/ApiSpec.scala | Scala | agpl-3.0 | 6,429 |
package yuima.algovis.maze.gen
import yuima.algovis.maze.core.FieldType._
import yuima.algovis.maze.core.{Direction, MazeOnGeneration}
import scala.annotation.tailrec
import scala.util.Random
 * A maze generator using the depth-first search algorithm.
*
* @author Yuichiroh Matsubayashi
* Created on 15/08/24.
*/
object DepthFirst extends GenerationAlgorithm {
  /** Generates a maze using depth-first search. */
def apply(implicit maze: MazeOnGeneration) = {
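    // The base grid is (2 * width + 1) x (2 * height + 1): cells sit at odd (x, y)
    // coordinates and the walls between them occupy the even rows and columns.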
val m = maze.base
generate()
@tailrec
def generate(): Unit = {
val passage = createInitialPassage
createPassages(passage)
maze(m.start.x, m.start.y) = START
val success = createGoal
if (!success) {
(0 to 2 * m.width).foreach { i =>
(0 to 2 * m.height).foreach { j => maze(i, j) = WALL }
}
generate()
}
}
def createInitialPassage = {
val x = Random.nextInt(m.width - 1) * 2 + 1
val y = Random.nextInt(m.height - 1) * 2 + 1
maze(x, y) = PASSAGE
(x, y)
}
@tailrec
def createPassages(passage: (Int, Int), candidates: List[(Int, Int, Direction.Value)] = Nil): Unit = {
val (x, y) = passage
val cs = m.Position.adjacent(WALL)(x, y)
.collect { case w if m.Position.opposite(WALL).tupled(w).isDefined => w }
Random.shuffle(cs) ::: candidates match {
case head :: tail =>
val p = createPassage(head)
createPassages(p, tail.filter(m.Position.opposite(WALL).tupled(_).isDefined))
case Nil =>
}
}
def createPassage(wall: (Int, Int, Direction.Value)) = {
val (x, y, dir) = wall
val passage@(x2, y2) = m.Position.opposite(WALL)(x, y, dir).get
maze(x, y) = PASSAGE
maze(x2, y2) = PASSAGE
passage
}
}
}
| Yuichiroh/algovis | src/main/scala/yuima/algovis/maze/gen/DepthFirst.scala | Scala | mit | 1,829 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.openwhisk.common
import common.ConcurrencyHelpers
import org.apache.openwhisk.utils.ExecutionContextFactory
import org.junit.runner.RunWith
import org.scalatest.FlatSpec
import org.scalatest.Matchers
import org.scalatest.junit.JUnitRunner
import scala.concurrent.duration.DurationInt
@RunWith(classOf[JUnitRunner])
class NestedSemaphoreTests extends FlatSpec with Matchers with ConcurrencyHelpers {
// use an infinite thread pool to allow for maximum concurrency
implicit val executionContext = ExecutionContextFactory.makeCachedThreadPoolExecutionContext()
val acquireTimeout = 1.minute
behavior of "NestedSemaphore"
it should "allow acquire of concurrency permits before acquire of memory permits" in {
val s = new NestedSemaphore[String](20)
s.availablePermits shouldBe 20
val actionId = "action1"
val actionConcurrency = 5
val actionMemory = 3
//use all concurrency on a single slot
concurrently(5, acquireTimeout) {
s.tryAcquireConcurrent(actionId, actionConcurrency, actionMemory)
} should contain only true
s.availablePermits shouldBe 20 - 3 //we used a single container (memory == 3)
s.concurrentState(actionId).availablePermits shouldBe 0
//use up all the remaining memory (17) and concurrency slots (17 / 3 * 5 = 25)
concurrently(25, acquireTimeout) {
s.tryAcquireConcurrent(actionId, actionConcurrency, actionMemory)
} should contain only true
s.availablePermits shouldBe 2 //we used 18 (20/3 = 6, 6*3=18)
s.concurrentState(actionId).availablePermits shouldBe 0
s.tryAcquireConcurrent("action1", actionConcurrency, actionMemory) shouldBe false
}
it should "not give away more permits even under concurrent load" in {
// 100 iterations of this test
(0 until 100).foreach { _ =>
val s = new NestedSemaphore(32)
// try to acquire more permits than allowed in parallel
val acquires = concurrently(64, acquireTimeout)(s.tryAcquire())
val result = Seq.fill(32)(true) ++ Seq.fill(32)(false)
acquires should contain theSameElementsAs result
}
}
}
| jeremiaswerner/openwhisk | tests/src/test/scala/org/apache/openwhisk/common/NestedSemaphoreTests.scala | Scala | apache-2.0 | 2,917 |
import scala.reflect.runtime.universe._
import scala.reflect.runtime.{currentMirror => cm}
object Test extends dotty.runtime.LegacyApp {
class A {
private[this] var x: Int = 42
}
val a = new A
val im: InstanceMirror = cm.reflect(a)
val cs = im.symbol
val f = cs.info.decl(TermName("x")).asTerm
val fm: FieldMirror = im.reflectField(f)
println(fm.symbol.isVar)
println(fm.get)
fm.set(2)
println(fm.get)
}
| folone/dotty | tests/pending/run/reflection-fieldmirror-privatethis.scala | Scala | bsd-3-clause | 433 |
/**
* Licensed to Big Data Genomics (BDG) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The BDG licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.bdgenomics.utils.instrumentation
import org.bdgenomics.utils.misc.SparkFunSuite
import org.scalatest.concurrent.{ Eventually, IntegrationPatience }
class MetricsListenerSuite extends SparkFunSuite with Eventually with IntegrationPatience {
sparkTest("Listener accumulates metrics when registered with Spark") {
val metrics = new RecordedMetrics()
val listener = new MetricsListener(metrics)
sc.addSparkListener(listener)
// Doesn't really matter what we do here -- we just need to do something that spawns some tasks
val accumulator = sc.accumulator(0)
sc.parallelize(Array(1, 2, 3, 4, 5, 6, 7, 8), numSlices = 8).foreach(x => {
accumulator += x
})
eventually {
assert(accumulator.value === 1 + 2 + 3 + 4 + 5 + 6 + 7 + 8)
}
eventually {
// There's nothing sensible we can assert based on the timings, so just assert based on the counts
assert(metrics.sparkMetrics.duration.getOverallTimings.getCount === 8)
assert(metrics.sparkMetrics.executorRunTime.getOverallTimings.getCount === 8)
assert(metrics.sparkMetrics.executorDeserializeTime.getOverallTimings.getCount === 8)
assert(metrics.sparkMetrics.resultSerializationTime.getOverallTimings.getCount === 8)
assert(metrics.sparkMetrics.stageTimes.iterator.hasNext)
}
}
}
| tdanford/bdg-utils | utils-metrics/src/test/scala/org/bdgenomics/utils/instrumentation/MetricsListenerSuite.scala | Scala | apache-2.0 | 2,131 |
object Test extends App {
def foo(x: Int)(y: Int): Unit = macro Impls.foo
foo(40)(2)
} | som-snytt/dotty | tests/disabled/macro/run/macro-expand-multiple-arglists/Macros_Test_2.scala | Scala | apache-2.0 | 90 |
package japgolly.scalajs.react.test
import scala.scalajs.js
import scala.scalajs.js.Dynamic
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
// NOTE: Do not use UndefOr for arguments below; undefined causes Phantom-bloody-JS to crash.
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
object SimEvent {
case class Change(value : String = "",
checked : js.UndefOr[Boolean] = js.undefined,
defaultPrevented: Boolean = false) {
def toJs: js.Object = {
val target = Dynamic.literal(
"value" -> value,
"checked" -> checked,
"defaultPrevented" -> defaultPrevented)
val o = Dynamic.literal("target" -> target)
o
}
def simulate(t: ReactOrDomNode) = Simulate.change(t, this)
def simulation = Simulation.change(this)
}
object Change {
implicit def autoToJsObject(d: Change): js.Object = d.toJs
}
// ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
case class Keyboard(key : String = "",
location : Double = 0,
altKey : Boolean = false,
ctrlKey : Boolean = false,
metaKey : Boolean = false,
shiftKey : Boolean = false,
repeat : Boolean = false,
locale : String = "",
keyCode : Int = 0,
charCode : Int = 0,
which : Int = 0,
defaultPrevented: Boolean = false) {
def alt = copy(altKey = true)
def ctrl = copy(ctrlKey = true)
def meta = copy(metaKey = true)
def shift = copy(shiftKey = true)
def desc: String = {
var s = key
if (s.isEmpty) s = s"($keyCode)"
if (shiftKey ) s = "Shift-" + s
if (altKey ) s = "Alt-" + s
if (ctrlKey ) s = "Ctrl-" + s
if (metaKey ) s = "Meta-" + s
s
}
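    // e.g. Keyboard.A.ctrl.desc == "Ctrl-A"; an event with an empty key falls back to "(keyCode)"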
def toJs: js.Object = {
val o = Dynamic.literal()
o.updateDynamic("key" )(key )
o.updateDynamic("location" )(location )
o.updateDynamic("altKey" )(altKey )
o.updateDynamic("ctrlKey" )(ctrlKey )
o.updateDynamic("metaKey" )(metaKey )
o.updateDynamic("shiftKey" )(shiftKey )
o.updateDynamic("repeat" )(repeat )
o.updateDynamic("locale" )(locale )
o.updateDynamic("keyCode" )(keyCode )
o.updateDynamic("charCode" )(charCode )
o.updateDynamic("which" )(which )
o.updateDynamic("defaultPrevented")(defaultPrevented)
o
}
def simulateKeyDown (t: ReactOrDomNode): Unit = Simulate.keyDown (t, this)
def simulateKeyPress (t: ReactOrDomNode): Unit = Simulate.keyPress(t, this)
def simulateKeyUp (t: ReactOrDomNode): Unit = Simulate.keyUp (t, this)
def simulateKeyDownUp (t: ReactOrDomNode): Unit = {simulateKeyDown(t); simulateKeyUp(t)}
def simulateKeyDownPressUp(t: ReactOrDomNode): Unit = {simulateKeyDown(t); simulateKeyPress(t); simulateKeyUp(t)}
def simulationKeyDown = Simulation.keyDown(this)
def simulationKeyPress = Simulation.keyPress(this)
def simulationKeyUp = Simulation.keyUp(this)
def simulationKeyDownUp = simulationKeyDown >> simulationKeyUp
def simulationKeyDownPressUp = simulationKeyDown >> simulationKeyPress >> simulationKeyUp
}
object Keyboard {
implicit def autoToJsObject(d: Keyboard): js.Object = d.toJs
import org.scalajs.dom.ext.{KeyCode, KeyValue}
def Alt : Keyboard = apply(key = KeyValue.Alt , keyCode = KeyCode.Alt)
def Ctrl : Keyboard = apply(key = KeyValue.Control , keyCode = KeyCode.Ctrl)
def CapsLock : Keyboard = apply(key = KeyValue.CapsLock , keyCode = KeyCode.CapsLock)
def Shift : Keyboard = apply(key = KeyValue.Shift , keyCode = KeyCode.Shift)
def Backspace: Keyboard = apply(key = KeyValue.Backspace , keyCode = KeyCode.Backspace)
def Enter : Keyboard = apply(key = KeyValue.Enter , keyCode = KeyCode.Enter)
def Escape : Keyboard = apply(key = KeyValue.Escape , keyCode = KeyCode.Escape)
def Pause : Keyboard = apply(key = KeyValue.Pause , keyCode = KeyCode.Pause)
def Space : Keyboard = apply(key = KeyValue.Spacebar , keyCode = KeyCode.Space)
def Tab : Keyboard = apply(key = KeyValue.Tab , keyCode = KeyCode.Tab)
def Down : Keyboard = apply(key = KeyValue.ArrowDown , keyCode = KeyCode.Down)
def Left : Keyboard = apply(key = KeyValue.ArrowLeft , keyCode = KeyCode.Left)
def Right : Keyboard = apply(key = KeyValue.ArrowRight, keyCode = KeyCode.Right)
def Up : Keyboard = apply(key = KeyValue.ArrowUp , keyCode = KeyCode.Up)
def Insert : Keyboard = apply(key = KeyValue.Insert , keyCode = KeyCode.Insert)
def Delete : Keyboard = apply(key = KeyValue.Delete , keyCode = KeyCode.Delete)
def Home : Keyboard = apply(key = KeyValue.Home , keyCode = KeyCode.Home)
def End : Keyboard = apply(key = KeyValue.End , keyCode = KeyCode.End)
def PageUp : Keyboard = apply(key = KeyValue.PageUp , keyCode = KeyCode.PageUp)
def PageDown : Keyboard = apply(key = KeyValue.PageDown , keyCode = KeyCode.PageDown)
def F1 : Keyboard = apply(key = KeyValue.F1 , keyCode = KeyCode.F1)
def F2 : Keyboard = apply(key = KeyValue.F2 , keyCode = KeyCode.F2)
def F3 : Keyboard = apply(key = KeyValue.F3 , keyCode = KeyCode.F3)
def F4 : Keyboard = apply(key = KeyValue.F4 , keyCode = KeyCode.F4)
def F5 : Keyboard = apply(key = KeyValue.F5 , keyCode = KeyCode.F5)
def F6 : Keyboard = apply(key = KeyValue.F6 , keyCode = KeyCode.F6)
def F7 : Keyboard = apply(key = KeyValue.F7 , keyCode = KeyCode.F7)
def F8 : Keyboard = apply(key = KeyValue.F8 , keyCode = KeyCode.F8)
def F9 : Keyboard = apply(key = KeyValue.F9 , keyCode = KeyCode.F9)
def F10 : Keyboard = apply(key = KeyValue.F10 , keyCode = KeyCode.F10)
def F11 : Keyboard = apply(key = KeyValue.F11 , keyCode = KeyCode.F11)
def F12 : Keyboard = apply(key = KeyValue.F12 , keyCode = KeyCode.F12)
def Num0 : Keyboard = apply(key = "0" , keyCode = KeyCode.Num0)
def Num1 : Keyboard = apply(key = "1" , keyCode = KeyCode.Num1)
def Num2 : Keyboard = apply(key = "2" , keyCode = KeyCode.Num2)
def Num3 : Keyboard = apply(key = "3" , keyCode = KeyCode.Num3)
def Num4 : Keyboard = apply(key = "4" , keyCode = KeyCode.Num4)
def Num5 : Keyboard = apply(key = "5" , keyCode = KeyCode.Num5)
def Num6 : Keyboard = apply(key = "6" , keyCode = KeyCode.Num6)
def Num7 : Keyboard = apply(key = "7" , keyCode = KeyCode.Num7)
def Num8 : Keyboard = apply(key = "8" , keyCode = KeyCode.Num8)
def Num9 : Keyboard = apply(key = "9" , keyCode = KeyCode.Num9)
def A : Keyboard = apply(key = "A" , keyCode = KeyCode.A)
def B : Keyboard = apply(key = "B" , keyCode = KeyCode.B)
def C : Keyboard = apply(key = "C" , keyCode = KeyCode.C)
def D : Keyboard = apply(key = "D" , keyCode = KeyCode.D)
def E : Keyboard = apply(key = "E" , keyCode = KeyCode.E)
def F : Keyboard = apply(key = "F" , keyCode = KeyCode.F)
def G : Keyboard = apply(key = "G" , keyCode = KeyCode.G)
def H : Keyboard = apply(key = "H" , keyCode = KeyCode.H)
def I : Keyboard = apply(key = "I" , keyCode = KeyCode.I)
def J : Keyboard = apply(key = "J" , keyCode = KeyCode.J)
def K : Keyboard = apply(key = "K" , keyCode = KeyCode.K)
def L : Keyboard = apply(key = "L" , keyCode = KeyCode.L)
def M : Keyboard = apply(key = "M" , keyCode = KeyCode.M)
def N : Keyboard = apply(key = "N" , keyCode = KeyCode.N)
def O : Keyboard = apply(key = "O" , keyCode = KeyCode.O)
def P : Keyboard = apply(key = "P" , keyCode = KeyCode.P)
def Q : Keyboard = apply(key = "Q" , keyCode = KeyCode.Q)
def R : Keyboard = apply(key = "R" , keyCode = KeyCode.R)
def S : Keyboard = apply(key = "S" , keyCode = KeyCode.S)
def T : Keyboard = apply(key = "T" , keyCode = KeyCode.T)
def U : Keyboard = apply(key = "U" , keyCode = KeyCode.U)
def V : Keyboard = apply(key = "V" , keyCode = KeyCode.V)
def W : Keyboard = apply(key = "W" , keyCode = KeyCode.W)
def X : Keyboard = apply(key = "X" , keyCode = KeyCode.X)
def Y : Keyboard = apply(key = "Y" , keyCode = KeyCode.Y)
def Z : Keyboard = apply(key = "Z" , keyCode = KeyCode.Z)
def a : Keyboard = apply(key = "a" , keyCode = KeyCode.A)
def b : Keyboard = apply(key = "b" , keyCode = KeyCode.B)
def c : Keyboard = apply(key = "c" , keyCode = KeyCode.C)
def d : Keyboard = apply(key = "d" , keyCode = KeyCode.D)
def e : Keyboard = apply(key = "e" , keyCode = KeyCode.E)
def f : Keyboard = apply(key = "f" , keyCode = KeyCode.F)
def g : Keyboard = apply(key = "g" , keyCode = KeyCode.G)
def h : Keyboard = apply(key = "h" , keyCode = KeyCode.H)
def i : Keyboard = apply(key = "i" , keyCode = KeyCode.I)
def j : Keyboard = apply(key = "j" , keyCode = KeyCode.J)
def k : Keyboard = apply(key = "k" , keyCode = KeyCode.K)
def l : Keyboard = apply(key = "l" , keyCode = KeyCode.L)
def m : Keyboard = apply(key = "m" , keyCode = KeyCode.M)
def n : Keyboard = apply(key = "n" , keyCode = KeyCode.N)
def o : Keyboard = apply(key = "o" , keyCode = KeyCode.O)
def p : Keyboard = apply(key = "p" , keyCode = KeyCode.P)
def q : Keyboard = apply(key = "q" , keyCode = KeyCode.Q)
def r : Keyboard = apply(key = "r" , keyCode = KeyCode.R)
def s : Keyboard = apply(key = "s" , keyCode = KeyCode.S)
def t : Keyboard = apply(key = "t" , keyCode = KeyCode.T)
def u : Keyboard = apply(key = "u" , keyCode = KeyCode.U)
def v : Keyboard = apply(key = "v" , keyCode = KeyCode.V)
def w : Keyboard = apply(key = "w" , keyCode = KeyCode.W)
def x : Keyboard = apply(key = "x" , keyCode = KeyCode.X)
def y : Keyboard = apply(key = "y" , keyCode = KeyCode.Y)
def z : Keyboard = apply(key = "z" , keyCode = KeyCode.Z)
}
// ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
case class Mouse(screenX : Double = 0,
screenY : Double = 0,
clientX : Double = 0,
clientY : Double = 0,
pageX : Double = 0,
pageY : Double = 0,
altKey : Boolean = false,
ctrlKey : Boolean = false,
metaKey : Boolean = false,
shiftKey : Boolean = false,
button : Int = 0,
buttons : Int = 0,
defaultPrevented: Boolean = false) {
def alt = copy(altKey = true)
def ctrl = copy(ctrlKey = true)
def meta = copy(metaKey = true)
def shift = copy(shiftKey = true)
def toJs: js.Object = {
val o = Dynamic.literal()
o.updateDynamic("screenX" )(screenX )
o.updateDynamic("screenY" )(screenY )
o.updateDynamic("clientX" )(clientX )
o.updateDynamic("clientY" )(clientY )
o.updateDynamic("pageX" )(pageX )
o.updateDynamic("pageY" )(pageY )
o.updateDynamic("altKey" )(altKey )
o.updateDynamic("ctrlKey" )(ctrlKey )
o.updateDynamic("metaKey" )(metaKey )
o.updateDynamic("shiftKey" )(shiftKey )
o.updateDynamic("button" )(button )
o.updateDynamic("buttons" )(buttons )
o.updateDynamic("defaultPrevented")(defaultPrevented)
o
}
def simulateDrag (t: ReactOrDomNode) = Simulate.drag (t, this)
def simulateDragEnd (t: ReactOrDomNode) = Simulate.dragEnd (t, this)
def simulateDragEnter (t: ReactOrDomNode) = Simulate.dragEnter (t, this)
def simulateDragExit (t: ReactOrDomNode) = Simulate.dragExit (t, this)
def simulateDragLeave (t: ReactOrDomNode) = Simulate.dragLeave (t, this)
def simulateDragOver (t: ReactOrDomNode) = Simulate.dragOver (t, this)
def simulateDragStart (t: ReactOrDomNode) = Simulate.dragStart (t, this)
def simulateDrop (t: ReactOrDomNode) = Simulate.drop (t, this)
def simulateMouseDown (t: ReactOrDomNode) = Simulate.mouseDown (t, this)
def simulateMouseEnter(t: ReactOrDomNode) = Simulate.mouseEnter(t, this)
def simulateMouseLeave(t: ReactOrDomNode) = Simulate.mouseLeave(t, this)
def simulateMouseMove (t: ReactOrDomNode) = Simulate.mouseMove (t, this)
def simulateMouseOut (t: ReactOrDomNode) = Simulate.mouseOut (t, this)
def simulateMouseOver (t: ReactOrDomNode) = Simulate.mouseOver (t, this)
def simulateMouseUp (t: ReactOrDomNode) = Simulate.mouseUp (t, this)
def simulateWheel (t: ReactOrDomNode) = Simulate.wheel (t, this)
def simulationDrag = Simulation.drag (this)
def simulationDragEnd = Simulation.dragEnd (this)
def simulationDragEnter = Simulation.dragEnter (this)
def simulationDragExit = Simulation.dragExit (this)
def simulationDragLeave = Simulation.dragLeave (this)
def simulationDragOver = Simulation.dragOver (this)
def simulationDragStart = Simulation.dragStart (this)
def simulationMouseDown = Simulation.mouseDown (this)
def simulationMouseEnter = Simulation.mouseEnter(this)
def simulationMouseLeave = Simulation.mouseLeave(this)
def simulationMouseMove = Simulation.mouseMove (this)
def simulationMouseOut = Simulation.mouseOut (this)
def simulationMouseOver = Simulation.mouseOver (this)
def simulationMouseUp = Simulation.mouseUp (this)
def simulationWheel = Simulation.wheel (this)
}
object Mouse {
implicit def autoToJsObject(d: Mouse): js.Object = d.toJs
}
}
| matthughes/scalajs-react | test/src/main/scala/japgolly/scalajs/react/test/SimEvent.scala | Scala | apache-2.0 | 16,549 |
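A minimal usage sketch for the simulated events above; `node` is an assumption standing for any mounted component or DOM node reachable as a ReactOrDomNode in a test:

// Sketch only: `node: ReactOrDomNode` assumed in scope.
SimEvent.Change(value = "hello").simulate(node)           // change event with target.value = "hello"
SimEvent.Keyboard.Enter.shift.simulateKeyDownUp(node)     // Shift-Enter: keyDown followed by keyUp
val tabDownUp = SimEvent.Keyboard.Tab.simulationKeyDownUp // reusable Simulation composed with >>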
/*
* Copyright (c) 2012-14 Miles Sabin
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package shapeless.examples
import shapeless._
/**
 * Examples of optic (i.e. lens/prism) usage.
*
* @author Miles Sabin
*/
package opticDemoDatatypes {
case class Address(street : String, city : String)
case class Person(name : String, age : Int, address : Address)
sealed trait Tree[T]
case class Node[T](left: Tree[T], right: Tree[T]) extends Tree[T]
case class Leaf[T](value: T) extends Tree[T]
case class Foo(i: Int, s: String)
case class Bar(i: Int, b: Boolean)
}
object OpticExamples extends App {
import opticDemoDatatypes._
// 1. Basic nested case classes
val mary = Person("Mary", 32, Address("Southover Street", "Brighton"))
val ageLens = lens[Person].age
val streetLens = lens[Person].address.street
val age = ageLens.get(mary)
assert(age == 32)
val street = streetLens.get(mary)
assert(street == "Southover Street")
val mary2 = streetLens.set(mary)("Montpelier Road")
val mary3 = ageLens.modify(mary2)(_+1)
assert(mary3 == Person("Mary", 33, Address("Montpelier Road", "Brighton")))
// 2. Sealed family (sum of products)
val l: Either[Int, Boolean] = Left(23)
val r: Either[Int, Boolean] = Right(false)
val lExplicit = prism[Either[Int, Boolean]][Left[Int, Boolean]].a
val rExplicit = prism[Either[Int, Boolean]][Right[Int, Boolean]].b
val ol = lExplicit.get(l)
assert(ol == Some(23))
val or = rExplicit.get(l)
assert(or == None)
// 3. Sealed family with coproduct branch inferred from product selectors
val lInferred = prism[Either[Int, Boolean]].a
val rInferred = prism[Either[Int, Boolean]].b
val ol2 = lInferred.get(l)
assert(ol2 == Some(23))
val or2 = rInferred.get(l)
assert(or2 == None)
// 4. Sealed, recursive family, coproduct branches inferred
val t1 = Node(Node(Leaf(1), Leaf(2)), Leaf(3))
val t2 = Node(Leaf(4), Node(Leaf(5), Leaf(6)))
val lr = prism[Tree[Int]].left.right.value
val rr = prism[Tree[Int]].right.right.value
val lrv1 = lr.get(t1)
assert(lrv1 == Some(2))
val lrv2 = lr.get(t2)
assert(lrv2 == None)
val t1b = lr.set(t1)(23)
assert(t1b == Node(Node(Leaf(1), Leaf(23)), Leaf(3)))
val t2b = rr.set(t2)(13)
assert(t2b == Node(Leaf(4), Node(Leaf(5), Leaf(13))))
// 5. Optic inferred from initial data type and a path
def update[T, E](t: T)(e: E)(implicit mkLens: p.Lens[T, E]): T = mkLens().set(t)(e)
val p = ^.i // a path selecting the product element labelled 'i'
// Unrelated types with a common field i: Int
val foo = Foo(23, "foo")
val bar = Bar(13, true)
// Typesafe polymorphic update via lens inference
val foo2 = update(foo)(11)
assert(foo2 == Foo(11, "foo"))
val bar2 = update(bar)(7)
assert(bar2 == Bar(7, true))
}
| mandubian/shapeless | examples/src/main/scala/shapeless/examples/optics.scala | Scala | apache-2.0 | 3,314 |
package org.scaladebugger.api.lowlevel.monitors
import org.scaladebugger.api.lowlevel.requests.JDIRequestArgument
import org.scaladebugger.api.utils.PendingActionManager
import scala.util.{Success, Try}
/**
* Provides pending monitor waited capabilities to an existing
* monitor waited manager.
*/
trait PendingMonitorWaitedSupport extends PendingMonitorWaitedSupportLike {
/**
* Represents the manager used to store pending monitor waited requests and
* process them later.
*/
protected val pendingActionManager: PendingActionManager[MonitorWaitedRequestInfo]
/**
* Processes all pending monitor waited requests.
*
* @return The collection of successfully-processed monitor waited requests
*/
override def processAllPendingMonitorWaitedRequests(): Seq[MonitorWaitedRequestInfo] = {
pendingActionManager.processAllActions().map(_.data)
}
/**
* Retrieves a list of pending monitor waited requests.
*
* @return The collection of monitor waited requests
*/
override def pendingMonitorWaitedRequests: Seq[MonitorWaitedRequestInfo] = {
pendingActionManager.getPendingActionData(_ => true)
}
/**
* Creates a new monitor waited request.
*
* @param requestId The id of the request used to retrieve and delete it
* @param extraArguments Any additional arguments to provide to the request
*
* @return Success(id) if successful, otherwise Failure
*/
abstract override def createMonitorWaitedRequestWithId(
requestId: String,
extraArguments: JDIRequestArgument*
): Try[String] = {
def createMonitorWaitedRequest() = super.createMonitorWaitedRequestWithId(
requestId,
extraArguments: _*
)
val result = createMonitorWaitedRequest()
// If failed, add as pending
result.recoverWith {
case _: Throwable if isPendingSupportEnabled =>
pendingActionManager.addPendingActionWithId(
requestId,
MonitorWaitedRequestInfo(requestId, isPending = true, extraArguments),
() => createMonitorWaitedRequest().get
)
Success(requestId)
case _: Throwable => result
}
}
/**
* Removes the specified monitor waited request.
*
   * @param id The id of the monitor waited request
*
* @return True if the monitor waited request was removed (if it existed),
* otherwise false
*/
abstract override def removeMonitorWaitedRequest(id: String): Boolean = {
val result = super.removeMonitorWaitedRequest(id)
val pendingResult = pendingActionManager.removePendingActionsWithId(id)
// True if we removed a real monitor waited request or any
// pending monitor waited request
result || pendingResult.getOrElse(Nil).nonEmpty
}
}
| ensime/scala-debugger | scala-debugger-api/src/main/scala/org/scaladebugger/api/lowlevel/monitors/PendingMonitorWaitedSupport.scala | Scala | apache-2.0 | 2,740 |
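The recoverWith block above implements a retry-later pattern: attempt the action, and on failure enqueue a thunk that re-runs it while reporting success to the caller immediately. The same shape in isolation (the names here are illustrative, not from the API):

import scala.util.{Success, Try}

def createOrQueue(attempt: () => Try[String], enqueue: (() => String) => Unit, id: String): Try[String] =
  attempt().recoverWith {
    case _: Throwable =>
      enqueue(() => attempt().get) // re-run later; .get rethrows if it fails again
      Success(id)                  // report the id immediately, as the pending path does above
  }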
package cuando.time
import play.api.libs.json.JsSuccess
import play.api.libs.json.Reads
import play.api.libs.json.Writes
import play.api.libs.json.JsResult
import play.api.libs.json.JsValue
import continuum.Interval
import org.joda.time.DateTime
import play.api.libs.json.Json
import play.api.libs.json.JsArray
import play.api.libs.json.JsError
case class TemporalContext(tRecs: Set[TemporalRecord]) {
def union(that: TemporalContext): TemporalContext = {
TemporalContext(this.tRecs union that.tRecs)
}
}
object TemporalContext {
implicit val tcReads: Reads[TemporalContext] = new Reads[TemporalContext] {
def reads(json: JsValue): JsResult[TemporalContext] = {
json match {
case JsArray(arr) =>
val tRecs = arr.map { elem => elem.as[TemporalRecord] }.toSet
JsSuccess(TemporalContext(tRecs))
case _ => JsError("error.record.expected.JsArray")
}
}
}
implicit val tcWrites: Writes[TemporalContext] = new Writes[TemporalContext] {
def writes(tc: TemporalContext): JsArray = {
JsArray(tc.tRecs.map { temporalRec => Json.toJson(temporalRec) }.toSeq)
}
}
}
| cuando-db/cuando-db | src/main/scala/cuando/time/TemporalContext.scala | Scala | mit | 1,141 |
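A round-trip sketch for the Reads/Writes above; `rec1` and `rec2` are assumed TemporalRecord values, and TemporalRecord is assumed to carry its own JSON format, as the `elem.as[TemporalRecord]` call implies:

import play.api.libs.json.Json

val tc = TemporalContext(Set(rec1)) union TemporalContext(Set(rec2)) // rec1, rec2 assumed in scope
val js = Json.toJson(tc)          // a JsArray with one element per record
val back = js.as[TemporalContext] // parses back through tcReads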
package com.imaginea.activegrid.core.models
import com.imaginea.activegrid.core.models.{ApplicationTier => AppTier, Neo4jRepository => Neo}
import com.imaginea.activegrid.core.utils.ActiveGridUtils
import com.typesafe.scalalogging.Logger
import org.neo4j.graphdb.Node
import org.slf4j.LoggerFactory
/**
* Created by sivag on 23/11/16.
*/
case class ApplicationTier(override val id: Option[Long],
name: String,
description: String,
instances: List[Instance],
apmServer: Option[APMServerDetails]) extends BaseEntity
object ApplicationTier {
val label = ApplicationTier.getClass.getSimpleName
val relationLabel = ActiveGridUtils.relationLbl(label)
def fromNeo4jGraph(nodeId: Long): Option[AppTier] = {
val logger = Logger(LoggerFactory.getLogger(getClass.getName))
val node = Neo.findNodeById(nodeId)
node.map {
appTier =>
//Reading properties
val map = Neo.getProperties(appTier, "name", "description")
//Fetching instances
val instances = Neo.getChildNodeIds(appTier.getId, Instance.relationLabel).flatMap {
id => Instance.fromNeo4jGraph(id)
}
// Fetching APM Server
val apmSrvr = Neo.getChildNodeId(appTier.getId, APMServerDetails.relationLabel).flatMap {
id => APMServerDetails.fromNeo4jGraph(id)
}
ApplicationTier(Some(nodeId), map("name").toString, map("description").toString, instances, apmSrvr)
}
}
implicit class ApplicationTierImpl(applicationTier: AppTier) extends Neo4jRep[AppTier] {
val logger = Logger(LoggerFactory.getLogger(getClass.getName))
override def toNeo4jGraph(appTier: AppTier): Node = {
logger.debug(s"In toGraph for Software: $appTier")
val map = Map("name" -> appTier.name, "description" -> appTier.description)
val appTierNode = Neo.saveEntity[AppTier](AppTier.label, appTier.id, map)
// Creating instances.
    appTier.instances.foreach {
instance =>
val instnNode = instance.toNeo4jGraph(instance)
Neo.createRelation(Instance.relationLabel, appTierNode, instnNode)
}
// Creating APMServer
    appTier.apmServer.foreach {
srvr =>
val srvrNod = srvr.toNeo4jGraph(srvr)
Neo.createRelation(APMServerDetails.relationLabel, appTierNode, srvrNod)
}
appTierNode
}
override def fromNeo4jGraph(nodeId: Long): Option[AppTier] = {
AppTier.fromNeo4jGraph(nodeId)
}
}
}
| eklavya/activeGrid | src/main/scala/com/imaginea/activegrid/core/models/ApplicationTier.scala | Scala | apache-2.0 | 2,565 |
package n4g
object Model {
type Id = String
type Timestamp = Int
case class File(repo: String, // repo origin url
path: String,
lines: Int,
name: String,
ext: String, // file extension (.scala, .js, ...)
purpose: String) // src vs. test
case class User(name: String,
email: String)
case class Commit(id: Id,
time: Timestamp,
user: User,
msg: String,
files: List[File],
parents: List[Id])
case class Change(id: Id,
time: Timestamp,
msg: String,
user: User,
file: File)
}
| by-dam/neo4git | src/main/scala/n4g/Model.scala | Scala | mit | 781 |
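For illustration, flattening a Commit into its per-file Change rows could look like this (a sketch, not code from the repo):

import n4g.Model._

// One Change per file touched by the commit.
def toChanges(commit: Commit): List[Change] =
  commit.files.map(f => Change(commit.id, commit.time, commit.msg, commit.user, f))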
package com.solarmosaic.client.utilityApi.json
/**
* Stringified JSON.
*
* @param json The underlying JSON String.
*/
case class JsonString(json: String) {
override def toString = json
}
| solarmosaic/utility-api-client | src/main/scala/com/solarmosaic/client/utilityApi/json/JsonString.scala | Scala | mit | 194 |
/*
* Copyright 2010 Red Hat, Inc.
*
* Red Hat licenses this file to you under the Apache License, version
* 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package io.fabric8.webui.agents.jvm
import io.fabric8.api.Container
import io.fabric8.service.ContainerTemplate
import io.fabric8.webui.agents.{ManagementExtension, ManagementExtensionFactory}
import javax.ws.rs.{GET, Path}
import javax.management.ObjectName
import javax.management.openmbean.CompositeData
import io.fabric8.service.JmxTemplateSupport.JmxConnectorCallback
import javax.management.remote.JMXConnector
import io.fabric8.webui.BaseResource
object JVMAgentResource extends ManagementExtensionFactory {
def create(a: Container, jmx_username: String, jmx_password: String) = {
if (a.getJmxDomains.contains("java.lang")) {
Some(new JVMAgentResource(a, jmx_username, jmx_password))
} else {
None
}
}
}
class JVMAgentResource(val agent: Container, jmx_username: String, jmx_password: String) extends BaseResource with ManagementExtension {
private val template = agent_template(agent, jmx_username, jmx_password)
def id = "jvm"
@GET
override
def get = Array("metrics")
@GET
@Path("metrics")
def metrics = {
template.execute(new JmxConnectorCallback[JvmMetricsDTO] {
def doWithJmxConnector(connector: JMXConnector): JvmMetricsDTO = {
val mbean_server = connector.getMBeanServerConnection;
val rc = new JvmMetricsDTO
attempt(
rc.os_name =
mbean_server.getAttribute("java.lang:type=OperatingSystem", "Name").toString +
" " +
mbean_server.getAttribute("java.lang:type=OperatingSystem", "Version")
)
attempt(
rc.jvm_name =
mbean_server.getAttribute("java.lang:type=Runtime", "VmName").toString +
" (" +
mbean_server.getAttribute("java.lang:type=Runtime", "VmVendor") + ")"
)
implicit def to_object_name(value: String): ObjectName = new ObjectName(value)
implicit def to_long(value: AnyRef): Long = value.asInstanceOf[java.lang.Long].longValue()
implicit def to_int(value: AnyRef): Int = value.asInstanceOf[java.lang.Integer].intValue()
implicit def to_double(value: AnyRef): Double = value.asInstanceOf[java.lang.Double].doubleValue()
def attempt(func: => Unit) = {
try {
func
} catch {
          case _: Throwable => // ignore
}
}
attempt(rc.uptime = mbean_server.getAttribute("java.lang:type=Runtime", "Uptime"))
attempt(rc.start_time = mbean_server.getAttribute("java.lang:type=Runtime", "StartTime"))
attempt(rc.runtime_name = mbean_server.getAttribute("java.lang:type=Runtime", "Name").toString)
attempt(rc.spec_name = mbean_server.getAttribute("java.lang:type=Runtime", "SpecName").toString)
attempt(rc.spec_vendor = mbean_server.getAttribute("java.lang:type=Runtime", "SpecVendor").toString)
attempt(rc.spec_version = mbean_server.getAttribute("java.lang:type=Runtime", "SpecVersion").toString)
attempt(rc.vm_name = mbean_server.getAttribute("java.lang:type=Runtime", "VmName").toString)
attempt(rc.vm_vendor = mbean_server.getAttribute("java.lang:type=Runtime", "VmVendor").toString)
attempt(rc.vm_version = mbean_server.getAttribute("java.lang:type=Runtime", "VmVersion").toString)
attempt(rc.os_arch = mbean_server.getAttribute("java.lang:type=OperatingSystem", "Arch").toString)
attempt(rc.os_fd_open = mbean_server.getAttribute("java.lang:type=OperatingSystem", "OpenFileDescriptorCount"))
attempt(rc.os_fd_max = mbean_server.getAttribute("java.lang:type=OperatingSystem", "MaxFileDescriptorCount"))
attempt(rc.os_memory_total = mbean_server.getAttribute("java.lang:type=OperatingSystem", "TotalPhysicalMemorySize"))
attempt(rc.os_memory_free = mbean_server.getAttribute("java.lang:type=OperatingSystem", "FreePhysicalMemorySize"))
attempt(rc.os_swap_free = mbean_server.getAttribute("java.lang:type=OperatingSystem", "FreeSwapSpaceSize"))
        attempt(rc.os_swap_total = mbean_server.getAttribute("java.lang:type=OperatingSystem", "TotalSwapSpaceSize")) // field name assumed; the original assigned TotalSwapSpaceSize to os_swap_free twice
attempt(rc.os_load_average = mbean_server.getAttribute("java.lang:type=OperatingSystem", "SystemLoadAverage"))
attempt(rc.os_cpu_time = mbean_server.getAttribute("java.lang:type=OperatingSystem", "ProcessCpuTime"))
attempt(rc.os_processors = mbean_server.getAttribute("java.lang:type=OperatingSystem", "AvailableProcessors"))
attempt(rc.classes_loaded = mbean_server.getAttribute("java.lang:type=ClassLoading", "LoadedClassCount"))
attempt(rc.classes_unloaded = mbean_server.getAttribute("java.lang:type=ClassLoading", "UnloadedClassCount"))
attempt(rc.threads_peak = mbean_server.getAttribute("java.lang:type=Threading", "PeakThreadCount"))
attempt(rc.threads_current = mbean_server.getAttribute("java.lang:type=Threading", "ThreadCount"))
def memory_metrics(data: CompositeData) = {
val rc = new MemoryMetricsDTO
rc.alloc = data.get("committed").asInstanceOf[java.lang.Long].longValue()
rc.used = data.get("used").asInstanceOf[java.lang.Long].longValue()
rc.max = data.get("max").asInstanceOf[java.lang.Long].longValue()
rc
}
attempt(rc.heap_memory = memory_metrics(mbean_server.getAttribute("java.lang:type=Memory", "HeapMemoryUsage").asInstanceOf[CompositeData]))
attempt(rc.non_heap_memory = memory_metrics(mbean_server.getAttribute("java.lang:type=Memory", "NonHeapMemoryUsage").asInstanceOf[CompositeData]))
rc
}
});
}
}
| alexeev/jboss-fuse-mirror | sandbox/fmc/fmc-rest/src/main/scala/org/fusesource/fabric/webui/agents/jvm/JVMResource.scala | Scala | apache-2.0 | 6,299 |
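The attribute reads above are plain JMX; against the local platform MBean server the same lookups work with the standard JDK API, no Fabric8 connector required:

import java.lang.management.ManagementFactory
import javax.management.ObjectName
import javax.management.openmbean.CompositeData

val mbs = ManagementFactory.getPlatformMBeanServer
val uptimeMs = mbs.getAttribute(new ObjectName("java.lang:type=Runtime"), "Uptime")
val heap = mbs.getAttribute(new ObjectName("java.lang:type=Memory"), "HeapMemoryUsage").asInstanceOf[CompositeData]
val usedHeapBytes = heap.get("used") // java.lang.Long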
package lila.round
import scala.concurrent.duration._
import akka.actor._
import akka.pattern.{ ask, pipe }
import chess.{ Color, White, Black }
import play.api.libs.iteratee._
import play.api.libs.json._
import actorApi._
import lila.common.LightUser
import lila.game.actorApi.{ StartGame, UserStartGame }
import lila.game.Event
import lila.hub.actorApi.Deploy
import lila.hub.actorApi.game.ChangeFeatured
import lila.hub.actorApi.round.IsOnGame
import lila.hub.actorApi.tv.{ Select => TvSelect }
import lila.hub.TimeBomb
import lila.socket._
import lila.socket.actorApi.{ Connected => _, _ }
import makeTimeout.short
private[round] final class Socket(
gameId: String,
history: History,
lightUser: String => Option[LightUser],
uidTimeout: Duration,
socketTimeout: Duration,
disconnectTimeout: Duration,
ragequitTimeout: Duration,
simulActor: ActorSelection) extends SocketActor[Member](uidTimeout) {
private var hasAi = false
private val timeBomb = new TimeBomb(socketTimeout)
private var delayedCrowdNotification = false
private final class Player(color: Color) {
// when the player has been seen online for the last time
private var time: Double = nowMillis
// wether the player closed the window intentionally
private var bye: Int = 0
var userId = none[String]
def ping {
isGone foreach { _ ?? notifyGone(color, false) }
if (bye > 0) bye = bye - 1
time = nowMillis
}
def setBye {
bye = 3
}
private def isBye = bye > 0
private def isHostingSimul: Fu[Boolean] = userId ?? { u =>
simulActor ? lila.hub.actorApi.simul.GetHostIds mapTo manifest[Set[String]] map (_ contains u)
}
def isGone =
if (time < (nowMillis - isBye.fold(ragequitTimeout, disconnectTimeout).toMillis))
isHostingSimul map (!_)
else fuccess(false)
}
private val whitePlayer = new Player(White)
private val blackPlayer = new Player(Black)
override def preStart() {
super.preStart()
refreshSubscriptions
lila.game.GameRepo game gameId map SetGame.apply pipeTo self
}
override def postStop() {
super.postStop()
lilaBus.unsubscribe(self)
lilaBus.publish(lila.hub.actorApi.round.SocketEvent.Stop(gameId), 'roundDoor)
}
private def refreshSubscriptions {
lilaBus.unsubscribe(self)
watchers.flatMap(_.userTv).toList.distinct foreach { userId =>
lilaBus.subscribe(self, Symbol(s"userStartGame:$userId"))
}
}
def receiveSpecific = {
case SetGame(Some(game)) =>
hasAi = game.hasAi
whitePlayer.userId = game.player(White).userId
blackPlayer.userId = game.player(Black).userId
// from lilaBus 'startGame
// sets definitive user ids
// in case one joined after the socket creation
case StartGame(game) => self ! SetGame(game.some)
case d: Deploy =>
onDeploy(d) // default behaviour
history.enablePersistence
case PingVersion(uid, v) =>
timeBomb.delay
ping(uid)
ownerOf(uid) foreach { o =>
playerDo(o.color, _.ping)
}
withMember(uid) { member =>
(history getEventsSince v).fold(resyncNow(member))(batch(member, _))
}
case Bye(color) => playerDo(color, _.setBye)
case Broom =>
broom
if (timeBomb.boom) self ! PoisonPill
else if (!hasAi) Color.all foreach { c =>
playerGet(c, _.isGone) foreach { _ ?? notifyGone(c, true) }
}
case GetVersion => sender ! history.getVersion
case IsGone(color) => playerGet(color, _.isGone) pipeTo sender
case IsOnGame(color) => sender ! ownerOf(color).isDefined
case GetSocketStatus =>
playerGet(White, _.isGone) zip playerGet(Black, _.isGone) map {
case (whiteIsGone, blackIsGone) => SocketStatus(
version = history.getVersion,
whiteOnGame = ownerOf(White).isDefined,
whiteIsGone = whiteIsGone,
blackOnGame = ownerOf(Black).isDefined,
blackIsGone = blackIsGone)
} pipeTo sender
case Join(uid, user, color, playerId, ip, userTv) =>
val (enumerator, channel) = Concurrent.broadcast[JsValue]
val member = Member(channel, user, color, playerId, ip, userTv = userTv)
addMember(uid, member)
notifyCrowd
playerDo(color, _.ping)
sender ! Connected(enumerator, member)
if (member.userTv.isDefined) refreshSubscriptions
if (member.owner) lilaBus.publish(
lila.hub.actorApi.round.SocketEvent.OwnerJoin(gameId, color, ip),
'roundDoor)
case Nil =>
case eventList: EventList => notify(eventList.events)
case lila.chat.actorApi.ChatLine(chatId, line) => notify(List(line match {
case l: lila.chat.UserLine => Event.UserMessage(l, chatId endsWith "/w")
case l: lila.chat.PlayerLine => Event.PlayerMessage(l)
}))
case AnalysisAvailable => notifyAll("analysisAvailable")
case lila.hub.actorApi.setup.DeclineChallenge(_) => notifyAll("declined")
case Quit(uid) =>
members get uid foreach { member =>
quit(uid)
notifyCrowd
if (member.userTv.isDefined) refreshSubscriptions
}
case ChangeFeatured(_, msg) => watchers.foreach(_ push msg)
case TvSelect(msg) => watchers.foreach(_ push msg)
case UserStartGame(userId, game) => watchers filter (_ onUserTv userId) foreach {
_ push makeMessage("resync")
}
case round.TournamentStanding(id) => owners.foreach {
_ push makeMessage("tournamentStanding", id)
}
case NotifyCrowd =>
delayedCrowdNotification = false
val (anons, users) = watchers.map(_.userId flatMap lightUser).foldLeft(0 -> List[LightUser]()) {
case ((anons, users), Some(user)) => anons -> (user :: users)
case ((anons, users), None) => (anons + 1) -> users
}
val event = Event.Crowd(
white = ownerOf(White).isDefined,
black = ownerOf(Black).isDefined,
watchers = showSpectators(users, anons))
notifyAll(event.typ, event.data)
}
def notifyCrowd {
if (!delayedCrowdNotification) {
delayedCrowdNotification = true
context.system.scheduler.scheduleOnce(1 second, self, NotifyCrowd)
}
}
def notify(events: Events) {
val vevents = history addEvents events
members.values foreach { m => batch(m, vevents) }
}
def batch(member: Member, vevents: List[VersionedEvent]) {
vevents match {
case Nil =>
case List(one) => member push one.jsFor(member)
case many => member push makeMessage("b", many map (_ jsFor member))
}
}
def notifyOwner[A: Writes](color: Color, t: String, data: A) {
ownerOf(color) foreach { m =>
m push makeMessage(t, data)
}
}
def notifyGone(color: Color, gone: Boolean) {
notifyOwner(!color, "gone", gone)
}
def ownerOf(color: Color): Option[Member] =
members.values find { m => m.owner && m.color == color }
def ownerOf(uid: String): Option[Member] =
members get uid filter (_.owner)
def watchers: Iterable[Member] = members.values.filter(_.watcher)
def owners: Iterable[Member] = members.values.filter(_.owner)
private def playerGet[A](color: Color, getter: Player => A): A =
getter(color.fold(whitePlayer, blackPlayer))
private def playerDo(color: Color, effect: Player => Unit) {
effect(color.fold(whitePlayer, blackPlayer))
}
}
| JimmyMow/lila | modules/round/src/main/Socket.scala | Scala | mit | 7,460 |
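Each member above gets a Concurrent.broadcast pair: pushes on the channel flow to every consumer of the enumerator. The mechanism in isolation (play-iteratees API):

import play.api.libs.iteratee.{Concurrent, Iteratee}
import play.api.libs.json.{JsValue, Json}

val (enumerator, channel) = Concurrent.broadcast[JsValue]
enumerator |>> Iteratee.foreach[JsValue](js => println(js)) // attach a consumer
channel.push(Json.obj("t" -> "ping"))                       // every attached consumer sees this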
package scalaz.stream.mongodb.update
import org.specs2.Specification
import org.specs2.specification.Snippets
import com.mongodb.{DBCollection, DBObject, BasicDBObjectBuilder, WriteConcern}
import org.bson.types.ObjectId
import org.specs2.matcher.MatchResult
import scalaz.stream.mongodb.collectionSyntax._
import scalaz.stream.mongodb.MongoRuntimeSpecification
import scalaz.stream.mongodb.channel.ChannelResult
import collection.JavaConverters._
class CollectionRemoveSpec extends Specification with Snippets with MongoRuntimeSpecification {
def is =
s2"""
${"Removing documents".title}
      Documents are removed from the collection with a simple remove command:
      ${ snippet { query("key" === 1) and remove }} $r1
      Additionally, you can specify a write concern for the remove command by using `ensure`:
      ${ snippet { query("key" === 1) and remove.ensure(WriteConcern.MAJORITY) }}
      Lastly, if the remove command on a non-sharded collection has to be isolated (meaning that no other updates may interleave during its execution),
      just add the isolated modifier to it:
${ snippet { query("key" === 1) and remove.isolated(true) }}
"""
case class checkQuery(remove: ChannelResult[DBCollection,WriteResult]) {
lazy val mongo = new WithMongoCollection()
def document(v: Int) = BasicDBObjectBuilder.start("_id", new ObjectId).add("key", v).get
def writeDocs(count: Int) = {
val docs = for {i <- 0 until count} yield (document(i))
docs.foreach { d => mongo.collection.save(d) }
docs
}
def verifyRemove(f: (Seq[DBObject], Seq[DBObject]) => MatchResult[Any]): MatchResult[Any] = {
if (mongo.collection.count() > 0) mongo.collection.drop
val docs = writeDocs(5)
(mongo.collection through remove).run.run
val found = mongo.collection.find().iterator().asScala.toList
f(docs, found)
}
}
def r1 = checkQuery(query("key" >= 2) and remove).verifyRemove {
case (in, result) =>
result.size must_== 2
}
}
| Spinoco/scalaz-stream-mongodb | core/src/test/scala/scalaz/stream/mongodb/update/CollectionRemoveSpec.scala | Scala | mit | 2,042 |
package org.bitcoins.core.protocol.blockchain
import org.bitcoins.testkitcore.gen.BlockchainElementsGenerator
import org.scalacheck.{Prop, Properties}
/** Created by tom on 7/6/16.
*/
class BlockHeaderSpec extends Properties("BlockHeaderSpec") {
property("serialization symmetry") =
Prop.forAll(BlockchainElementsGenerator.blockHeader) { header =>
BlockHeader(header.hex) == header
}
}
| bitcoin-s/bitcoin-s | core-test/src/test/scala/org/bitcoins/core/protocol/blockchain/BlockHeaderSpec.scala | Scala | mit | 406 |
package com.keba.scala.bank.services
import java.util.Currency
import com.keba.scala.bank.account.BankAccount
import com.keba.scala.bank.exceptions.{BankAccountAlreadyExists, BankAccountNotFound, BankAccountOverdraft}
import com.keba.scala.bank.money.Money
import com.keba.scala.bank.repositories.BankAccountRepository
/**
 * Provides services related to bank accounts.
* Created by alexp on 28/05/16.
*/
class BankingService extends BankingServiceMixinInterface {
/* Constants */
  private val ACCOUNTNUMBER_FORMAT_REGEXP =
    """[0-9]{3}\.[0-9]{3}""".r
/* Fields */
var exchangeRateService: ExchangeRateService = null
/**
* Registers the supplied bank account with the service.
* The account number in the bank account must not have been
* previously used to register a bank account.
*
* @param inNewBankAccount Bank account to register with the service.
* @throws BankAccountAlreadyExists If a bank account with the
* same account number already exists.
* @throws IllegalArgumentException If the supplied bank account's
* account number is not in a valid format.
*/
def registerBankAccount(inNewBankAccount: BankAccount): Unit = {
/*
This is a command-type method, so it does not return a result.
This method has a side-effect in that a bank account is
created in the repository.
*/
validateBankAccountNumberFormat(inNewBankAccount)
/* Attempt to create the new bank account in the repository */
BankAccountRepository.create(inNewBankAccount)
}
private def validateBankAccountNumberFormat(inBankAccount: BankAccount): Unit = {
/*
Make sure that the account number is the proper format
If the format is invalid, throws an exception
*/
inBankAccount.accountNumber match {
case ACCOUNTNUMBER_FORMAT_REGEXP() =>
/* Good account number, do nothing */
case _ =>
/* Bad account number, throw exception */
throw new IllegalArgumentException("Failed to register new bank account. Illegal account number format: " + inBankAccount.accountNumber)
}
}
/**
* Inquires the balance of the bank account with the supplied
* account number.
*
* @param inBankAccountNumber Account number of bank account for
* which to inquire for balance.
* @return Balance of the bank account.
* @throws IllegalArgumentException If the supplied account number
* is not in a valid format.
* @throws BankAccountNotFound If there is no corresponding bank
* account for the supplied bank account number.
*/
def balance(inBankAccountNumber: String): Money = {
validateBankAccountNumberFormat(inBankAccountNumber)
/*
* This is a query-type method, so it does not have
* any side-effects, it is idempotent.
*/
val theBankAccountOption = retrieveBankAccount(inBankAccountNumber)
val theBankAccount = theBankAccountOption.get
theBankAccount.balance
}
/**
    * Validates the format of the account number of the supplied
    * bank account. If it is not in the appropriate format, throws
    * an exception.
*/
protected def validateBankAccountNumberFormat(inBankAccountNumber: String): Unit = {
/*
* Make sure that the account number is the proper format.
* If the format is invalid, throws an exception.
*/
inBankAccountNumber match {
case ACCOUNTNUMBER_FORMAT_REGEXP() =>
/* Good account number, do nothing. */
case _ =>
/* Bad account number, throw exception. */
throw new IllegalArgumentException(
"Failed to register new bank account. " + "Illegal account number format: " + inBankAccountNumber)
}
}
/**
* Deposits the supplied amount of money to the bank account with
* the supplied account number.
*
* @param inBankAccountNumber Account number of bank account to
* which to deposit money.
* @param inAmount Amount of money to deposit to the account.
* @throws IllegalArgumentException If the supplied account number
* is not in a valid format.
* @throws BankAccountNotFound If there is no corresponding bank
* account for the supplied bank account number.
*/
def deposit(inBankAccountNumber: String, inAmount: Money): Unit = {
/*
* This is a command-type method, so we do not return a result.
* The method has side-effects in that the balance of a
* bank account is updated.
*/
/* Retrieve bank account with supplied account number. */
val theBankAccountOption = retrieveBankAccount(inBankAccountNumber)
val theBankAccount = theBankAccountOption.get
/*
* Exchange the currency to deposit to the currency of
* the bank account. The exchange rate service will return
* the supplied amount if it already is of the desired currency,
* so it is safe to always perform the exchange operation.
*/
val theExchangedAmountToDepositOption = exchangeMoney(inAmount, theBankAccount.currency)
val theExchangedAmountToDeposit = theExchangedAmountToDepositOption.get
/*
* Arriving here, we know that we have a bank account,
* money to deposit in the bank account's currency and can
* now perform the deposit and update the bank account.
*/
theBankAccount.deposit(theExchangedAmountToDeposit)
updateBankAccount(theBankAccount)
}
/**
* Retrieves bank account with supplied account number from
* the bank account repository.
* This method isolates access to the bank account repository in
* order to enable us to add error handling, exception translation,
* logging etc. of access to a repository.
* Note that we assume only a scenario in which access to the
* repository is successful.
*
* @param inBankAccountNumber Account number of bank account
* to retrieve.
* @return Option holding bank account with supplied account number,
* or None if no bank account was found.
*/
protected def retrieveBankAccount(inBankAccountNumber: String): Option[BankAccount] = {
val theBankAccountOption = BankAccountRepository.findBankAccountWithAccountNumber(inBankAccountNumber)
theBankAccountOption
}
/**
* Updates supplied bank account in the bank account repository.
* This method isolates access to the bank account repository in
* order to enable us to add error handling, exception translation,
* logging etc. of access to a repository.
* Note that we assume only a scenario in which a bank account is
* found.
*
* @param inBankAccount Bank account to update.
*/
protected def updateBankAccount(inBankAccount: BankAccount): Unit = {
BankAccountRepository.update(inBankAccount)
}
/**
* Exchanges the supplied amount of money to the supplied currency.
* This method isolates access to the exchange rate service in
* order to enable us to add error handling, exception translation,
* logging etc. of access to a particular service external to this service.
* Note that we assume only a scenario in the exchange is successful.
*
* @param inAmount Money to exchange.
* @param inToCurrency Currency to exchange money to.
* @return Option holding exchanged money, or None if no exchange
* rate registered for the exchange.
*/
protected def exchangeMoney(inAmount: Money, inToCurrency: Currency): Option[Money] = {
val theExchangedMoneyOption = exchangeRateService.exchange(inAmount, inToCurrency)
theExchangedMoneyOption
}
/**
* Withdraws the supplied amount of money from the bank account with
* the supplied account number.
*
* @param inBankAccountNumber Account number of bank account from
* which to withdraw money.
* @param inAmount Amount of money to withdraw from the account.
* @throws IllegalArgumentException If the supplied account number
* is not in a valid format.
* @throws BankAccountNotFound If there is no corresponding bank
* account for the supplied bank account number.
* @throws BankAccountOverdraft If an attempt was made to overdraft
* the bank account.
*/
def withdraw(inBankAccountNumber: String, inAmount: Money): Unit = {
/*
* This is a command-type method, so we do not return a result.
* The method has side-effects in that the balance of a
* bank account is updated.
*/
/* Retrieve bank account with supplied account number. */
val theBankAccountOption = retrieveBankAccount(inBankAccountNumber)
val theBankAccount = theBankAccountOption.get
/*
* Exchange the currency to withdraw to the currency of
* the bank account. The exchange rate service will do nothing if
* the supplied amount is of the desired currency, so it is
* safe to always perform the exchange operation.
*/
val theExchangedAmountToWithdrawOption = exchangeMoney(inAmount, theBankAccount.currency)
val theExchangedAmountToWithdraw = theExchangedAmountToWithdrawOption.get
/*
* Arriving here, we know that we have a bank account,
* money to withdraw in the bank account's currency and can
* now perform the withdrawal and update the bank account.
*/
theBankAccount.withdraw(theExchangedAmountToWithdraw)
updateBankAccount(theBankAccount)
}
}
| alexp82/ddd-banking-system | src/main/scala/com/keba/scala/bank/services/BankingService.scala | Scala | apache-2.0 | 9,765 |
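A usage sketch of the service; BankAccount's constructor and Money's factory live elsewhere in this codebase, so the lines marked as assumed are illustrative shapes rather than the real API:

val service = new BankingService
service.exchangeRateService = myExchangeRateService   // assumed: dependency wired manually
service.registerBankAccount(myAccount)                // assumed: myAccount.accountNumber matches the NNN.NNN format
service.deposit(myAccount.accountNumber, someMoney)   // someMoney: Money, assumed in scope
val balance: Money = service.balance(myAccount.accountNumber)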
package shopping.products
case object Apple extends Product {
override val id: Int = 0
override val cost: Double = 0.60
}
| scottkwalker/shopping-cart | src/main/scala/shopping/products/Apple.scala | Scala | cc0-1.0 | 127 |
/*
mls: basic machine learning algorithms for Scala
Copyright (C) 2014 Davi Pereira dos Santos
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package ml.models
import ml.Pattern
import scala.util.Random
case class RandomRank(seed: Int) extends Model {
def JS(pattern: Pattern) = ???
def predict(instance: Pattern) = output(instance).zipWithIndex.maxBy(_._1)._2
def distribution(instance: Pattern) = {
    println("distribution was not calculated with Platt/sigmoid")
val arr = output(instance)
val min = arr.min
val max = arr.max
    val norm = arr map (x => (x - min) / (max - min))
val sum = norm.sum
norm map (x => x / sum)
}
val rnd = new Random(seed)
def output(instance: Pattern) = (rnd.shuffle(1 to instance.nclasses) map (_.toDouble)).toArray
}
| machine-learning-scala/mls | src/main/scala/ml/models/RandomRank.scala | Scala | gpl-3.0 | 1,378 |
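The distribution method is min-max normalisation followed by rescaling to sum to one; on plain doubles the same computation reads:

val arr = Array(3.0, 1.0, 2.0)
val (mn, mx) = (arr.min, arr.max)
val norm = arr.map(x => (x - mn) / (mx - mn))
val sum = norm.sum
val dist = norm.map(_ / sum) // non-negative, sums to 1.0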
package io.buoyant.telemetry
import com.fasterxml.jackson.annotation.JsonIgnore
import com.fasterxml.jackson.databind.annotation.JsonDeserialize
import com.timgroup.statsd.NonBlockingStatsDClient
import com.twitter.finagle.Stack
import com.twitter.finagle.util.DefaultTimer
import com.twitter.logging.Logger
import io.buoyant.telemetry.statsd.{StatsDStatsReceiver, StatsDTelemeter}
class StatsDInitializer extends TelemeterInitializer {
type Config = StatsDConfig
val configClass = classOf[StatsDConfig]
override val configId = "io.l5d.statsd"
}
private[telemetry] object StatsDConfig {
val DefaultPrefix = "linkerd"
val DefaultHostname = "127.0.0.1"
val DefaultPort = 8125
val DefaultGaugeIntervalMs = 10000 // for gauges
val DefaultSampleRate = 0.01d // for counters and timing/histograms
val MaxQueueSize = 10000
}
case class StatsDConfig(
prefix: Option[String],
hostname: Option[String],
port: Option[Int],
gaugeIntervalMs: Option[Int],
@JsonDeserialize(contentAs = classOf[java.lang.Double]) sampleRate: Option[Double]
) extends TelemeterConfig {
import StatsDConfig._
@JsonIgnore private[this] val log = Logger.get("io.l5d.statsd")
  log.warning(
    "Warning, you're using the `io.l5d.statsd` telemeter, which is unsupported " +
      "and probably won't do what you expect. Use of this telemeter may lead to" +
      " poor performance or decreased data quality.\n" +
      "Please see https://discourse.linkerd.io/t/deprecating-the-statsd-telemeter for more information."
  )
@JsonIgnore override val experimentalRequired = true
@JsonIgnore private[this] val statsDPrefix = prefix.getOrElse(DefaultPrefix)
@JsonIgnore private[this] val statsDHost = hostname.getOrElse(DefaultHostname)
@JsonIgnore private[this] val statsDPort = port.getOrElse(DefaultPort)
@JsonIgnore private[this] val statsDInterval = gaugeIntervalMs.getOrElse(DefaultGaugeIntervalMs)
@JsonIgnore private[this] val statsDSampleRate = sampleRate.getOrElse(DefaultSampleRate)
@JsonIgnore
def mk(params: Stack.Params): StatsDTelemeter = {
// initiate a UDP connection at startup time
log.info("connecting to StatsD at %s:%d as %s", statsDHost, statsDPort, statsDPrefix)
val statsDClient = new NonBlockingStatsDClient(
statsDPrefix,
statsDHost,
statsDPort,
MaxQueueSize
)
new StatsDTelemeter(
new StatsDStatsReceiver(statsDClient, statsDSampleRate),
statsDInterval,
DefaultTimer
)
}
}
| linkerd/linkerd | telemetry/statsd/src/main/scala/io/buoyant/telemetry/StatsDInitializer.scala | Scala | apache-2.0 | 2,486 |
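A construction sketch: all-None fields fall back to the companion's defaults, and Stack.Params.empty is Finagle's empty parameter map.

import com.twitter.finagle.Stack

val config = StatsDConfig(prefix = None, hostname = None, port = None, gaugeIntervalMs = None, sampleRate = None)
val telemeter = config.mk(Stack.Params.empty) // opens the UDP client to 127.0.0.1:8125 with prefix "linkerd"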
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.ml.tree.impl
import scala.collection.mutable.{HashMap => MutableHashMap}
/**
* Time tracker implementation which holds labeled timers.
*/
private[spark] class TimeTracker extends Serializable {
private val starts: MutableHashMap[String, Long] = new MutableHashMap[String, Long]()
private val totals: MutableHashMap[String, Long] = new MutableHashMap[String, Long]()
/**
* Starts a new timer, or re-starts a stopped timer.
*/
def start(timerLabel: String): Unit = {
val currentTime = System.nanoTime()
if (starts.contains(timerLabel)) {
throw new RuntimeException(s"TimeTracker.start(timerLabel) called again on" +
s" timerLabel = $timerLabel before that timer was stopped.")
}
starts(timerLabel) = currentTime
}
/**
* Stops a timer and returns the elapsed time in seconds.
*/
def stop(timerLabel: String): Double = {
val currentTime = System.nanoTime()
if (!starts.contains(timerLabel)) {
throw new RuntimeException(s"TimeTracker.stop(timerLabel) called on" +
s" timerLabel = $timerLabel, but that timer was not started.")
}
val elapsed = currentTime - starts(timerLabel)
starts.remove(timerLabel)
if (totals.contains(timerLabel)) {
totals(timerLabel) += elapsed
} else {
totals(timerLabel) = elapsed
}
elapsed / 1e9
}
/**
* Print all timing results in seconds.
*/
override def toString: String = {
totals.map { case (label, elapsed) =>
s" $label: ${elapsed / 1e9}"
}.mkString("\\n")
}
}
| mike0sv/spark | mllib/src/main/scala/org/apache/spark/ml/tree/impl/TimeTracker.scala | Scala | apache-2.0 | 2,374 |
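Usage is start/stop by label: stop returns elapsed seconds and accumulates into the totals map, and starting an already-running label throws.

val timer = new TimeTracker
timer.start("findBestSplits")
// ... timed work ...
val seconds: Double = timer.stop("findBestSplits") // nanoseconds converted to seconds
println(timer)                                     // prints "  findBestSplits: <seconds>"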
/*
* Copyright (C) 2016-2017 Lightbend Inc. <https://www.lightbend.com>
*/
package com.lightbend.lagom.internal.api
import java.lang.reflect.{ InvocationHandler, Type }
import java.util
import java.util.{ Optional, UUID }
import java.util.concurrent.CompletionStage
import akka.util.ByteString
import com.lightbend.lagom.javadsl.api.Descriptor.RestCallId
import com.lightbend.lagom.javadsl.api.deser.MessageSerializer.{ NegotiatedDeserializer, NegotiatedSerializer }
import com.lightbend.lagom.javadsl.api.deser._
import com.lightbend.lagom.javadsl.api.transport.{ MessageProtocol, Method }
import com.lightbend.lagom.api.mock.{ BlogService, MockService }
import org.scalatest._
import com.lightbend.lagom.api.mock.ScalaMockService
import com.lightbend.lagom.api.mock.ScalaMockServiceWrong
import com.lightbend.lagom.internal.javadsl.api.{ JacksonPlaceholderExceptionSerializer, JacksonPlaceholderSerializerFactory, MethodServiceCallHolder, ServiceReader }
import com.lightbend.lagom.javadsl.api.{ Descriptor, Service, ServiceCall }
import scala.reflect.ClassTag
class ServiceReaderSpec extends WordSpec with Matchers with Inside {
"The service reader" should {
"read a simple Java service descriptor" in {
val descriptor = serviceDescriptor[MockService]
descriptor.calls().size() should ===(1)
val endpoint = descriptor.calls().get(0)
endpoint.callId() should ===(new RestCallId(Method.GET, "/hello/:name"))
inside(endpoint.requestSerializer) {
case simple: SimpleSerializer[_] => simple.`type` should ===(classOf[UUID])
}
endpoint.responseSerializer should ===(MessageSerializers.STRING)
}
"read a simple Scala service descriptor" in {
val descriptor = serviceDescriptor[ScalaMockService]
descriptor.calls().size() should ===(1)
val endpoint = descriptor.calls().get(0)
endpoint.callId() should ===(new RestCallId(Method.GET, "/hello/:name"))
inside(endpoint.requestSerializer) {
case simple: SimpleSerializer[_] => simple.`type` should ===(classOf[UUID])
}
endpoint.responseSerializer should ===(MessageSerializers.STRING)
}
"fail to read a Scala service descriptor from a class" in {
intercept[IllegalArgumentException] {
ServiceReader.readServiceDescriptor(getClass.getClassLoader, classOf[ScalaMockServiceWrong])
}
}
"fail to read a Java service descriptor from a non-public interface (and report it in a user-friendly manner)" in {
val caught = intercept[IllegalArgumentException] {
ServiceReader.readServiceDescriptor(getClass.getClassLoader, classOf[NotPublicInterfaceService])
}
caught.getMessage should ===("Service API must be described in a public interface.")
}
"resolve the service descriptor path param serializers" in {
val descriptor = serviceDescriptor[BlogService]
def serializeArgs(call: Descriptor.Call[_, _], args: Seq[Any]): Seq[Seq[String]] = {
call.serviceCallHolder() match {
case method: MethodServiceCallHolder => method.invoke(args.asInstanceOf[Seq[AnyRef]])
}
}
def deserializeParams(call: Descriptor.Call[_, _], params: Seq[Seq[String]]): Seq[Any] = {
call.serviceCallHolder() match {
case method: MethodServiceCallHolder =>
class ArgsCapturingServiceCall(val args: Array[AnyRef]) extends ServiceCall[Any, Any] {
override def invoke(request: Any): CompletionStage[Any] = ???
}
val blogService = java.lang.reflect.Proxy.newProxyInstance(classOf[BlogService].getClassLoader, Array(classOf[BlogService]), new InvocationHandler {
override def invoke(proxy: scala.Any, method: java.lang.reflect.Method, args: Array[AnyRef]): AnyRef = {
new ArgsCapturingServiceCall(args)
}
})
method.create(blogService, params).asInstanceOf[ArgsCapturingServiceCall].args
}
}
val blogCall = descriptor.calls().get(0)
deserializeParams(blogCall, Seq(Seq("some name"))) should ===(Seq("some name"))
serializeArgs(blogCall, Seq("some name")) should ===(Seq(Seq("some name")))
val postsCall = descriptor.calls().get(1)
deserializeParams(postsCall, Seq(Seq("some name"), Seq("3"), Seq("10"))) should ===(Seq("some name", Optional.of(3), Optional.of(10)))
serializeArgs(postsCall, Seq("some name", Optional.of(3), Optional.of(10))) should ===(Seq(Seq("some name"), Seq("3"), Seq("10")))
deserializeParams(postsCall, Seq(Seq("some name"), Seq(), Seq())) should ===(Seq("some name", Optional.empty, Optional.empty))
serializeArgs(postsCall, Seq("some name", Optional.empty, Optional.empty)) should ===(Seq(Seq("some name"), Seq(), Seq()))
val postCall = descriptor.calls().get(2)
deserializeParams(postCall, Seq(Seq("some name"), Seq("10"))) should ===(Seq("some name", 10L))
serializeArgs(postCall, Seq("some name", 10L)) should ===(Seq(Seq("some name"), Seq("10")))
val commentCall = descriptor.calls().get(3)
deserializeParams(commentCall, Seq(Seq("some name"), Seq("10"), Seq("20"))) should ===(Seq("some name", 10L, 20L))
serializeArgs(commentCall, Seq("some name", 10L, 20L)) should ===(Seq(Seq("some name"), Seq("10"), Seq("20")))
}
}
def serviceDescriptor[S <: Service](implicit ct: ClassTag[S]) = {
ServiceReader.resolveServiceDescriptor(ServiceReader.readServiceDescriptor(
getClass.getClassLoader,
ct.runtimeClass.asInstanceOf[Class[Service]]
), getClass.getClassLoader, Map(JacksonPlaceholderSerializerFactory -> new SimpleSerializerFactory),
Map(JacksonPlaceholderExceptionSerializer -> new SimpleExceptionSerializer))
}
private class SimpleSerializerFactory extends SerializerFactory {
override def messageSerializerFor[MessageEntity](`type`: Type): MessageSerializer[MessageEntity, _] = {
new SimpleSerializer[MessageEntity](`type`)
}
}
private class SimpleSerializer[MessageEntity](val `type`: Type) extends StrictMessageSerializer[MessageEntity] {
val serializer = new NegotiatedSerializer[MessageEntity, ByteString]() {
override def serialize(messageEntity: MessageEntity): ByteString = {
ByteString.fromString(messageEntity.toString)
}
override def protocol() = new MessageProtocol(Optional.of("text/plain"), Optional.of("utf-8"), Optional.empty())
}
val deser = new NegotiatedDeserializer[MessageEntity, ByteString] {
override def deserialize(bytes: ByteString): MessageEntity = bytes.utf8String.asInstanceOf[MessageEntity]
}
override def deserializer(messageHeader: MessageProtocol) = deser
override def serializerForResponse(acceptedMessageHeaders: util.List[MessageProtocol]) = serializer
override def serializerForRequest() = serializer
}
private class SimpleExceptionSerializer extends ExceptionSerializer {
override def serialize(exception: Throwable, accept: util.Collection[MessageProtocol]): RawExceptionMessage = ???
override def deserialize(message: RawExceptionMessage): Throwable = ???
}
}
| edouardKaiser/lagom | service/javadsl/api/src/test/scala/com/lightbend/lagom/internal/api/ServiceReaderSpec.scala | Scala | apache-2.0 | 7,155 |
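The dynamic-proxy trick used above to capture call arguments is plain java.lang.reflect.Proxy; reduced to its core as a standalone sketch (a Scala trait with only abstract methods compiles to a Java interface, so it can be proxied):

import java.lang.reflect.{InvocationHandler, Method, Proxy}

trait Greeter { def greet(name: String): String }

val greeter = Proxy.newProxyInstance(
  classOf[Greeter].getClassLoader,
  Array(classOf[Greeter]),
  new InvocationHandler {
    def invoke(proxy: Any, method: Method, args: Array[AnyRef]): AnyRef =
      s"hello, ${args(0)}" // intercepts every interface call
  }
).asInstanceOf[Greeter]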
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ly.stealth.mesos.exhibitor
import org.junit.Assert._
import org.junit.Test
import scala.util.{Failure, Try}
class ConstraintTest {
@Test
def parse() {
val like = Constraint("like:1").asInstanceOf[Constraint.Like]
assertEquals("1", like.regex)
assertFalse(like.negated)
val unlike = Constraint("unlike:1").asInstanceOf[Constraint.Like]
assertEquals("1", unlike.regex)
assertTrue(unlike.negated)
val unique = Constraint("unique")
assertTrue(unique.isInstanceOf[Constraint.Unique])
val cluster = Constraint("cluster").asInstanceOf[Constraint.Cluster]
assertEquals(None, cluster.value)
val cluster123 = Constraint("cluster:123").asInstanceOf[Constraint.Cluster]
assertEquals(Some("123"), cluster123.value)
val groupBy = Constraint("groupBy").asInstanceOf[Constraint.GroupBy]
assertEquals(1, groupBy.groups)
val groupBy3 = Constraint("groupBy:3").asInstanceOf[Constraint.GroupBy]
assertEquals(3, groupBy3.groups)
Try(Constraint("unsupported")) match {
case Failure(t) if t.isInstanceOf[IllegalArgumentException] => assertTrue("" + t, t.getMessage.contains("Unsupported condition"))
case other => fail(other.toString)
}
val constraints = Constraint.parse("hostname=unique,hostname=like:slave.*")
assertEquals(1, constraints.size)
val hostnameConstraintsOpt = constraints.get("hostname")
assertNotEquals(None, hostnameConstraintsOpt)
val hostnameConstraints = hostnameConstraintsOpt.get
assertEquals(2, hostnameConstraints.size)
}
@Test
def matches() {
assertTrue(Constraint("like:abc").matches("abc"))
assertFalse(Constraint("like:abc").matches("abc1"))
assertTrue(Constraint("like:a.*").matches("abc"))
assertFalse(Constraint("like:a.*").matches("bc"))
assertTrue(Constraint("unique").matches("a"))
assertFalse(Constraint("unique").matches("a", List("a")))
assertTrue(Constraint("cluster").matches("a"))
assertFalse(Constraint("cluster").matches("b", List("a")))
assertTrue(Constraint("groupBy").matches("a", List("a")))
assertFalse(Constraint("groupBy").matches("a", List("b")))
}
@Test
def string() {
assertEquals("like:abc", "" + Constraint("like:abc"))
assertEquals("unlike:abc", "" + Constraint("unlike:abc"))
assertEquals("unique", "" + Constraint("unique"))
assertEquals("cluster", "" + Constraint("cluster"))
assertEquals("cluster:123", "" + Constraint("cluster:123"))
assertEquals("groupBy", "" + Constraint("groupBy"))
assertEquals("groupBy:3", "" + Constraint("groupBy:3"))
}
@Test
def matchesLike() {
val like = Constraint("like:1.*2")
assertTrue(like.matches("12"))
assertTrue(like.matches("1a2"))
assertTrue(like.matches("1ab2"))
assertFalse(like.matches("a1a2"))
assertFalse(like.matches("1a2a"))
val unlike = Constraint("unlike:1")
assertFalse(unlike.matches("1"))
assertTrue(unlike.matches("2"))
}
@Test
def matchesUnique() {
val unique = Constraint("unique")
assertTrue(unique.matches("1"))
assertTrue(unique.matches("2", List("1")))
assertTrue(unique.matches("3", List("1", "2")))
assertFalse(unique.matches("1", List("1", "2")))
assertFalse(unique.matches("2", List("1", "2")))
}
@Test
def matchesCluster() {
val cluster = Constraint("cluster")
assertTrue(cluster.matches("1"))
assertTrue(cluster.matches("2"))
assertTrue(cluster.matches("1", List("1")))
assertTrue(cluster.matches("1", List("1", "1")))
assertFalse(cluster.matches("2", List("1")))
val cluster3 = Constraint("cluster:3")
assertTrue(cluster3.matches("3"))
assertFalse(cluster3.matches("2"))
assertTrue(cluster3.matches("3", List("3")))
assertTrue(cluster3.matches("3", List("3", "3")))
assertFalse(cluster3.matches("2", List("3")))
}
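  // groupBy (optionally groupBy:N) distributes values across at most N distinct
  // groups: a value only matches if adding it keeps the group counts balanced,
  // as the assertions below illustrate.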
@Test
def matchesGroupBy() {
val groupBy = Constraint("groupBy")
assertTrue(groupBy.matches("1"))
assertTrue(groupBy.matches("1", List("1")))
assertTrue(groupBy.matches("1", List("1", "1")))
assertFalse(groupBy.matches("1", List("2")))
val groupBy2 = Constraint("groupBy:2")
assertTrue(groupBy2.matches("1"))
assertFalse(groupBy2.matches("1", List("1")))
assertFalse(groupBy2.matches("1", List("1", "1")))
assertTrue(groupBy2.matches("2", List("1")))
assertTrue(groupBy2.matches("1", List("1", "2")))
assertTrue(groupBy2.matches("2", List("1", "2")))
assertFalse(groupBy2.matches("1", List("1", "1", "2")))
assertTrue(groupBy2.matches("2", List("1", "1", "2")))
}
}
| CiscoCloud/exhibitor-mesos-framework | src/main/test/ly/stealth/mesos/exhibitor/ConstraintTest.scala | Scala | apache-2.0 | 5,405 |
package com.example.app
import com.example.service.{HttpServiceComponent, TwitterServiceComponent}
/**
* Created by takao on 2016/11/03.
*/
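// Cake-pattern wiring: each mixed-in *Component trait declares an abstract
// dependency, and this registry supplies the single concrete instance for each.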
object ComponentRegistry extends
TwitterImageAppComponent with
ConfigComponent with
TwitterServiceComponent with
FlowsComponent with
HttpServiceComponent {
override val config = new Config()
override val twitterImageApp = new TwitterImageApp
override val twitterService = new TwitterService
override val flows = new Flows
override val httpService = new HttpService
}
| SakaiTakao/TwitterImageDownloader | src/main/scala/com/example/app/ComponentRegistry.scala | Scala | bsd-2-clause | 554 |
/* NEST (New Scala Test)
* Copyright 2007-2013 LAMP/EPFL
* @author Philipp Haller
*/
// $Id$
package scala.tools.partest
package nest
import java.io.{
File,
FilenameFilter,
IOException,
StringWriter,
FileInputStream,
FileOutputStream,
BufferedReader,
FileReader,
PrintWriter,
FileWriter
}
import java.net.URI
import scala.reflect.io.AbstractFile
import scala.collection.mutable
import scala.reflect.internal.util.ScalaClassLoader
import java.net.URLClassLoader
object FileManager {
def getLogFile(dir: File, fileBase: String, kind: String): File =
new File(dir, fileBase + "-" + kind + ".log")
def getLogFile(file: File, kind: String): File = {
val dir = file.getParentFile
val fileBase = basename(file.getName)
getLogFile(dir, fileBase, kind)
}
def logFileExists(file: File, kind: String) =
getLogFile(file, kind).canRead
def overwriteFileWith(dest: File, file: File) =
dest.isFile && copyFile(file, dest)
def copyFile(from: File, dest: File): Boolean = {
if (from.isDirectory) {
assert(dest.isDirectory, "cannot copy directory to file")
val subDir: Directory = Path(dest) / Directory(from.getName)
subDir.createDirectory()
from.listFiles.toList forall (copyFile(_, subDir))
} else {
val to = if (dest.isDirectory) new File(dest, from.getName) else dest
try {
SFile(to) writeAll SFile(from).slurp()
true
} catch { case _: IOException => false }
}
}
def mapFile(file: File, replace: String => String) {
val f = SFile(file)
f.printlnAll(f.lines.toList map replace: _*)
}
def jarsWithPrefix(dir: Directory, name: String): Iterator[SFile] =
dir.files filter (f => (f hasExtension "jar") && (f.name startsWith name))
def dirsWithPrefix(dir: Directory, name: String): Iterator[Directory] =
dir.dirs filter (_.name startsWith name)
def joinPaths(paths: List[Path]) = ClassPath.join(paths.map(_.getAbsolutePath).distinct: _*)
/** Compares two files using difflib to produce a unified diff.
*
* @param original the first file to be compared
* @param revised the second file to be compared
* @return the unified diff of the compared files or the empty string if they're equal
*/
def compareFiles(original: File, revised: File): String = {
compareContents(io.Source.fromFile(original).getLines.toSeq, io.Source.fromFile(revised).getLines.toSeq, original.getName, revised.getName)
}
/** Compares two lists of lines using difflib to produce a unified diff.
*
   * @param original the first seq of lines to be compared
   * @param revised the second seq of lines to be compared
   * @param originalName file name to be used in the unified diff for `original`
   * @param revisedName file name to be used in the unified diff for `revised`
   * @return the unified diff of `original` and `revised`, or the empty string if they're equal
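   *
   * Example (sketch):
   * {{{
   * FileManager.compareContents(Seq("line1", "line2"), Seq("line1", "changed"))
   * // yields a unified diff labelled with the default names "a" and "b";
   * // equal inputs (ignoring per-line surrounding whitespace) yield ""
   * }}}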
*/
def compareContents(original: Seq[String], revised: Seq[String], originalName: String = "a", revisedName: String = "b"): String = {
import collection.JavaConverters._
object ignoreWhitespace extends difflib.myers.Equalizer[String] {
def equals(orig: String, revised: String) = orig.trim == revised.trim
}
val diff = difflib.DiffUtils.diff(original.asJava, revised.asJava, ignoreWhitespace)
if (diff.getDeltas.isEmpty) ""
    else difflib.DiffUtils.generateUnifiedDiff(originalName, revisedName, original.asJava, diff, 1).asScala.mkString("\n")
}
}
class FileManager(val testClassLoader: URLClassLoader) {
lazy val libraryUnderTest: Path = findArtifact("library")
lazy val reflectUnderTest: Path = findArtifact("reflect")
lazy val compilerUnderTest: Path = findArtifact("compiler")
lazy val testClassPath = testClassLoader.getURLs().map(url => Path(new File(url.toURI))).toList
def this(testClassPath: List[Path]) {
this(new URLClassLoader(testClassPath.toArray map (_.toURI.toURL)))
}
def distKind = {
val p = libraryUnderTest.getAbsolutePath
if (p endsWith "build/quick/classes/library") "quick"
else if (p endsWith "build/pack/lib/scala-library.jar") "pack"
else if (p endsWith "dists/latest/lib/scala-library.jar") "latest"
else "installed"
}
// find library/reflect/compiler jar or subdir under build/$stage/classes/
private def findArtifact(name: String): Path = {
val canaryClass =
name match {
case "library" => Class.forName("scala.Unit", false, testClassLoader)
case "reflect" => Class.forName("scala.reflect.api.Symbols", false, testClassLoader)
case "compiler" => Class.forName("scala.tools.nsc.Main", false, testClassLoader)
}
val path = Path(canaryClass.getProtectionDomain.getCodeSource.getLocation.getPath)
if (path.extension == "jar"
|| path.absolutePathSegments.endsWith(Seq("classes", name))) path
else sys.error(
s"""Test Classloader does not contain a $name jar or classes/$name directory.
|Looked in: $testClassPath""")
}
}
| adriaanm/scala-partest | src/main/scala/scala/tools/partest/nest/FileManager.scala | Scala | bsd-3-clause | 5,055 |
/*
* Copyright (c) 2014-16 Miles Sabin
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package shapeless
import scala.language.experimental.macros
import scala.reflect.macros.blackbox
package object test {
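  // Compile-time assertion helpers: the bodies are empty because the check happens
  // entirely at the type level - e.g. `typed[Int](1)` compiles, `typed[String](1)`
  // does not, and `sameTyped(List(1))(List("a"))` is rejected by the compiler.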
def typed[T](t : => T) {}
def sameTyped[T](t1: => T)(t2: => T) {}
def showType[T]: String = macro TestMacros.showTypeNoValue[T]
def showType[T](t: => T): String = macro TestMacros.showType[T]
}
@macrocompat.bundle
class TestMacros(val c: blackbox.Context) {
import c.universe._
def showTypeNoValue[T: WeakTypeTag]: Tree = q"${weakTypeOf[T].dealias.toString}"
def showType[T: WeakTypeTag](t: Tree): Tree = showTypeNoValue[T]
}
| rorygraves/perf_tester | corpus/shapeless/src/main/scala/shapeless/test/package.scala | Scala | apache-2.0 | 1,170 |
package slate
package util
class DelimitTransformSuite extends SlateSuite {
import DelimitTransform._
"simple" - {
"string" in {
string.to("hello").value shouldBe "hello"
string.from("hello") shouldBe "hello"
}
"arr" in {
string.splitBy("d").to("hello").value.toVector shouldBe Vector("hello")
string.splitBy("d").to("heldlo").value.toVector shouldBe Vector("hel", "lo")
}
"then" in {
(string | "o" | string).to("helloworld").value shouldBe (("hell", "world"))
}
}
"compound" - {
"arr - join - arr" in {
val (r1, r2) =
(string.splitBy(",") | ":" | string.splitBy(";")).to("hello,world:bonjour;monde").value
(r1.toVector, r2.toVector) shouldBe ((Vector("hello", "world"), Vector("bonjour", "monde")))
}
}
}
| edmundnoble/slate | ui/src/test/scala/slate/util/DelimitTransformSuite.scala | Scala | mit | 801 |
package com.fuscus.seien.infra.util
import java.io.ByteArrayInputStream
import java.nio.charset.StandardCharsets
import java.security.cert.{ CertificateFactory, X509Certificate }
trait SSLUtil {
/**
   * Get the expiry (notAfter) timestamp of a certificate, in epoch milliseconds.
   *
   * @param certificateString PEM-encoded X.509 certificate, as an unescaped string
   * @return the certificate's notAfter time in milliseconds since the epoch
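   *
   * Example (sketch; the PEM body below is a placeholder, not a real certificate):
   * {{{
   * val pem = "-----BEGIN CERTIFICATE-----\n...\n-----END CERTIFICATE-----"
   * val notAfterMillis = expiredValidity(pem)
   * }}}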
*/
def expiredValidity(certificateString: String): Long = {
val cert: X509Certificate = generateCertificate(certificateString)
cert.getNotAfter.getTime
}
/**
   * Generate an X.509 certificate from its PEM string representation.
   *
   * @param certificateString PEM-encoded X.509 certificate, as an unescaped string
* @return
*/
private def generateCertificate(certificateString: String): X509Certificate = {
CertificateFactory.getInstance("X.509").generateCertificate(
new ByteArrayInputStream(certificateString.getBytes(StandardCharsets.UTF_8))).asInstanceOf[X509Certificate]
}
}
| watawuwu/seien-backend | modules/infra/app/com/fuscus/seien/infra/util/SSLUtil.scala | Scala | mit | 848 |
/*
* Copyright 2017 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.accounts.frs102
import org.scalatest.{Matchers, WordSpec}
import uk.gov.hmrc.ct.accounts.frs10x.boxes.AC8033
import uk.gov.hmrc.ct.box.CtValidation
class AC8033Spec extends WordSpec with Matchers {
"AC8033 should" should {
"validate secretary successfully when no errors present" in {
val secretary = AC8033(Some("Joana"))
secretary.validate(null) shouldBe empty
}
"validate secretary missing" in {
val secretary = AC8033(None)
secretary.validate(null) shouldBe empty
}
"validate secretary name length" in {
val secretary = AC8033(Some("a" * 41))
val expectedError = Set(CtValidation(Some("AC8033"), "error.AC8033.text.sizeRange", Some(List("1", "40"))))
secretary.validate(null) shouldBe expectedError
}
"validate secretary name characters" in {
val secretary = AC8033(Some("^^"))
val expectedError = Set(CtValidation(Some("AC8033"), "error.AC8033.regexFailure", Some(List("^"))))
secretary.validate(null) shouldBe expectedError
}
}
}
| pncampbell/ct-calculations | src/test/scala/uk/gov/hmrc/ct/accounts/frs102/AC8033Spec.scala | Scala | apache-2.0 | 1,677 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.planner.plan.nodes.physical.stream
import org.apache.flink.api.dag.Transformation
import org.apache.flink.table.api.TableException
import org.apache.flink.table.data.RowData
import org.apache.flink.table.planner.delegation.StreamPlanner
import org.apache.flink.table.planner.plan.nodes.common.CommonPythonCorrelate
import org.apache.flink.table.planner.plan.nodes.logical.FlinkLogicalTableFunctionScan
import org.apache.calcite.plan.{RelOptCluster, RelTraitSet}
import org.apache.calcite.rel.RelNode
import org.apache.calcite.rel.`type`.RelDataType
import org.apache.calcite.rel.core.JoinRelType
import org.apache.calcite.rex.{RexNode, RexProgram}
/**
 * Flink RelNode which matches a join with a Python user-defined table function.
*/
class StreamExecPythonCorrelate(
cluster: RelOptCluster,
traitSet: RelTraitSet,
inputRel: RelNode,
projectProgram: Option[RexProgram],
scan: FlinkLogicalTableFunctionScan,
condition: Option[RexNode],
outputRowType: RelDataType,
joinType: JoinRelType)
extends StreamExecCorrelateBase(
cluster,
traitSet,
inputRel,
projectProgram,
scan,
condition,
outputRowType,
joinType)
with CommonPythonCorrelate {
if (condition.isDefined) {
throw new TableException("Currently Python correlate does not support conditions in left join.")
}
def copy(
traitSet: RelTraitSet,
newChild: RelNode,
projectProgram: Option[RexProgram],
outputType: RelDataType): RelNode = {
new StreamExecPythonCorrelate(
cluster,
traitSet,
newChild,
projectProgram,
scan,
condition,
outputType,
joinType)
}
override protected def translateToPlanInternal(
planner: StreamPlanner): Transformation[RowData] = {
val inputTransformation = getInputNodes.get(0).translateToPlan(planner)
.asInstanceOf[Transformation[RowData]]
createPythonOneInputTransformation(
inputTransformation,
scan,
"StreamExecPythonCorrelate",
outputRowType,
getConfig(planner.getExecEnv, planner.getTableConfig),
joinType)
}
}
| GJL/flink | flink-table/flink-table-planner-blink/src/main/scala/org/apache/flink/table/planner/plan/nodes/physical/stream/StreamExecPythonCorrelate.scala | Scala | apache-2.0 | 2,959 |
package saz2
class Teste {
val nome = "jcranky"
private var _idade = 99
def idade_=(x: Int) = {
require(x < 100)
_idade = x
}
def idade = _idade
}
object MainTeste extends App {
val t = new Teste()
  t.idade = 42 // must stay below 100, or the setter's require throws
println(t.nome)
println(t.idade)
}
| jcranky/saz | src/main/scala/saz2/Teste.scala | Scala | gpl-3.0 | 279 |
object NamingExtractorPattern {
class A
object A {
def unapply(a: A): Option[Int] = Some(3)
}
new A match {
case a@A(3) => /*start*/a/*end*/
}
}
//NamingExtractorPattern.A | katejim/intellij-scala | testdata/typeInference/pattern/NamingExtractorPattern.scala | Scala | apache-2.0 | 189 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.util.collection
import java.io._
import java.util.Comparator
import scala.collection.BufferedIterator
import scala.collection.mutable
import scala.collection.mutable.ArrayBuffer
import com.google.common.io.ByteStreams
import org.apache.spark.{Logging, SparkEnv}
import org.apache.spark.annotation.DeveloperApi
import org.apache.spark.serializer.{DeserializationStream, Serializer}
import org.apache.spark.storage.{BlockId, BlockManager}
import org.apache.spark.util.collection.ExternalAppendOnlyMap.HashComparator
import org.apache.spark.executor.ShuffleWriteMetrics
/**
* :: DeveloperApi ::
* An append-only map that spills sorted content to disk when there is insufficient space for it
* to grow.
*
* This map takes two passes over the data:
*
* (1) Values are merged into combiners, which are sorted and spilled to disk as necessary
* (2) Combiners are read from disk and merged together
*
* The setting of the spill threshold faces the following trade-off: If the spill threshold is
* too high, the in-memory map may occupy more memory than is available, resulting in OOM.
* However, if the spill threshold is too low, we spill frequently and incur unnecessary disk
* writes. This may lead to a performance regression compared to the normal case of using the
* non-spilling AppendOnlyMap.
*
* Two parameters control the memory threshold:
*
* `spark.shuffle.memoryFraction` specifies the collective amount of memory used for storing
* these maps as a fraction of the executor's total memory. Since each concurrently running
* task maintains one map, the actual threshold for each map is this quantity divided by the
* number of running tasks.
*
* `spark.shuffle.safetyFraction` specifies an additional margin of safety as a fraction of
* this threshold, in case map size estimation is not sufficiently accurate.
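 *
 * A minimal usage sketch (assumes a running SparkEnv supplies the default
 * serializer and block manager):
 * {{{
 *   val map = new ExternalAppendOnlyMap[String, Int, Int](v => v, _ + _, _ + _)
 *   map.insertAll(Iterator(("a", 1), ("a", 2), ("b", 3)))
 *   map.iterator.foreach { case (k, sum) => println(s"$k -> $sum") }
 * }}}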
*/
@DeveloperApi
class ExternalAppendOnlyMap[K, V, C](
createCombiner: V => C,
mergeValue: (C, V) => C,
mergeCombiners: (C, C) => C,
serializer: Serializer = SparkEnv.get.serializer,
blockManager: BlockManager = SparkEnv.get.blockManager)
extends Iterable[(K, C)]
with Serializable
with Logging
with Spillable[SizeTracker] {
private var currentMap = new SizeTrackingAppendOnlyMap[K, C]
private val spilledMaps = new ArrayBuffer[DiskMapIterator]
private val sparkConf = SparkEnv.get.conf
private val diskBlockManager = blockManager.diskBlockManager
/**
* Size of object batches when reading/writing from serializers.
*
* Objects are written in batches, with each batch using its own serialization stream. This
* cuts down on the size of reference-tracking maps constructed when deserializing a stream.
*
* NOTE: Setting this too low can cause excessive copying when serializing, since some serializers
* grow internal data structures by growing + copying every time the number of objects doubles.
*/
private val serializerBatchSize = sparkConf.getLong("spark.shuffle.spill.batchSize", 10000)
// Number of bytes spilled in total
private var _diskBytesSpilled = 0L
// Use getSizeAsKb (not bytes) to maintain backwards compatibility if no units are provided
private val fileBufferSize =
sparkConf.getSizeAsKb("spark.shuffle.file.buffer", "32k").toInt * 1024
// Write metrics for current spill
private var curWriteMetrics: ShuffleWriteMetrics = _
private val keyComparator = new HashComparator[K]
private val ser = serializer.newInstance()
/**
* Insert the given key and value into the map.
*/
def insert(key: K, value: V): Unit = {
insertAll(Iterator((key, value)))
}
/**
* Insert the given iterator of keys and values into the map.
*
* When the underlying map needs to grow, check if the global pool of shuffle memory has
* enough room for this to happen. If so, allocate the memory required to grow the map;
* otherwise, spill the in-memory map to disk.
*
* The shuffle memory usage of the first trackMemoryThreshold entries is not tracked.
*/
def insertAll(entries: Iterator[Product2[K, V]]): Unit = {
// An update function for the map that we reuse across entries to avoid allocating
// a new closure each time
var curEntry: Product2[K, V] = null
val update: (Boolean, C) => C = (hadVal, oldVal) => {
if (hadVal) mergeValue(oldVal, curEntry._2) else createCombiner(curEntry._2)
}
while (entries.hasNext) {
curEntry = entries.next()
if (maybeSpill(currentMap, currentMap.estimateSize())) {
currentMap = new SizeTrackingAppendOnlyMap[K, C]
}
currentMap.changeValue(curEntry._1, update)
addElementsRead()
}
}
/**
* Insert the given iterable of keys and values into the map.
*
* When the underlying map needs to grow, check if the global pool of shuffle memory has
* enough room for this to happen. If so, allocate the memory required to grow the map;
* otherwise, spill the in-memory map to disk.
*
* The shuffle memory usage of the first trackMemoryThreshold entries is not tracked.
*/
def insertAll(entries: Iterable[Product2[K, V]]): Unit = {
insertAll(entries.iterator)
}
/**
* Sort the existing contents of the in-memory map and spill them to a temporary file on disk.
*/
override protected[this] def spill(collection: SizeTracker): Unit = {
val (blockId, file) = diskBlockManager.createTempLocalBlock()
curWriteMetrics = new ShuffleWriteMetrics()
var writer = blockManager.getDiskWriter(blockId, file, ser, fileBufferSize, curWriteMetrics)
var objectsWritten = 0
// List of batch sizes (bytes) in the order they are written to disk
val batchSizes = new ArrayBuffer[Long]
// Flush the disk writer's contents to disk, and update relevant variables
def flush(): Unit = {
val w = writer
writer = null
w.commitAndClose()
_diskBytesSpilled += curWriteMetrics.shuffleBytesWritten
batchSizes.append(curWriteMetrics.shuffleBytesWritten)
objectsWritten = 0
}
var success = false
try {
val it = currentMap.destructiveSortedIterator(keyComparator)
while (it.hasNext) {
val kv = it.next()
writer.write(kv._1, kv._2)
objectsWritten += 1
if (objectsWritten == serializerBatchSize) {
flush()
curWriteMetrics = new ShuffleWriteMetrics()
writer = blockManager.getDiskWriter(blockId, file, ser, fileBufferSize, curWriteMetrics)
}
}
if (objectsWritten > 0) {
flush()
} else if (writer != null) {
val w = writer
writer = null
w.revertPartialWritesAndClose()
}
success = true
} finally {
if (!success) {
// This code path only happens if an exception was thrown above before we set success;
// close our stuff and let the exception be thrown further
if (writer != null) {
writer.revertPartialWritesAndClose()
}
if (file.exists()) {
file.delete()
}
}
}
spilledMaps.append(new DiskMapIterator(file, blockId, batchSizes))
}
def diskBytesSpilled: Long = _diskBytesSpilled
/**
* Return an iterator that merges the in-memory map with the spilled maps.
* If no spill has occurred, simply return the in-memory map's iterator.
*/
override def iterator: Iterator[(K, C)] = {
if (spilledMaps.isEmpty) {
currentMap.iterator
} else {
new ExternalIterator()
}
}
/**
* An iterator that sort-merges (K, C) pairs from the in-memory map and the spilled maps
*/
private class ExternalIterator extends Iterator[(K, C)] {
// A queue that maintains a buffer for each stream we are currently merging
// This queue maintains the invariant that it only contains non-empty buffers
private val mergeHeap = new mutable.PriorityQueue[StreamBuffer]
// Input streams are derived both from the in-memory map and spilled maps on disk
// The in-memory map is sorted in place, while the spilled maps are already in sorted order
private val sortedMap = currentMap.destructiveSortedIterator(keyComparator)
private val inputStreams = (Seq(sortedMap) ++ spilledMaps).map(it => it.buffered)
inputStreams.foreach { it =>
val kcPairs = new ArrayBuffer[(K, C)]
readNextHashCode(it, kcPairs)
if (kcPairs.length > 0) {
mergeHeap.enqueue(new StreamBuffer(it, kcPairs))
}
}
/**
* Fill a buffer with the next set of keys with the same hash code from a given iterator. We
* read streams one hash code at a time to ensure we don't miss elements when they are merged.
*
* Assumes the given iterator is in sorted order of hash code.
*
* @param it iterator to read from
* @param buf buffer to write the results into
*/
private def readNextHashCode(it: BufferedIterator[(K, C)], buf: ArrayBuffer[(K, C)]): Unit = {
if (it.hasNext) {
var kc = it.next()
buf += kc
val minHash = hashKey(kc)
while (it.hasNext && it.head._1.hashCode() == minHash) {
kc = it.next()
buf += kc
}
}
}
/**
* If the given buffer contains a value for the given key, merge that value into
* baseCombiner and remove the corresponding (K, C) pair from the buffer.
*/
private def mergeIfKeyExists(key: K, baseCombiner: C, buffer: StreamBuffer): C = {
var i = 0
while (i < buffer.pairs.length) {
val pair = buffer.pairs(i)
if (pair._1 == key) {
// Note that there's at most one pair in the buffer with a given key, since we always
// merge stuff in a map before spilling, so it's safe to return after the first we find
removeFromBuffer(buffer.pairs, i)
return mergeCombiners(baseCombiner, pair._2)
}
i += 1
}
baseCombiner
}
/**
* Remove the index'th element from an ArrayBuffer in constant time, swapping another element
* into its place. This is more efficient than the ArrayBuffer.remove method because it does
* not have to shift all the elements in the array over. It works for our array buffers because
* we don't care about the order of elements inside, we just want to search them for a key.
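     * For example, removing index 0 from a buffer holding [a, b, c] swaps in the
     * last element, leaving [c, b] and returning a.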
*/
private def removeFromBuffer[T](buffer: ArrayBuffer[T], index: Int): T = {
val elem = buffer(index)
buffer(index) = buffer(buffer.size - 1) // This also works if index == buffer.size - 1
buffer.reduceToSize(buffer.size - 1)
elem
}
/**
* Return true if there exists an input stream that still has unvisited pairs.
*/
override def hasNext: Boolean = mergeHeap.length > 0
/**
* Select a key with the minimum hash, then combine all values with the same key from all
* input streams.
*/
override def next(): (K, C) = {
if (mergeHeap.length == 0) {
throw new NoSuchElementException
}
// Select a key from the StreamBuffer that holds the lowest key hash
val minBuffer = mergeHeap.dequeue()
val minPairs = minBuffer.pairs
val minHash = minBuffer.minKeyHash
val minPair = removeFromBuffer(minPairs, 0)
val minKey = minPair._1
var minCombiner = minPair._2
assert(hashKey(minPair) == minHash)
// For all other streams that may have this key (i.e. have the same minimum key hash),
// merge in the corresponding value (if any) from that stream
val mergedBuffers = ArrayBuffer[StreamBuffer](minBuffer)
while (mergeHeap.length > 0 && mergeHeap.head.minKeyHash == minHash) {
val newBuffer = mergeHeap.dequeue()
minCombiner = mergeIfKeyExists(minKey, minCombiner, newBuffer)
mergedBuffers += newBuffer
}
// Repopulate each visited stream buffer and add it back to the queue if it is non-empty
mergedBuffers.foreach { buffer =>
if (buffer.isEmpty) {
readNextHashCode(buffer.iterator, buffer.pairs)
}
if (!buffer.isEmpty) {
mergeHeap.enqueue(buffer)
}
}
(minKey, minCombiner)
}
/**
* A buffer for streaming from a map iterator (in-memory or on-disk) sorted by key hash.
* Each buffer maintains all of the key-value pairs with what is currently the lowest hash
* code among keys in the stream. There may be multiple keys if there are hash collisions.
* Note that because when we spill data out, we only spill one value for each key, there is
* at most one element for each key.
*
* StreamBuffers are ordered by the minimum key hash currently available in their stream so
* that we can put them into a heap and sort that.
*/
private class StreamBuffer(
val iterator: BufferedIterator[(K, C)],
val pairs: ArrayBuffer[(K, C)])
extends Comparable[StreamBuffer] {
def isEmpty: Boolean = pairs.length == 0
// Invalid if there are no more pairs in this stream
def minKeyHash: Int = {
assert(pairs.length > 0)
hashKey(pairs.head)
}
override def compareTo(other: StreamBuffer): Int = {
// descending order because mutable.PriorityQueue dequeues the max, not the min
if (other.minKeyHash < minKeyHash) -1 else if (other.minKeyHash == minKeyHash) 0 else 1
}
}
}
/**
* An iterator that returns (K, C) pairs in sorted order from an on-disk map
*/
private class DiskMapIterator(file: File, blockId: BlockId, batchSizes: ArrayBuffer[Long])
extends Iterator[(K, C)]
{
    private val batchOffsets = batchSizes.scanLeft(0L)(_ + _) // Size will be batchSizes.length + 1
assert(file.length() == batchOffsets.last,
"File length is not equal to the last batch offset:\\n" +
s" file length = ${file.length}\\n" +
s" last batch offset = ${batchOffsets.last}\\n" +
s" all batch offsets = ${batchOffsets.mkString(",")}"
)
private var batchIndex = 0 // Which batch we're in
private var fileStream: FileInputStream = null
// An intermediate stream that reads from exactly one batch
// This guards against pre-fetching and other arbitrary behavior of higher level streams
private var deserializeStream = nextBatchStream()
private var nextItem: (K, C) = null
private var objectsRead = 0
/**
* Construct a stream that reads only from the next batch.
*/
private def nextBatchStream(): DeserializationStream = {
// Note that batchOffsets.length = numBatches + 1 since we did a scan above; check whether
// we're still in a valid batch.
if (batchIndex < batchOffsets.length - 1) {
if (deserializeStream != null) {
deserializeStream.close()
fileStream.close()
deserializeStream = null
fileStream = null
}
val start = batchOffsets(batchIndex)
fileStream = new FileInputStream(file)
fileStream.getChannel.position(start)
batchIndex += 1
val end = batchOffsets(batchIndex)
assert(end >= start, "start = " + start + ", end = " + end +
", batchOffsets = " + batchOffsets.mkString("[", ", ", "]"))
val bufferedStream = new BufferedInputStream(ByteStreams.limit(fileStream, end - start))
val compressedStream = blockManager.wrapForCompression(blockId, bufferedStream)
ser.deserializeStream(compressedStream)
} else {
// No more batches left
cleanup()
null
}
}
/**
* Return the next (K, C) pair from the deserialization stream.
*
* If the current batch is drained, construct a stream for the next batch and read from it.
* If no more pairs are left, return null.
*/
private def readNextItem(): (K, C) = {
try {
val k = deserializeStream.readKey().asInstanceOf[K]
val c = deserializeStream.readValue().asInstanceOf[C]
val item = (k, c)
objectsRead += 1
if (objectsRead == serializerBatchSize) {
objectsRead = 0
deserializeStream = nextBatchStream()
}
item
} catch {
case e: EOFException =>
cleanup()
null
}
}
override def hasNext: Boolean = {
if (nextItem == null) {
if (deserializeStream == null) {
return false
}
nextItem = readNextItem()
}
nextItem != null
}
override def next(): (K, C) = {
val item = if (nextItem == null) readNextItem() else nextItem
if (item == null) {
throw new NoSuchElementException
}
nextItem = null
item
}
// TODO: Ensure this gets called even if the iterator isn't drained.
private def cleanup() {
batchIndex = batchOffsets.length // Prevent reading any other batch
val ds = deserializeStream
deserializeStream = null
fileStream = null
ds.close()
file.delete()
}
}
/** Convenience function to hash the given (K, C) pair by the key. */
private def hashKey(kc: (K, C)): Int = ExternalAppendOnlyMap.hash(kc._1)
}
private[spark] object ExternalAppendOnlyMap {
/**
* Return the hash code of the given object. If the object is null, return a special hash code.
*/
private def hash[T](obj: T): Int = {
if (obj == null) 0 else obj.hashCode()
}
/**
* A comparator which sorts arbitrary keys based on their hash codes.
*/
private class HashComparator[K] extends Comparator[K] {
def compare(key1: K, key2: K): Int = {
val hash1 = hash(key1)
val hash2 = hash(key2)
if (hash1 < hash2) -1 else if (hash1 == hash2) 0 else 1
}
}
}
| andrewor14/iolap | core/src/main/scala/org/apache/spark/util/collection/ExternalAppendOnlyMap.scala | Scala | apache-2.0 | 18,570 |
/***********************************************************************
* Copyright (c) 2013-2022 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.convert2
import java.util.Date
import com.typesafe.scalalogging.LazyLogging
import org.junit.runner.RunWith
import org.locationtech.geomesa.convert2.TypeInference.{DerivedTransform, LatLon}
import org.locationtech.geomesa.utils.text.WKTUtils
import org.specs2.mutable.Specification
import org.specs2.runner.JUnitRunner
@RunWith(classOf[JUnitRunner])
class TypeInferenceTest extends Specification with LazyLogging {
import org.locationtech.geomesa.utils.geotools.ObjectType._
val uuidString = "28a12c18-e5ae-4c04-ae7b-bf7cdbfaf234"
val uuid = java.util.UUID.fromString(uuidString)
val pointString = "POINT(45 55)"
val point = WKTUtils.read(pointString)
val lineStringString = "LINESTRING(-47.28515625 -25.576171875, -48 -26, -49 -27)"
val lineString = WKTUtils.read(lineStringString)
val polygonString = "POLYGON((44 24, 44 28, 49 27, 49 23, 44 24))"
val polygon = WKTUtils.read(polygonString)
val multiPointString = "MULTIPOINT ((10 40), (40 30), (20 20), (30 10))"
val multiPoint = WKTUtils.read(multiPointString)
val multiLineStringString = "MULTILINESTRING ((10 10, 20 20, 10 40),(40 40, 30 30, 40 20, 30 10))"
val multiLineString = WKTUtils.read(multiLineStringString)
val multiPolygonString = "MULTIPOLYGON (((30 20, 45 40, 10 40, 30 20)),((15 5, 40 10, 10 20, 5 10, 15 5)))"
val multiPolygon = WKTUtils.read(multiPolygonString)
val geometryCollectionString = "GEOMETRYCOLLECTION(POINT(4 6),LINESTRING(4 6,7 10))"
val geometryCollection = WKTUtils.read(geometryCollectionString)
"TypeInference" should {
"infer simple types" in {
// note: don't put any valid lat/lon pairs next to each other or it will create a geometry type
val types = TypeInference.infer(Seq(Seq("a", 1, 200L, 1f, 200d, true)))
types.map(_.typed) mustEqual Seq(STRING, INT, LONG, FLOAT, DOUBLE, BOOLEAN)
}
"infer simple types from strings" in {
// note: don't put any valid lat/lon pairs next to each other or it will create a geometry type
val types = TypeInference.infer(Seq(Seq("a", "1", s"${Int.MaxValue.toLong + 1L}", "1.1", "true", "1.00000001")))
types.map(_.typed) mustEqual Seq(STRING, INT, LONG, FLOAT, BOOLEAN, DOUBLE)
}
"infer complex types" in {
val types = TypeInference.infer(Seq(Seq(new Date(), Array[Byte](0), uuid)))
types.map(_.typed) mustEqual Seq(DATE, BYTES, UUID)
}
"infer complex types from strings" in {
val types = TypeInference.infer(Seq(Seq("2018-01-01T00:00:00.000Z", uuidString)))
types.map(_.typed) mustEqual Seq(DATE, UUID)
}
"infer geometry types" in {
val types = TypeInference.infer(Seq(Seq(point, lineString, polygon, multiPoint, multiLineString,
multiPolygon, geometryCollection))).map(_.typed)
types mustEqual Seq(POINT, LINESTRING, POLYGON, MULTIPOINT, MULTILINESTRING, MULTIPOLYGON, GEOMETRY_COLLECTION)
}
"infer geometry types from strings" in {
val types = TypeInference.infer(Seq(Seq(pointString, lineStringString, polygonString, multiPointString,
multiLineStringString, multiPolygonString, geometryCollectionString))).map(_.typed)
types mustEqual Seq(POINT, LINESTRING, POLYGON, MULTIPOINT, MULTILINESTRING, MULTIPOLYGON, GEOMETRY_COLLECTION)
}
"merge up number types" in {
val types = Seq(Seq(1d), Seq(1f), Seq(1L), Seq(1))
foreach(types.drop(0).permutations.toSeq)(t => TypeInference.infer(t).map(_.typed) mustEqual Seq(DOUBLE))
foreach(types.drop(1).permutations.toSeq)(t => TypeInference.infer(t).map(_.typed) mustEqual Seq(FLOAT))
foreach(types.drop(2).permutations.toSeq)(t => TypeInference.infer(t).map(_.typed) mustEqual Seq(LONG))
}
"merge up number types with lat/lon" in {
TypeInference.infer(Seq(Seq(135), Seq(45))).map(_.typed) mustEqual Seq(INT)
TypeInference.infer(Seq(Seq(135f), Seq(45f))).map(_.typed) mustEqual Seq(FLOAT)
TypeInference.infer(Seq(Seq(135d), Seq(45d))).map(_.typed) mustEqual Seq(DOUBLE)
}
"merge up geometry types" in {
val types = Seq(Seq(point), Seq(lineString), Seq(polygon), Seq(multiPoint), Seq(multiLineString),
Seq(multiPolygon), Seq(geometryCollection))
foreach(types.permutations.toSeq)(t => TypeInference.infer(t).map(_.typed) mustEqual Seq(GEOMETRY))
}
"merge up null values" in {
val values = Seq("a", 1, 1L, 1f, 1d, true, new Date(), Seq[Byte](0), uuid, point, lineString,
polygon, multiPoint, multiLineString, multiPolygon, geometryCollection)
foreach(values) { value =>
TypeInference.infer(Seq(Seq(value), Seq(null))) mustEqual TypeInference.infer(Seq(Seq(value)))
}
}
"create points from lon/lat pairs" in {
import LatLon.{Lat, NotLatLon}
val floats = TypeInference.infer(Seq(Seq(45f, 55f, "foo")))
floats.map(_.typed) mustEqual Seq(FLOAT, FLOAT, STRING, POINT)
val doubles = TypeInference.infer(Seq(Seq(45d, 55d, "foo")))
doubles.map(_.typed) mustEqual Seq(DOUBLE, DOUBLE, STRING, POINT)
foreach(Seq(floats, doubles)) { types =>
types.map(_.latlon) mustEqual Seq(Lat, Lat, NotLatLon, NotLatLon)
types(3).transform mustEqual DerivedTransform("point", types(0).name, types(1).name)
}
}
"create points from lat/lon pairs" in {
import LatLon.{Lat, Lon, NotLatLon}
val floats = TypeInference.infer(Seq(Seq(45f, 120f, "foo")))
floats.map(_.typed) mustEqual Seq(FLOAT, FLOAT, STRING, POINT)
val doubles = TypeInference.infer(Seq(Seq(45d, 120d, "foo")))
doubles.map(_.typed) mustEqual Seq(DOUBLE, DOUBLE, STRING, POINT)
foreach(Seq(floats, doubles)) { types =>
types.map(_.latlon) mustEqual Seq(Lat, Lon, NotLatLon, NotLatLon)
types(3).transform mustEqual DerivedTransform("point", types(1).name, types(0).name)
}
}
"create points from named lat/lon fields" in {
import LatLon.{Lat, Lon, NotLatLon}
val floats = TypeInference.infer(Seq(Seq(45f, 120f, 121f, "foo")), Seq("lat", "bar", "lon", "foo"))
floats.map(_.typed) mustEqual Seq(FLOAT, FLOAT, FLOAT, STRING, POINT)
val doubles = TypeInference.infer(Seq(Seq(45d, 120d, 121d, "foo")), Seq("lat", "bar", "lon", "foo"))
doubles.map(_.typed) mustEqual Seq(DOUBLE, DOUBLE, DOUBLE, STRING, POINT)
foreach(Seq(floats, doubles)) { types =>
types.map(_.latlon) mustEqual Seq(Lat, Lon, Lon, NotLatLon, NotLatLon)
types(4).transform mustEqual DerivedTransform("point", types(2).name, types(0).name)
}
}
"not create points from unpaired numbers" in {
TypeInference.infer(Seq(Seq(45f, "foo", 55f))).map(_.typed) mustEqual Seq(FLOAT, STRING, FLOAT)
TypeInference.infer(Seq(Seq(45d, "foo", 55d))).map(_.typed) mustEqual Seq(DOUBLE, STRING, DOUBLE)
}
"not create points if another geometry is present" in {
TypeInference.infer(Seq(Seq(45f, 55f, "POINT (40 50)"))).map(_.typed) mustEqual Seq(FLOAT, FLOAT, POINT)
TypeInference.infer(Seq(Seq(45d, 55d, "POINT (40 50)"))).map(_.typed) mustEqual Seq(DOUBLE, DOUBLE, POINT)
}
"not create points if values are not valid lat/lons" in {
TypeInference.infer(Seq(Seq(145f, 155f, "foo"))).map(_.typed) mustEqual Seq(FLOAT, FLOAT, STRING)
TypeInference.infer(Seq(Seq(145d, 155d, "foo"))).map(_.typed) mustEqual Seq(DOUBLE, DOUBLE, STRING)
}
"infer types despite some failures" in {
TypeInference.infer(Seq.tabulate(11)(i => Seq(i)) :+ Seq("foo")).map(_.typed) mustEqual Seq(INT)
TypeInference.infer(Seq.tabulate(11)(i => Seq(i)) :+ Seq("foo"), failureRate = 0.01f).map(_.typed) mustEqual Seq(STRING)
}
"fall back to string type" in {
TypeInference.infer(Seq(Seq("2018-01-01"), Seq(uuidString))).map(_.typed) mustEqual Seq(STRING)
}
}
}
| locationtech/geomesa | geomesa-convert/geomesa-convert-common/src/test/scala/org/locationtech/geomesa/convert2/TypeInferenceTest.scala | Scala | apache-2.0 | 8,293 |
package models
import models.common.ReferenceTo
import models.customer.{ShopIn, AgentIn}
import play.api.libs.json._
case class User(username: String, agentId: Option[ReferenceTo[AgentIn]])
object User {
implicit val format = Json.format[User]
}
case class UserCredential(username: String, password: String)
object UserCredential {
implicit val reads = Json.reads[UserCredential]
}
| tsechov/shoehorn | app/models/user.scala | Scala | apache-2.0 | 433 |
/*
* Copyright (c) 2013-2014, ARM Limited
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package com.arm.carp.frontends.vobla
import scala.collection.mutable.HashMap
import com.arm.carp.{ pencil => Pencil }
import com.arm.carp.{ vobla => Vobla}
import com.arm.carp.frontends.vobla.VoblaImplicits._
private class ValueTransformer(
valueType: Vobla.ValueVersion.ValueVersion,
protected val structures: StructProvider,
protected val varTable: HashMap[Vobla.Variable, View] = HashMap[Vobla.Variable, View]())
extends Transformer {
protected val RealType = valueType match {
case Vobla.ValueVersion.FLOAT => FloatType
case Vobla.ValueVersion.DOUBLE => DoubleType
case Vobla.ValueVersion.COMPLEX_FLOAT => FloatType
case Vobla.ValueVersion.COMPLEX_DOUBLE => DoubleType
}
protected val ValueType = valueType match {
case Vobla.ValueVersion.FLOAT => FloatType
case Vobla.ValueVersion.DOUBLE => DoubleType
case Vobla.ValueVersion.COMPLEX_FLOAT => structures.ComplexFloatType
case Vobla.ValueVersion.COMPLEX_DOUBLE => structures.ComplexDoubleType
}
protected val ComplexType = valueType match {
case Vobla.ValueVersion.FLOAT => structures.ComplexFloatType
case Vobla.ValueVersion.DOUBLE => structures.ComplexDoubleType
case Vobla.ValueVersion.COMPLEX_FLOAT => structures.ComplexFloatType
case Vobla.ValueVersion.COMPLEX_DOUBLE => structures.ComplexDoubleType
}
protected def transformScalarType(t: Vobla.ScalarType) = t match {
case Vobla.ValueType(assignable)=> ValueType.updateConst(!assignable)
case Vobla.RealType(assignable) => RealType.updateConst(!assignable)
case Vobla.IndexType(assignable) => IndexType.updateConst(!assignable)
case Vobla.ComplexType(assignable) => ComplexType.updateConst(!assignable)
}
protected def transformIndexCst(i: Int) = Pencil.IntegerConstant(IndexType, i)
protected[vobla] def createArrayVariable(baseType: Pencil.ScalarType, sizes: List[Pencil.ScalarExpression], restrict: Boolean = true,
init: Option[Pencil.ArrayConstant] = None) = {
val t = genArrayType(baseType, sizes)
val v = Pencil.ArrayVariable(t, "array_var", restrict, init)
val factors = List.tabulate(sizes.size, sizes.size)((i, j) => if (i == j) 1 else 0)
(new SimpleArrayView(false, true, Vobla.ArrayPart.Full, v, sizes, sizes, List.fill(sizes.size)(IndexZero), factors), v)
}
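  // Folds a dimension list into a nested PENCIL array type, outermost dimension
  // first: e.g. sizes List(n, m) over Float yields ArrayType(ArrayType(Float, m), n).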
protected def genArrayType(baseType: Pencil.ScalarType, sizes: List[Pencil.ScalarExpression]) =
sizes.init.foldRight(Pencil.ArrayType(baseType, sizes.last))((s, base) => Pencil.ArrayType(base, s))
}
| carpproject/vobla | src/scala/com/arm/carp/frontends/vobla/ValueTransformer.scala | Scala | mit | 3,632 |
package pomodoro
import org.eclipse.swt.widgets.Display
sealed trait PomodoroState {
val targetTimeOption: Option[Long]
}
case class InPomodoro(targetTime: Long) extends PomodoroState {
val targetTimeOption = Some(targetTime)
}
case class PomodoroComplete(targetTime: Long) extends PomodoroState {
val targetTimeOption = Some(targetTime)
}
case object NotRunning extends PomodoroState {
val targetTimeOption = None
}
class PomodoroTimerService(initialDuration: Int/* minutes */, initialPomodoroState: PomodoroState) extends BasicListenable {
require(initialDuration > 0 && initialDuration <= 60)
private var pomodoroState_ : PomodoroState = initialPomodoroState
private var pomodoroDuration_ : Minutes = initialDuration
private var currentTimerRunnable: Option[Runnable] = None
pomodoroState.targetTimeOption foreach startTimerRunnable
def timeRemaining: Millis = pomodoroState.targetTimeOption match {
case Some(targetTime) ⇒ targetTime - System.currentTimeMillis
case None ⇒ pomodoroDuration * 60 * 1000
}
def pomodoroDuration = pomodoroDuration_
def pomodoroDuration_=(minutes: Minutes) {
require(pomodoroState == NotRunning)
pomodoroDuration_ = minutes
signalPomodoroDurationChanged(pomodoroDuration)
signalUpdateEvent(timeRemaining, pomodoroState)
}
def pomodoroState = pomodoroState_
def startPomodoro() {
val targetTime = System.currentTimeMillis + pomodoroDuration * 60 * 1000
require(pomodoroState_ == NotRunning)
pomodoroState_ = InPomodoro(targetTime)
startTimerRunnable(targetTime)
}
def startTimerRunnable(targetTime: Millis) {
require(currentTimerRunnable.isEmpty)
val timerRunnable = makeTimerRunnable()
currentTimerRunnable = Some(timerRunnable)
signalPomodoroStarted(targetTime)
signalUpdateEvent(timeRemaining, pomodoroState)
Display.getDefault.timerExec(100, timerRunnable)
}
def makeTimerRunnable(): Runnable = new Runnable() {
def run() = {
currentTimerRunnable = None
      // Safe to call .get here: this runnable is only scheduled while a target
      // time exists, and internalStop() cancels it before clearing the state.
      val targetTime = pomodoroState.targetTimeOption.get
val timeRemaining = pomodoroState.targetTimeOption.get - System.currentTimeMillis
if (timeRemaining < 0 && pomodoroState.isInstanceOf[InPomodoro]) {
pomodoroState_ = PomodoroComplete(targetTime)
signalPomodoroComplete()
}
val timerRunnable = makeTimerRunnable()
currentTimerRunnable = Some(timerRunnable)
signalUpdateEvent(timeRemaining, pomodoroState)
Display.getDefault.timerExec(1000, timerRunnable)
}
}
def stopPomodoro() {
internalStop()
signalPomodoroStopped()
signalUpdateEvent(timeRemaining, pomodoroState)
}
def dispose() = internalStop()
private def internalStop() {
for (timerRunnable ← currentTimerRunnable)
Display.getDefault.timerExec(-1, timerRunnable)
currentTimerRunnable = None
pomodoroState_ = NotRunning
}
}
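/* Usage sketch (assumes an SWT event loop, since timers run on the Display thread):
 *   val service = new PomodoroTimerService(25, NotRunning)
 *   service.addListener(new service.Listener {
 *     override def pomodoroComplete() { println("pomodoro complete") }
 *   })
 *   service.startPomodoro()
 */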
trait BasicListenable {
trait Listener {
def updated(timeRemaining: Millis, pomodoroState: PomodoroState) {}
def pomodoroStarted(targetTime: Millis) {}
def pomodoroStopped() {}
def pomodoroComplete() {}
def pomodoroDurationChanged(duration: Minutes) {}
}
private var listeners = Set[Listener]()
def addListener(listener: Listener) { listeners = listeners + listener }
def removeListener(listener: Listener) { listeners = listeners - listener }
protected def signalUpdateEvent(timeRemaining: Millis, pomodoroState: PomodoroState) = listeners foreach { _.updated(timeRemaining, pomodoroState) }
protected def signalPomodoroStarted(targetTime: Millis) = listeners foreach { _.pomodoroStarted(targetTime) }
protected def signalPomodoroStopped() = listeners foreach { _.pomodoroStopped }
protected def signalPomodoroComplete() = listeners foreach { _.pomodoroComplete }
protected def signalPomodoroDurationChanged(duration: Minutes) = listeners foreach { _.pomodoroDurationChanged(duration) }
}
| mdr/pomodoro4eclipse | pomodoro4eclipse/src/pomodoro/PomodoroTimerService.scala | Scala | mit | 3,974 |
package org.apache.spark.streaming.talos.offset
import java.io.{File, FileOutputStream}
import scala.collection.mutable
import org.apache.commons.io.{FileUtils, IOUtils}
import org.apache.hadoop.conf.Configuration
import org.apache.spark.streaming.Time
import org.apache.spark.streaming.talos.{TalosCluster, TopicPartition}
import org.apache.spark.util.Utils
import org.scalatest.{BeforeAndAfter, FunSuite}
import com.xiaomi.infra.galaxy.rpc.thrift.Credential
import com.xiaomi.infra.galaxy.talos.thrift.TopicTalosResourceName
class HDFSOffsetDAOSuite extends FunSuite with BeforeAndAfter {
private val dir = "/tmp/galaxy-talos-spark-test/"
private val topic = "test"
private val topicTalosResourceName = s"CL123#$topic#abc"
private var dao: HDFSOffsetDAO = _
before {
val tc = new TalosCluster(Map.empty[String, String], new Credential())
TalosCluster._topicResourceNames.put(topic, new TopicTalosResourceName(topicTalosResourceName))
dao = new HDFSOffsetDAO(tc, dir, new Configuration())
FileUtils.deleteDirectory(new File(dir))
new File(dir).mkdir()
}
after {
dao = null
    FileUtils.deleteDirectory(new File(dir))
}
test("get offset files ordered by oldest-first.") {
val Array(file000, file001, file002) = Array("offset-000", "offset-001", "offset-002")
new File(dir + file002).createNewFile()
new File(dir + file001).createNewFile()
new File(dir + file000).createNewFile()
val offsetFiles = dao.getOffsetFiles().toArray
assert(offsetFiles(0).getName.endsWith(file000))
assert(offsetFiles(1).getName.endsWith(file001))
assert(offsetFiles(2).getName.endsWith(file002))
}
test("save offsets") {
(1 to 11).foreach { i =>
val time = Time(i.toLong)
val offsets = Map(
TopicPartition(topic, 0) -> 1L,
TopicPartition(topic, 1) -> 2L
)
dao.doSave(time, offsets)
}
import scala.collection.JavaConverters._
val filesNum = FileUtils.listFiles(new File(dir), null, false)
.asScala.filter(_.getName.startsWith("offset")).size
assert(filesNum == 10)
val files = dao.getOffsetFiles()
assert(files.head.getName === "offset-2")
assert(files.last.getName == "offset-11")
}
test("restore offsets from topic") {
val emptyOffsets = dao.restore()
assert(emptyOffsets.isEmpty)
val offsets = Map(
(topic, 0) -> 1L,
(topic, 1) -> 2L
)
val offsetFile = new File(dir + "offset-000")
IOUtils.write(Utils.serialize(offsets), new FileOutputStream(offsetFile))
val restoredOffsets = dao.restore()
assert(restoredOffsets.get === offsets.map { case (key, value) =>
TopicPartition(key._1, key._2) -> value
})
}
test("restore offsets from topic talos resource name") {
val offsets = Map(
(topicTalosResourceName, 0) -> 1L,
(topicTalosResourceName, 1) -> 2L
)
val offsetFile = new File(dir + "offset-000")
IOUtils.write(Utils.serialize(offsets), new FileOutputStream(offsetFile))
val restoredOffsets = dao.restore()
assert(restoredOffsets.get === offsets.map { case (key, value) =>
TopicPartition(topic, key._2) -> value
})
}
test("restore offsets in case of topic recreation") {
val newTopicTalosResourceName = s"CL456#$topic#xxxx"
val offsets = Map(
(newTopicTalosResourceName, 0) -> 1L,
(newTopicTalosResourceName, 1) -> 2L
)
val offsetFile = new File(dir + "offset-000")
IOUtils.write(Utils.serialize(offsets), new FileOutputStream(offsetFile))
val restoredOffsets = dao.restore()
assert(restoredOffsets.isEmpty)
}
}
| XiaoMi/galaxy-sdk-java | galaxy-talos-client/galaxy-talos-spark/src/test/scala/org/apache/spark/streaming/talos/offset/HDFSOffsetDAOSuite.scala | Scala | apache-2.0 | 3,627 |
package jp.ijufumi.openreports.service.settings
import java.nio.file.{FileSystems, Files}
import jp.ijufumi.openreports.model.{TReportTemplate, TReportTemplateHistory}
import jp.ijufumi.openreports.service.OutputFilePath
import jp.ijufumi.openreports.service.enums.StatusCode
import jp.ijufumi.openreports.service.support.ConnectionFactory
import jp.ijufumi.openreports.vo.{ReportTemplateHistoryInfo, ReportTemplateInfo}
import org.joda.time.DateTime
import scalikejdbc.DB
import skinny.Logging
import skinny.micro.multipart.FileItem
class ReportTemplateSettingsService extends Logging {
val blankTemplate = new ReportTemplateInfo(0L, "")
def getReportTemplates: Array[ReportTemplateInfo] = {
TReportTemplate
.findAll()
.map(r => ReportTemplateInfo(r.templateId, r.fileName)).toArray
}
def getReportTemplate(templateId: Long): ReportTemplateInfo = {
TReportTemplate
.findById(templateId)
.map(r => ReportTemplateInfo(r.templateId, r.fileName)).getOrElse(blankTemplate)
}
def getHistories(templateId: Long): Array[ReportTemplateHistoryInfo] = {
TReportTemplateHistory
.where('templateId -> templateId)
.apply()
.sortBy(_.historyId)
.map(r => ReportTemplateHistoryInfo(r.historyId, r.templateId, r.fileName, r.createdAt)).toArray
}
def uploadFile(file: FileItem): StatusCode.Value = {
val templates = TReportTemplate.where('fileName -> file.name).apply()
val filePath = "%s_%s".format(DateTime.now().toString("yyyyMMddHHmmss"), file.name)
val db = DB(ConnectionFactory.getConnection)
try {
db.begin()
if (templates.isEmpty) {
TReportTemplate
.createWithAttributes('fileName -> file.name, 'filePath -> filePath)
} else {
val template = templates.head
TReportTemplate
.updateByIdAndVersion(template.templateId, template.versions)
.withAttributes(
'fileName -> file.name,
'filePath -> filePath,
'updatedAt -> DateTime
.now()
)
TReportTemplateHistory
.createWithAttributes(
'templateId -> template.templateId,
'fileName -> template.fileName,
'filePath -> template.filePath,
'createdAt -> template.createdAt,
'updatedAt -> template.updatedAt,
'versions -> template.versions
)
}
val fullPath = FileSystems.getDefault.getPath(OutputFilePath, filePath)
if (!Files.exists(fullPath.getParent)) {
Files.createDirectories(fullPath.getParent)
}
logger.debug("filePath:%s".format(fullPath.toString))
      file.write(fullPath.toFile)
      db.commit() // commit the transaction opened by db.begin()
      StatusCode.OK
} catch {
case e: Throwable => {
db.rollback()
logger.error("file upload failed.", e)
StatusCode.OTHER_ERROR
}
}
}
}
| ijufumi/openreports_scala | src/main/scala/jp/ijufumi/openreports/service/settings/ReportTemplateSettingsService.scala | Scala | mit | 2,875 |
package arx.core.vec;
@SerialVersionUID(9223372036854770000L)
class Vec2f extends ReadVec2f{
def this(xa : Float,ya : Float){
this()
xi = xa
yi = ya
}
override def x_= ( s : Float ) { xi = s }
override def y_= ( s : Float ) { yi = s }
def +=(v : ReadVec2f) { xi += v.xi;yi += v.yi}
def +=(s : Float) {xi += s;yi += s}
def -=(v : ReadVec2f) { xi -= v.xi;yi -= v.yi}
def -=(s : Float) {xi -= s;yi -= s}
def *=(v : ReadVec2f) { xi *= v.xi;yi *= v.yi}
def *=(s : Float) {xi *= s;yi *= s}
def /=(v : ReadVec2f) { xi /= v.xi;yi /= v.yi}
def /=(s : Float) {xi /= s;yi /= s}
override def +(v : ReadVec2f) = new Vec2f(xi + v.xi,yi + v.yi)
override def +(s : Float) = new Vec2f(xi + s,yi + s)
override def -(v : ReadVec2f) = new Vec2f(xi - v.xi,yi - v.yi)
override def -(s : Float) = new Vec2f(xi - s,yi - s)
override def *(v : ReadVec2f) = new Vec2f(xi * v.xi,yi * v.yi)
override def *(s : Float) = new Vec2f(xi * s,yi * s)
override def /(v : ReadVec2f) = new Vec2f(xi / v.xi,yi / v.yi)
override def /(s : Float) = new Vec2f(xi / s,yi / s)
def update (i:Int,s:Float) { i match {
case 0 => x = s
case 1 => y = s
case _ =>
}}
}
object Vec2f{
	def apply (xa : Float,ya : Float) = new Vec2f(xa,ya)
def apply (v : ReadVec2f) = new Vec2f(v.x,v.y)
implicit def toWriteable (v : ReadVec2f) = new Vec2f(v.x,v.y)
def apply (s : Float) = new Vec2f(s,s)
val UnitX = new ReadVec2f(1,0)
val UnitY = new ReadVec2f(0,1)
object _Identity extends ReadVec2f(1,1) {
override def * ( v : ReadVec2f ) = v
def * ( v : Vec2f ) = v
}
val Identity : ReadVec2f = _Identity
val One : ReadVec2f = _Identity
object _Zero extends ReadVec2f{
override def * ( v : ReadVec2f ) = this
def * ( v : Vec2f ) = this
override def / ( v : ReadVec2f ) = this
def / ( v : Vec2f ) = this
override def + ( v : ReadVec2f ) = v
def + ( v : Vec2f ) = v
}
val Zero : ReadVec2f = _Zero
}
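// Hedged usage sketch (not part of the library): exercises the arithmetic and
// mutating operators defined above, assuming ReadVec2f exposes x/y accessors
// (implied by the x_=/y_= overrides in Vec2f).
object Vec2fExample {
	def main(args: Array[String]): Unit = {
		val a = Vec2f(1f, 2f)
		val b = a + Vec2f(3f, 0f) // element-wise sum: (4.0, 2.0)
		a *= 2f                   // in-place scaling: a is now (2.0, 4.0)
		a(1) = 5f                 // indexed write via update(i, s): a is now (2.0, 5.0)
		println(s"a=(${a.x}, ${a.y}) b=(${b.x}, ${b.y})")
	}
}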
| nonvirtualthunk/arx-core | src/main/scala/arx/core/vec/Vec2f.scala | Scala | bsd-2-clause | 1,910 |
/*
* Copyright 2012-2020 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package laika.parse.text
import cats.data.NonEmptySet
import laika.ast.~
import laika.parse.{Parsed, Parser, SourceCursor, SourceFragment}
/** A parser that is associated with a non-empty set of trigger
* characters for performance optimizations.
*
* There is usually no need to create such a parser manually,
* as some of the basic building blocks in `TextParsers` create
* such a parser (e.g. the `literal`, `oneOf` or `someOf`
* parsers).
*
  * This set only has an effect when this parser is used in
* an optimized parser for recursive spans, meaning it is
* either registered as a top-level parser (with `SpanParser.standalone`
* or `SpanParser.recursive`) or passed to a custom span parser
* with `InlineParser.embed`. In all other use cases this
  * parser behaves just like a plain parser.
*
* @author Jens Halm
*/
trait PrefixedParser[+T] extends Parser[T] { self =>
/** The set of trigger characters that can start this parser.
*/
def startChars: NonEmptySet[Char]
/** The underlying parser that may be optimized based on the specified
* start characters.
*/
def underlying: Parser[T]
def parse (in: SourceCursor): Parsed[T] = underlying.parse(in)
override def ~ [U] (p: Parser[U]): PrefixedParser[T ~ U] = PrefixedParser(startChars)(super.~(p))
override def ~> [U] (p: Parser[U]): PrefixedParser[U] = PrefixedParser(startChars)(super.~>(p))
override def <~[U] (p: Parser[U]): PrefixedParser[T] = PrefixedParser(startChars)(super.<~(p))
override def ~ (value: String): PrefixedParser[T ~ String] = this.~(TextParsers.literal(value))
override def ~> (value: String): PrefixedParser[String] = this.~>(TextParsers.literal(value))
override def <~ (value: String): PrefixedParser[T] = this.<~(TextParsers.literal(value))
override def flatMap[U] (f: T => Parser[U]): PrefixedParser[U] = PrefixedParser(startChars)(super.flatMap(f))
override def >>[U] (fq: T => Parser[U]): PrefixedParser[U] = PrefixedParser(startChars)(super.flatMap(fq))
override def map[U] (f: T => U): PrefixedParser[U] = PrefixedParser(startChars)(super.map(f))
override def ^^[U] (f: T => U): PrefixedParser[U] = PrefixedParser(startChars)(super.map(f))
override def as[U] (v: => U): PrefixedParser[U] = PrefixedParser(startChars)(super.as(v))
override def evalMap[U] (f: T => Either[String, U]): PrefixedParser[U] = PrefixedParser(startChars)(super.evalMap(f))
override def collect[U, V >: T] (f: PartialFunction[T, U],
error: V => String = (r:V) => s"Constructor function not defined at $r"): PrefixedParser[U] =
PrefixedParser(startChars)(super.collect(f, error))
/** Applies the specified parser when this parser fails.
*
* `a.orElse(b)` succeeds if either of the parsers succeeds.
*
* This is a specialized variant of the `orElse` method of the
* base trait that preserves the nature of the `PrefixedParser`
* if both original parsers implement this trait.
*/
def orElse[U >: T] (p: => PrefixedParser[U]): PrefixedParser[U] = PrefixedParser(startChars ++ p.startChars)(super.orElse(p))
/** Applies the specified parser when this parser fails.
*
* `a | b` succeeds if either of the parsers succeeds.
*
* This is a specialized variant of the `|` method of the
* base trait that preserves the nature of the `PrefixedParser`
* if both original parsers implement this trait.
*/
def | [U >: T] (p: => PrefixedParser[U]): PrefixedParser[U] = PrefixedParser(startChars ++ p.startChars)(super.orElse(p))
override def | (value: String)(implicit ev: T <:< String): PrefixedParser[String] = map(ev).orElse(TextParsers.literal(value))
override def withCursor: PrefixedParser[(T, SourceFragment)] = PrefixedParser(startChars)(super.withCursor)
override def cursor: PrefixedParser[SourceFragment] = PrefixedParser(startChars)(super.cursor)
override def source: PrefixedParser[String] = PrefixedParser(startChars)(super.source)
}
/** Factories and utilities for creating or processing PrefixedParser instances.
*/
object PrefixedParser {
import cats.implicits._
/** Creates a new parser that is only triggered when a character in the specified
* set is seen on the input.
*/
def apply[U] (sc: NonEmptySet[Char])(p: Parser[U]): PrefixedParser[U] = new PrefixedParser[U] {
def startChars: NonEmptySet[Char] = sc
override def underlying = p
}
/** Creates a new parser that is only triggered when one of the specified characters
* is seen on the input.
*/
def apply[U] (char: Char, chars: Char*)(p: Parser[U]): PrefixedParser[U] = new PrefixedParser[U] {
def startChars: NonEmptySet[Char] = NonEmptySet.of(char, chars:_*)
override def underlying = p
}
/** Creates a mapping from start characters to their corresponding parser
* from the specified sequence of PrefixedParsers. If a character is
* a trigger for more than one parser they will be combined using `orElse`
* where the parser which comes first in the sequence has higher precedence.
*/
def mapAndMerge[T] (parsers: Seq[PrefixedParser[T]]): Map[Char, Parser[T]] = parsers
.flatMap { parserDef =>
parserDef.startChars.toList.map(c => (c, parserDef))
}
.groupBy(_._1)
.map {
case (char, definitions) => (char, definitions.map(_._2).reduceLeft(_ | _))
}
private[laika] def fromLegacyMap[T] (map: Map[Char, Parser[T]]): Seq[PrefixedParser[T]] =
map.toSeq.map { case (c, p) => PrefixedParser(c)(p) }
}
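/** Hedged usage sketch, not part of the library. It relies on `TextParsers.literal`
  * returning a `PrefixedParser[String]`, as the trait documentation above states.
  */
private object PrefixedParserExamples {
  // The specialized `|` keeps the result a PrefixedParser, so the combined
  // parser still advertises both '+' and '-' as trigger characters.
  val sign: PrefixedParser[String] = TextParsers.literal("+") | TextParsers.literal("-")

  // mapAndMerge groups parsers by trigger character; parsers sharing a trigger
  // are combined with `|`, with earlier entries in the sequence taking precedence.
  val byTrigger: Map[Char, Parser[String]] = PrefixedParser.mapAndMerge(Seq(sign))
}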
| planet42/Laika | core/shared/src/main/scala/laika/parse/text/PrefixedParser.scala | Scala | apache-2.0 | 6,283 |
/***********************************************************************
* Copyright (c) 2013-2017 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.hbase.tools.status
import com.beust.jcommander.Parameters
import org.locationtech.geomesa.hbase.data.HBaseDataStore
import org.locationtech.geomesa.hbase.tools.HBaseDataStoreCommand
import org.locationtech.geomesa.tools.status.DescribeSchemaCommand
import org.locationtech.geomesa.tools.{CatalogParam, RequiredTypeNameParam}
class HBaseDescribeSchemaCommand extends DescribeSchemaCommand[HBaseDataStore] with HBaseDataStoreCommand {
override val params = new HBaseDescribeSchemaParams
}
@Parameters(commandDescription = "Describe the attributes of a given GeoMesa feature type")
class HBaseDescribeSchemaParams extends CatalogParam with RequiredTypeNameParam
| ronq/geomesa | geomesa-hbase/geomesa-hbase-tools/src/main/scala/org/locationtech/geomesa/hbase/tools/status/HBaseDescribeSchemaCommand.scala | Scala | apache-2.0 | 1,162 |
import java.awt.{Container, BorderLayout, Dimension}
import java.awt.event.{ActionEvent, ActionListener}
import java.nio.file.{Paths, Files}
import javax.swing._
import play.api.libs.json.Json
import scala.util.Try
case class Question(id: String, prompt: Seq[String], text: Seq[String], options: Seq[String])
object LastDream {
implicit val questionFormat = Json.format[Question]
val useGUI = false
val files = Seq("matt.json", "christian.json", "terry.json")
def main(args: Array[String]): Unit = {
val root = Paths.get(getClass.getResource("stories").toURI)
val questions = files.map(root.resolve) flatMap { path =>
val bytes = Files.readAllBytes(path)
Json.parse(bytes).as[Seq[Question]]
}
val questionsById = (questions map { q => (q.id, q) }).toMap
if (useGUI) createAndShowGUI(questionsById)
else {
val start = questionsById("start")
var next = ask(start, questionsById)
while (next.nonEmpty) {
next = ask(next.get, questionsById)
}
}
}
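  /** Prints the question and its options, reads the player's choice from stdin,
    * and returns the question to ask next; None means the story reached an ending.
    * Invalid input returns the same question so it is asked again. */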
def ask(question: Question, questionsById: Map[String, Question]): Option[Question] = {
println()
question.text.foreach(println)
println()
val options = question.options.map(questionsById)
options.zipWithIndex.foreach { case (q, i) =>
println(s"${i + 1}) ${q.prompt.mkString("\\n")}")
}
print("> ")
    Try(scala.io.StdIn.readLine().trim.toInt).toOption map { n =>
val choice = Try(options.toSeq(n - 1)).toOption
choice map { q =>
if (q.id.endsWith("_end")) {
q.text.foreach(println)
None
} else Some(q)
} getOrElse {
println("Invalid option!")
Some(question)
}
} getOrElse {
println("Invalid option, please enter a number")
Some(question)
}
}
def createAndShowGUI(questionsById: Map[String, Question]): Unit = {
val frame = new JFrame("Title")
frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE)
def setContent(pane: Container, question: Question): Unit = {
val options = question.options.map(questionsById)
val buttons = options.zipWithIndex.map { case (q, i) =>
        val button = new JButton(q.prompt.mkString("\n"))
button.addActionListener(new ActionListener() {
override def actionPerformed(e: ActionEvent): Unit = {
val choice = options.toSeq(i)
if (!choice.id.endsWith("_end")) {
pane.removeAll()
setContent(pane, choice)
}
}
})
button
}
val buttonPanel = new JPanel
buttons.foreach(buttonPanel.add)
      val textArea = new JTextArea(question.text.mkString("\n"))
textArea.setEditable(false)
textArea.setLineWrap(true)
textArea.setWrapStyleWord(true)
textArea.setColumns(20)
textArea.setRows(10)
val scrollPane = new JScrollPane(textArea)
scrollPane.setVerticalScrollBarPolicy(ScrollPaneConstants.VERTICAL_SCROLLBAR_ALWAYS)
scrollPane.setHorizontalScrollBarPolicy(ScrollPaneConstants.HORIZONTAL_SCROLLBAR_NEVER)
pane.add(scrollPane, BorderLayout.PAGE_START)
pane.add(buttonPanel, BorderLayout.CENTER)
val min = new Dimension(250, 300)
val max = new Dimension(500, 300)
frame.setMinimumSize(min)
frame.setMaximumSize(max)
frame.setSize(min)
frame.setVisible(true)
}
val firstQuestion = questionsById("start")
setContent(frame.getContentPane, firstQuestion)
}
}
| EDawg878/LastDream | src/main/scala/LastDream.scala | Scala | gpl-2.0 | 3,497 |
/*
* Copyright 2017 Zhang Di
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.dizhang.seqspark.ds
@SerialVersionUID(7737230001L)
class Bed (arg1: Array[Byte], arg2: Array[Byte]) extends Serializable {
/**
* This class is for holding the output buf,
* we don't care about random access, just use two
* Byte arrays for bim and bed
*/
val bim = arg1
val bed = arg2
}
object Bed {
def apply(): Bed = {
new Bed(Array[Byte](), Array[Byte]())
}
def apply(v: Variant[Byte]): Bed = {
def makeBed (g: Byte): Byte = {
/** not functional right now */
0
}
val id = "%s-%s" format(v.chr, v.pos)
val bim: Array[Byte] =
"%s\\t%s\\t%d\\t%s\\t%s\\t%s\\n"
.format(v.chr,id,0,v.pos,v.ref,v.alt)
.toArray
.map(_.toByte)
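    // PLINK-style .bed packing: four genotypes per byte, two bits each,
    // with the first sample occupying the lowest-order bits of the byte.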
val bed: Array[Byte] =
for {
i <- Array[Int]() ++ (0 to v.length/4)
four = 0 to 3 map (j => if (4 * i + j < v.length) makeBed(v(4 * i + j)) else 0.toByte)
} yield
four.zipWithIndex.map(a => a._1 << 2 * a._2).sum.toByte
new Bed(bim, bed)
}
def add(a: Bed, b: Bed): Bed =
new Bed(a.bim ++ b.bim, a.bed ++ b.bed)
}
| statgenetics/seqspark | src/main/scala/org/dizhang/seqspark/ds/Bed.scala | Scala | apache-2.0 | 1,679 |
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.ct600.v3
import uk.gov.hmrc.ct.box._
import uk.gov.hmrc.ct.ct600.v3.retriever.RepaymentsBoxRetriever
case class REPAYMENTSQ1(value: Option[Boolean]) extends CtBoxIdentifier("Send Repayment in all cases?")
with CtOptionalBoolean with Input with ValidatableBox[RepaymentsBoxRetriever] {
override def validate(boxRetriever: RepaymentsBoxRetriever): Set[CtValidation] = validateAsMandatory(this)
}
| hmrc/ct-calculations | src/main/scala/uk/gov/hmrc/ct/ct600/v3/REPAYMENTSQ1.scala | Scala | apache-2.0 | 1,027 |
package com.hackerforfuture.codeprototypes.dataloader.utils
import java.io.File
import java.util
import com.hackerforfuture.codeprototypes.dataloader.common.{FileContextV1, PathInfo}
import scala.util.matching.Regex
/**
* com.hackerforfuture.codeprototypes.dataloader.Utils
* Created by 10192057 on 2018/1/23 0023.
*/
object LoaderUtils {
// TODO Recursive List Files
def recursiveListFiles(rootFile: File): Array[File] = {
if (rootFile.isFile) {
Array(rootFile)
} else {
rootFile.listFiles().flatMap(recursiveListFiles)
}
}
def getTimestamp(file: File): String = {
val fileName = file.getName
    val regex: Regex = """\d{8,14}""".r
val temp: Option[Regex.Match] = regex.findFirstMatchIn(fileName)
if (temp.isDefined) {
val timeStamp: String = temp.get.matched
if (timeStamp.length > 14) {
timeStamp.substring(0, 14)
} else {
        timeStamp.padTo(14, '0')
}
} else {
file.lastModified().toString
}
}
def genFileContext(file: File, sep: String = "/"): FileContextV1 = {
val fileName = file.getName
    val filePath = file.getPath.replaceAll("""\\""", sep)
    val fileParentPath = file.getParentFile.getParent.replaceAll("""\\""", sep)
val errorPath = fileParentPath + sep + "errorfiles"
val tempPath = fileParentPath.split("destpath=").last
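    // Keep the portion after "destpath=" and break it into path segments for the match below.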
val pathDetailInfo = tempPath.split(sep).toList
val udfParams: util.HashMap[String, String] = new util.HashMap[String, String]
pathDetailInfo match {
// telecom_pm_cm/hdfs/zxvmax/telecom/temp/lte/rawdata/itg_pm_lte_enb_h_enb_d/p_provincecode=510000/
case _ :: handleMode :: tail =>
val destPath = sep + tail.mkString(sep).replace("target=", "") + sep
val tempDestPath = "/temp/dataloader" + destPath
tail.filter(_.contains("=")).foreach {
elem =>
val partVal = elem.split("=")
udfParams.put(partVal.head, partVal.last)
}
val tgtName: Option[String] = Some(udfParams.getOrDefault("target", ""))
val p_region = udfParams.getOrDefault("index", "")
val targetKey = if (p_region.nonEmpty) tgtName.get + "_" + p_region else tgtName.get
val pathInfo: PathInfo = PathInfo(filePath,
errorPath,
tempDestPath,
destPath)
FileContextV1(file, fileName, targetKey, handleMode, pathInfo)
case _ =>
FileContextV1(file, file.getName, "", "", PathInfo(filePath, errorPath, "", ""))
}
}
}
| BiyuHuang/CodePrototypesDemo | demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/utils/LoaderUtils.scala | Scala | apache-2.0 | 2,523 |
package memnets.model.impl
import memnets.model._
private final class FImpl(
val owner: Y,
override val description: String,
var inputs: Iterable[Y]
)(f: TickFunction)
extends ElementBase
with F {
var _lastEval = 0.0
var _scale: Option[Float] = None
def act = _lastEval
def eval(te: Tick) = {
_lastEval = f.eval(te)
_lastEval
}
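  // Fall back to the owner's UI scale unless an explicit scale was set via scale_=.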
def scale = if (_scale.isEmpty) owner.ui.scale else _scale
  def scale_=(v: Double): Unit = { _scale = Some(v.toFloat) }
override def toString: String = s"F[owner= $owner,desc= $description]"
}
| MemoryNetworks/memnets | api/src/main/scala/memnets/model/impl/FImpl.scala | Scala | apache-2.0 | 579 |
/*
* Copyright 2014 Edulify.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.edulify.play.hikaricp
import java.sql.{SQLException, DriverManager}
import java.util.Properties
import com.typesafe.config.ConfigFactory
import org.jdbcdslog.LogSqlDataSource
import org.specs2.execute.AsResult
import org.specs2.mutable.Specification
import org.specs2.specification.{Scope, AroundExample}
import play.api.{PlayException, Configuration}
import play.api.libs.JNDI
import scala.collection.JavaConversions._
import scala.util.control.NonFatal
class HikariCPDBApiSpec extends Specification with AroundExample {
"When starting HikariCP DB API" should {
"create data sources" in new DataSourceConfigs {
val api = new HikariCPDBApi(config, classLoader)
val ds = api.getDataSource("default")
ds.getConnection.getMetaData.getURL == "jdbc:h2:mem:test"
}
"create data source with logSql enabled" in new DataSourceConfigs {
val api = new HikariCPDBApi(configWithLogSql, classLoader)
val ds = api.getDataSource("default")
ds.isInstanceOf[LogSqlDataSource] must beTrue
}
"bind data source to jndi" in new DataSourceConfigs {
val api = new HikariCPDBApi(configWithLogSql, classLoader)
val ds = api.getDataSource("default")
JNDI.initialContext.lookup("TestContext") must not(beNull)
}
"register driver configured in `driverClassName`" in new DataSourceConfigs {
val api = new HikariCPDBApi(configWithLogSql, classLoader)
val ds = api.getDataSource("default")
DriverManager.getDrivers.exists( driver => driver.getClass.getName == "org.h2.Driver") must beTrue
}
"create more than one datasource" in new DataSourceConfigs {
val api = new HikariCPDBApi(multipleDataSources, classLoader)
api.getDataSource("default") must not(beNull)
api.getDataSource("default2") must not(beNull)
}
"report misconfiguration error when" in {
"dataSourceClassName and jdbcUrl are not present" in new DataSourceConfigs {
val properties = new Properties()
properties.setProperty("default.username", "sa")
properties.setProperty("default.password", "")
val misConfig = new Configuration(ConfigFactory.parseProperties(properties))
new HikariCPDBApi(misConfig, classLoader) must throwA[PlayException]
}
"db configuration has no dataSources configured" in new DataSourceConfigs {
val properties = new Properties()
properties.setProperty("default", "")
val misConfig = new Configuration(ConfigFactory.parseProperties(properties))
new HikariCPDBApi(misConfig, classLoader) must throwA[PlayException]
}
}
}
"When shutting down pool" should {
"shutdown data source" in new DataSourceConfigs {
val api = new HikariCPDBApi(config, classLoader)
val ds = api.getDataSource("default")
api.shutdownPool(ds)
ds.getConnection must throwA[SQLException]
}
"shutdown data source with logSql enabled" in new DataSourceConfigs {
val api = new HikariCPDBApi(configWithLogSql, classLoader)
val ds = api.getDataSource("default")
api.shutdownPool(ds)
ds.getConnection must throwA[SQLException]
}
}
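  // Hold one H2 connection open for the duration of each example so the
  // in-memory database (jdbc:h2:mem:test) is not dropped mid-test.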
def around[T : AsResult](t: =>T) = {
Class.forName("org.h2.Driver")
val conn = DriverManager.getConnection("jdbc:h2:mem:test", "sa", "")
try {
val result = AsResult(t)
result
} catch {
case NonFatal(e) => failure(e.getMessage)
} finally {
conn.close()
}
}
}
trait DataSourceConfigs extends Scope {
def config = new Configuration(ConfigFactory.parseProperties(Props().properties))
def configWithLogSql = {
val props = new Props().properties
props.setProperty("default.logSql", "true")
new Configuration(ConfigFactory.parseProperties(props))
}
def multipleDataSources = new Configuration(ConfigFactory.parseProperties(Props().multipleDatabases))
def classLoader = this.getClass.getClassLoader
}
case class Props() {
def properties = {
val props = new Properties()
props.setProperty("default.jdbcUrl", "jdbc:h2:mem:test")
props.setProperty("default.username", "sa")
props.setProperty("default.password", "")
props.setProperty("default.driverClassName", "org.h2.Driver")
props.setProperty("default.jndiName", "TestContext")
props
}
def multipleDatabases = {
val props = new Properties()
props.setProperty("default.jdbcUrl", "jdbc:h2:mem:test")
props.setProperty("default.username", "sa")
props.setProperty("default.password", "")
props.setProperty("default.driverClassName", "org.h2.Driver")
props.setProperty("default.jndiName", "TestContext")
// default2
props.setProperty("default2.jdbcUrl", "jdbc:h2:mem:test")
props.setProperty("default2.username", "sa")
props.setProperty("default2.password", "")
props.setProperty("default2.driverClassName", "org.h2.Driver")
props.setProperty("default2.jndiName", "TestContext")
props
}
} | apismensky/play-hikaricp.edulify.com | test/com/edulify/play/hikaricp/HikariCPDBApiSpec.scala | Scala | apache-2.0 | 5,559 |
/**
* Copyright (C) 2016 DANS - Data Archiving and Networked Services ([email protected])
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package nl.knaw.dans.easy.license
import java.io.InputStream
import java.util
import javax.naming.directory.Attributes
import com.yourmediashelf.fedora.generated.management.DatastreamProfile
import nl.knaw.dans.pf.language.emd.Term.Name
import nl.knaw.dans.pf.language.emd._
import org.scalamock.scalatest.MockFactory
import rx.lang.scala.Observable
import rx.lang.scala.observers.TestSubscriber
import scala.language.reflectiveCalls
class DatasetLoaderSpec extends UnitSpec with MockFactory {
trait MockEasyMetadata extends EasyMetadataImpl {
override def toString(x: String, y: Name): String = ""
override def toString(x: String, y: Term): String = ""
override def toString(x: String, y: MDContainer): String = ""
override def toString(x: String): String = ""
}
val fedoraMock = mock[Fedora]
val ldapMock = mock[Ldap]
val emdMock = mock[MockEasyMetadata]
implicit val parameters = new DatabaseParameters {
val fedora: Fedora = fedoraMock
val ldap: Ldap = ldapMock
}
"getUserById" should "query the user data from ldap for a given user id" in {
val user = new EasyUser("name", "org", "addr", "pc", "city", "cntr", "phone", "mail")
(ldapMock.query(_: DepositorID)(_: Attributes => EasyUser)) expects ("testID", *) returning Observable.just(user)
val loader = new DatasetLoaderImpl()
val testObserver = TestSubscriber[EasyUser]()
loader.getUserById("testID").subscribe(testObserver)
testObserver.assertValue(user)
testObserver.assertNoErrors()
testObserver.assertCompleted()
}
it should "fail with a NoSuchElementException if the query to ldap doesn't yield any user data" in {
(ldapMock.query(_: DepositorID)(_: Attributes => EasyUser)) expects ("testID", *) returning Observable.empty
val loader = new DatasetLoaderImpl()
val testObserver = TestSubscriber[EasyUser]()
loader.getUserById("testID").subscribe(testObserver)
testObserver.assertNoValues()
testObserver.assertError(classOf[NoSuchElementException])
testObserver.assertNotCompleted()
}
it should "fail with an IllegalArgumentException if the query to ldap yields more than one user data object" in {
val user1 = new EasyUser("name", "org", "addr", "pc", "city", "cntr", "phone", "mail")
val user2 = user1.copy(email = "mail2")
(ldapMock.query(_: DepositorID)(_: Attributes => EasyUser)) expects ("testID", *) returning Observable.just(user1, user2)
val loader = new DatasetLoaderImpl()
val testObserver = TestSubscriber[EasyUser]()
loader.getUserById("testID").subscribe(testObserver)
testObserver.assertNoValues()
testObserver.assertError(classOf[IllegalArgumentException])
testObserver.assertNotCompleted()
}
"getDatasetById" should "return the dataset corresponding to the given identifier" in {
val id = "testID"
val depID = "depID"
val user = new EasyUser("name", "org", "addr", "pc", "city", "cntr", "phone", "mail")
val audience = mock[EmdAudience]
val audiences = Seq("aud1", "aud2")
val files = Seq(
new FileItem("path1", FileAccessRight.RESTRICTED_GROUP, "chs1"),
new FileItem("path2", FileAccessRight.KNOWN, "chs2")
)
val expected = Dataset(id, emdMock, user, audiences, files)
(fedoraMock.getAMD(_: String)(_: InputStream => String)) expects (id, *) returning Observable.just(depID)
(fedoraMock.getEMD(_: String)(_: InputStream => EasyMetadata)) expects (id, *) returning Observable.just(emdMock)
emdMock.getEmdAudience _ expects () returning audience
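    // Stub the collaborator lookups so the test exercises only getDatasetById's own wiring.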
val loader = new DatasetLoaderImpl() {
override def getUserById(depositorID: DepositorID): Observable[EasyUser] = {
if (depositorID == depID) Observable.just(user)
else fail(s"not the correct depositorID, was $depositorID, should be $depID")
}
override def getAudiences(a: EmdAudience): Observable[AudienceTitle] = {
if (a == audience) Observable.from(audiences)
else fail("not the correct audiences")
}
override def getFilesInDataset(did: DatasetID): Observable[FileItem] = {
if (did == id) Observable.from(files)
else fail(s"not the correct datasetID, was $did, should be $id")
}
}
val testObserver = TestSubscriber[Dataset]()
loader.getDatasetById(id).subscribe(testObserver)
testObserver.awaitTerminalEvent()
testObserver.assertValue(expected)
testObserver.assertNoErrors()
testObserver.assertCompleted()
}
"getFilesInDataset" should "return the files contained in the dataset corresponding to the given datasetID" in {
val id = "testID"
val pid1 = "pid1"
val pid2 = "pid2"
val fi1@FileItem(path1, accTo1, chcksm1) = FileItem("path1", FileAccessRight.NONE, "chcksm1")
val fi2@FileItem(path2, accTo2, chcksm2) = FileItem("path2", FileAccessRight.KNOWN, "chcksm2")
fedoraMock.queryRiSearch _ expects where[String](_ contains id) returning Observable.just(pid1, pid2)
(fedoraMock.getFileMetadata(_: String)(_: InputStream => (String, FileAccessRight.Value))) expects (pid1, *) returning Observable.just((path1, accTo1))
(fedoraMock.getFileMetadata(_: String)(_: InputStream => (String, FileAccessRight.Value))) expects (pid2, *) returning Observable.just((path2, accTo2))
(fedoraMock.getFile(_: String)(_: DatastreamProfile => String)) expects (pid1, *) returning Observable.just(chcksm1)
(fedoraMock.getFile(_: String)(_: DatastreamProfile => String)) expects (pid2, *) returning Observable.just(chcksm2)
val loader = new DatasetLoaderImpl()
val testObserver = TestSubscriber[Set[FileItem]]()
// toSet such that concurrency thingies (order of results) do not matter
loader.getFilesInDataset(id).toSet.subscribe(testObserver)
testObserver.awaitTerminalEvent()
testObserver.assertValues(Set(fi1, fi2))
testObserver.assertNoErrors()
testObserver.assertCompleted()
}
"getAudience" should "search the title for each audienceID in the EmdAudience in Fedora" in {
val (id1, title1) = ("id1", "title1")
(fedoraMock.getDC(_: String)(_: InputStream => String)) expects (id1, *) returning Observable.just(title1)
val loader = new DatasetLoaderImpl()
val testObserver = TestSubscriber[String]()
loader.getAudience(id1).subscribe(testObserver)
testObserver.awaitTerminalEvent()
testObserver.assertValues(title1)
testObserver.assertNoErrors()
testObserver.assertCompleted()
}
"getAudiences" should "search the title for each audienceID in the EmdAudience in Fedora" in {
val (id1, title1) = ("id1", "title1")
val (id2, title2) = ("id2", "title2")
val (id3, title3) = ("id3", "title3")
val audience = mock[EmdAudience]
audience.getValues _ expects() returning util.Arrays.asList(id1, id2, id3)
// can't do mocking due to concurrency issues
val loader = new DatasetLoaderImpl() {
var counter = 0
override def getAudience(audienceID: AudienceID) = {
counter += 1
counter match {
case 1 => Observable.just(title1)
case 2 => Observable.just(title2)
case 3 => Observable.just(title3)
case _ => throw new IllegalStateException(s"Called this method too many times. audienceID = $audienceID")
}
}
}
val testObserver = TestSubscriber[String]()
loader.getAudiences(audience).subscribe(testObserver)
testObserver.assertValues(title1, title2, title3)
testObserver.assertNoErrors()
testObserver.assertCompleted()
loader.counter shouldBe 3
}
}
| rvanheest-DANS-KNAW/easy-license-creator | src/test/scala/nl/knaw/dans/easy/license/DatasetLoaderSpec.scala | Scala | apache-2.0 | 8,214 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.network.netty
import java.io.File
import org.apache.spark.Logging
import org.apache.spark.util.Utils
import org.apache.spark.storage.{BlockId, FileSegment}
private[spark] class ShuffleSender(portIn: Int, val pResolver: PathResolver) extends Logging {
val server = new FileServer(pResolver, portIn)
server.start()
def stop() {
server.stop()
}
def port: Int = server.getPort()
}
/**
* An application for testing the shuffle sender as a standalone program.
*/
private[spark] object ShuffleSender {
def main(args: Array[String]) {
if (args.length < 3) {
System.err.println(
"Usage: ShuffleSender <port> <subDirsPerLocalDir> <list of shuffle_block_directories>")
System.exit(1)
}
val port = args(0).toInt
val subDirsPerLocalDir = args(1).toInt
val localDirs = args.drop(2).map(new File(_))
    val pResolver = new PathResolver {
override def getBlockLocation(blockId: BlockId): FileSegment = {
if (!blockId.isShuffle) {
throw new Exception("Block " + blockId + " is not a shuffle block")
}
// Figure out which local directory it hashes to, and which subdirectory in that
val hash = Utils.nonNegativeHash(blockId)
val dirId = hash % localDirs.length
val subDirId = (hash / localDirs.length) % subDirsPerLocalDir
val subDir = new File(localDirs(dirId), "%02x".format(subDirId))
val file = new File(subDir, blockId.name)
new FileSegment(file, 0, file.length())
}
}
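    // Constructing the sender starts the file server; the process then serves until killed.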
    val sender = new ShuffleSender(port, pResolver)
}
}
| yelshater/hadoop-2.3.0 | spark-core_2.10-1.0.0-cdh5.1.0/src/main/scala/org/apache/spark/network/netty/ShuffleSender.scala | Scala | apache-2.0 | 2,409 |
package com.monsanto.arch.cloudformation.model
import java.io.File
import org.scalatest.{Matchers, FunSpec}
import spray.json._
/**
* Created by djdool on 6/30/15.
*/
class VPCWriter_UT extends FunSpec with Matchers {
case class Currency(value: Int)
object Currency extends DefaultJsonProtocol {
implicit val format: RootJsonFormat[Currency] = jsonFormat1(Currency.apply)
}
describe("VPCWriter") {
it("creates the file with proper contents under target") {
val actualFile = new File("target/scala-test/test.json")
try{
val rich = Currency(1000000)
object MyVPCWriter extends VPCWriter
MyVPCWriter.jsonToFile("test.json", "scala-test", rich)
assert(actualFile.exists())
val actual = io.Source.fromFile(actualFile).getLines().mkString("").parseJson
actual should be( rich.toJson )
} finally {
actualFile.delete()
actualFile.getParentFile.delete()
}
}
}
}
| MonsantoCo/cloudformation-template-generator | src/test/scala/com/monsanto/arch/cloudformation/model/VPCWriter_UT.scala | Scala | bsd-3-clause | 976 |
/*
* Wire
* Copyright (C) 2016 Wire Swiss GmbH
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package com.waz.utils.events
trait Observable[Listener] {
private object listenersMonitor
private var autowiring = true
@volatile private[events] var wired = false
@volatile private var listeners = Set.empty[Listener]
protected def onWire(): Unit
protected def onUnwire(): Unit
private[events] def subscribe(l: Listener): Unit = listenersMonitor.synchronized {
listeners += l
if (!wired) {
wired = true
onWire()
}
}
private[events] def unsubscribe(l: Listener): Unit = listenersMonitor.synchronized {
listeners -= l
if (wired && autowiring && listeners.isEmpty) {
wired = false
onUnwire()
}
}
private[events] def notifyListeners(invoke: Listener => Unit): Unit = listeners foreach invoke
private[events] def hasSubscribers = listeners.nonEmpty
def unsubscribeAll() = listenersMonitor.synchronized {
listeners = Set.empty
if (wired && autowiring) {
wired = false
onUnwire()
}
}
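  /** Keeps this observable wired even while it has no subscribers, so that
    * wiring side effects persist across subscribe/unsubscribe cycles. */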
def disableAutowiring(): this.type = listenersMonitor.synchronized {
autowiring = false
if (!wired) {
wired = true
onWire()
}
this
}
}
| wireapp/wire-android-sync-engine | zmessaging/src/main/scala/com/waz/utils/events/Observable.scala | Scala | gpl-3.0 | 1,853 |