code | repo_name | path | language | license | size
---|---|---|---|---|---
import sbt._
import Keys._
object FPInScalaBuild extends Build {
val dependencies = Seq(
"org.scalatest" % "scalatest_2.11" % "2.2.4",
"junit" % "junit" % "4.12"
)
val opts = Project.defaultSettings ++ Seq(
name := "fpinscala" ,
version := "0.1",
scalaVersion := "2.11.6",
scalacOptions := Seq("-deprecation"),
resolvers += "Typesafe Repository" at "http://repo.typesafe.com/typesafe/releases/" ,
libraryDependencies ++= dependencies
)
/****
lazy val root =
Project(id = "fpinscala",
base = file("."),
settings = opts ++ Seq(
onLoadMessage ~= (_ + nio2check())
)) aggregate (chapterCode, exercises, answers)
*****/
lazy val root = Project("fpinscala", file(".")) settings(
version := "0.2",
scalaVersion := "2.11.4",
scalacOptions := Seq("-deprecation"),
libraryDependencies ++= dependencies
) aggregate (chapterCode, exercises, answers)
lazy val chapterCode =
Project(id = "chapter-code",
base = file("chaptercode"),
settings = opts)
lazy val exercises =
Project(id = "exercises",
base = file("exercises"),
settings = opts)
lazy val answers =
Project(id = "answers",
base = file("answers"),
settings = opts)
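// Returns an empty string when NIO.2 (JSR-203) is available, otherwise a warning message used with onLoadMessage above.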
def nio2check(): String = {
val cls = "java.nio.channels.AsynchronousFileChannel"
try {Class.forName(cls); ""}
catch {case _: ClassNotFoundException =>
("\\nWARNING: JSR-203 \\"NIO.2\\" (" + cls + ") not found.\\n" +
"You are probably running Java < 1.7; answers will not compile.\\n" +
"You seem to be running " + System.getProperty("java.version") + ".\\n" +
"Try `project exercises' before compile, or upgrading your JDK.")
}
}
}
| sarbo/fpinscala | project/Build.scala | Scala | mit | 1,815 |
/**
* This file is part of the TA Buddy project.
* Copyright (c) 2014 Alexey Aksenov [email protected]
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License version 3
* as published by the Free Software Foundation with the addition of the
* following permission added to Section 15 as permitted in Section 7(a):
* FOR ANY PART OF THE COVERED WORK IN WHICH THE COPYRIGHT IS OWNED
* BY Limited Liability Company «MEZHGALAKTICHESKIJ TORGOVYJ ALIANS»,
* Limited Liability Company «MEZHGALAKTICHESKIJ TORGOVYJ ALIANS» DISCLAIMS
* THE WARRANTY OF NON INFRINGEMENT OF THIRD PARTY RIGHTS.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Affero General Public License for more details.
* You should have received a copy of the GNU Affero General Public License
* along with this program; if not, see http://www.gnu.org/licenses or write to
* the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
* Boston, MA, 02110-1301 USA, or download the license from the following URL:
* http://www.gnu.org/licenses/agpl.html
*
* The interactive user interfaces in modified source and object code versions
* of this program must display Appropriate Legal Notices, as required under
* Section 5 of the GNU Affero General Public License.
*
* In accordance with Section 7(b) of the GNU Affero General Public License,
* you must retain the producer line in every report, form or document
* that is created or manipulated using TA Buddy.
*
* You can be released from the requirements of the license by purchasing
* a commercial license. Buying such a license is mandatory as soon as you
* develop commercial activities involving the TA Buddy software without
* disclosing the source code of your own applications.
* These activities include: offering paid services to customers,
* serving files in a web or/and network application,
* shipping TA Buddy with a closed source product.
*
* For more information, please contact Digimead Team at this
* address: [email protected]
*/
package org.digimead.tabuddy.desktop.model.editor.ui.view.editor.bar
import org.digimead.digi.lib.api.XDependencyInjection
import org.digimead.tabuddy.desktop.core.definition.Context
import org.digimead.tabuddy.desktop.core.support.App
import org.digimead.tabuddy.desktop.model.editor.ui.view.editor.bar.editor.{ CollapseAll, ExpandAll, ToggleEmpty, ToggleIdentificators }
import org.eclipse.e4.core.contexts.ContextInjectionFactory
import org.eclipse.jface.action.{ CoolBarManager, ToolBarManager }
import org.eclipse.swt.SWT
import scala.language.implicitConversions
/**
* Editor actions toolbar.
*/
class EditorBar {
/** Create toolbar. */
def create(coolBar: CoolBarManager, context: Context) {
val toolBar = new ToolBarManager(SWT.NONE)
toolBar.add(ContextInjectionFactory.make(classOf[ToggleIdentificators], context))
toolBar.add(ContextInjectionFactory.make(classOf[ToggleEmpty], context))
toolBar.add(ContextInjectionFactory.make(classOf[ExpandAll], context))
toolBar.add(ContextInjectionFactory.make(classOf[CollapseAll], context))
coolBar.add(toolBar)
}
}
object EditorBar {
implicit def bar2implementation(b: EditorBar.type): EditorBar = b.inner
/** Get EditorBar implementation. */
def inner = DI.implementation
/**
* Dependency injection routines
*/
private object DI extends XDependencyInjection.PersistentInjectable {
/** EditorBar implementation. */
lazy val implementation = injectOptional[EditorBar] getOrElse new EditorBar
}
}
| digimead/digi-TABuddy-desktop | part-model-editor/src/main/scala/org/digimead/tabuddy/desktop/model/editor/ui/view/editor/bar/EditorBar.scala | Scala | agpl-3.0 | 3,750 |
package org.jetbrains.plugins.scala.lang.refactoring.util
import com.intellij.lang.ASTNode
import java.util.Map
import com.intellij.psi.impl.source.tree.{CompositeElement, TreeElement, TreeCopyHandler}
import org.jetbrains.plugins.scala.lang.parser.ScalaElementTypes
import com.intellij.psi.impl.source.SourceTreeToPsiMap
import org.jetbrains.plugins.scala.lang.psi.api.base.ScReferenceElement
import org.jetbrains.plugins.scala.lang.resolve.ScalaResolveResult
import org.jetbrains.plugins.scala.lang.psi.types.ScSubstitutor
import com.intellij.psi.PsiNamedElement
import com.intellij.openapi.util.Key
import com.intellij.util.IncorrectOperationException
import org.jetbrains.plugins.scala.lang.psi.ScalaPsiElement
/**
* User: Alexander Podkhalyuzin
* Date: 15.09.2009
*/
class ScalaChangeUtilSupport extends TreeCopyHandler {
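/** While copying a reference element, remember the PsiNamedElement it resolves to as
 * copyable user data so the reference can be re-bound after the copy is inserted. */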
def encodeInformation(element: TreeElement, original: ASTNode, encodingState: Map[Object, Object]): Unit = {
if (!element.isInstanceOf[ScalaPsiElement]) return
if (original.isInstanceOf[CompositeElement]) {
original.getElementType match {
case ScalaElementTypes.REFERENCE | ScalaElementTypes.REFERENCE_EXPRESSION | ScalaElementTypes.TYPE_PROJECTION => {
val res = original.getPsi.asInstanceOf[ScReferenceElement].bind
res match {
case Some(resolveResult@ScalaResolveResult(elem: PsiNamedElement, subst: ScSubstitutor)) => {
element.putCopyableUserData(ScalaChangeUtilSupport.REFERENCED_MEMBER_KEY, elem)
}
case _ =>
}
}
case _ =>
}
}
}
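/** After the copied tree is inserted, re-bind unqualified references to the remembered
 * element when plain resolution no longer points at it. */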
def decodeInformation(element: TreeElement, decodingState: Map[Object, Object]): TreeElement = {
if (!element.isInstanceOf[ScalaPsiElement]) return null
if (element.isInstanceOf[CompositeElement]) {
if (element.getElementType == ScalaElementTypes.REFERENCE || element.getElementType == ScalaElementTypes.REFERENCE_EXPRESSION ||
element.getElementType == ScalaElementTypes.TYPE_PROJECTION) {
var ref = SourceTreeToPsiMap.treeElementToPsi(element).asInstanceOf[ScReferenceElement]
val named: PsiNamedElement = element.getCopyableUserData(ScalaChangeUtilSupport.REFERENCED_MEMBER_KEY)
if (named != null) {
element.putCopyableUserData(ScalaChangeUtilSupport.REFERENCED_MEMBER_KEY, null)
val res = ref.resolve
if (!element.getManager.areElementsEquivalent(res, named)) {
try {
if (ref.qualifier == None) {
ref = ref.bindToElement(named).asInstanceOf[ScReferenceElement]
}
}
catch {
case ignored: IncorrectOperationException =>
}
return SourceTreeToPsiMap.psiElementToTree(ref).asInstanceOf[TreeElement]
} //todo: else
}
}
return element
}
return null
}
}
object ScalaChangeUtilSupport {
val REFERENCED_MEMBER_KEY: Key[PsiNamedElement] = Key.create("REFERENCED_MEMBER_KEY")
}
| consulo/consulo-scala | src/org/jetbrains/plugins/scala/lang/refactoring/util/ScalaChangeUtilSupport.scala | Scala | apache-2.0 | 3,013 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.openwhisk.core.database.s3
import akka.http.scaladsl.model.Uri.Path
import com.typesafe.config.ConfigFactory
import java.time.Instant
import org.apache.openwhisk.core.ConfigKeys
import org.apache.openwhisk.core.database.s3.S3AttachmentStoreProvider.S3Config
import org.junit.runner.RunWith
import org.scalatest.junit.JUnitRunner
import org.scalatest.{FlatSpec, Matchers, OptionValues}
import pureconfig.loadConfigOrThrow
@RunWith(classOf[JUnitRunner])
class CloudFrontSignerTests extends FlatSpec with Matchers with OptionValues {
val qt = "\\"\\"\\""
val privateKey =
"""-----BEGIN RSA PRIVATE KEY-----
|MIIBPAIBAAJBAOY+Q7vyH1SnCUoFIpzqmZe1TNCxiE6zuiMRmjuJqiAzQWdb5hEA
|ZaC+f7Lcu53IvczZR0KsP4JndzG23rVg/y0CAwEAAQJBAMK+F3x4ppdrUSgSf9xJ
|cfAnoPlDsA8hZWcUFGgXYJYqKYw3NqoYG5fwyZ7xrwdMhpbdgD++nsBC/JMwUhEB
|h+ECIQDzj5Tbd7WvfaKGjozwQgHA9u3f53kxCWovpFEngU6VNwIhAPIAkAPnzuDr
|q3cEyAbM49ozjyc6/NOV6QK65HQj1gC7AiBrax/Ty3At/dL4VVaDgBkV6dHvtj8V
|CXnzmRzRt43Y8QIhAIzrvPE5RGP/eEqHUz96glhm276Zf+5qBlTbpfrnf0/PAiEA
|r1vFsvC8+KSHv7XGU1xfeiHHpHxEfDvJlX7/CxeWumQ=
|-----END RSA PRIVATE KEY-----
|""".stripMargin
val keyPairId = "OPENWHISKISFUNTOUSE"
val configString =
s"""whisk {
| s3 {
| bucket = "openwhisk-test"
| prefix = "dev"
| cloud-front-config {
| domain-name = "foo.com"
| key-pair-id = "$keyPairId"
| private-key = $qt$privateKey$qt
| timeout = 10 m
| }
| }
|}""".stripMargin
behavior of "CloudFront config"
it should "generate a signed url" in {
val config = ConfigFactory.parseString(configString).withFallback(ConfigFactory.load())
val s3Config = loadConfigOrThrow[S3Config](config, ConfigKeys.s3)
val signer = CloudFrontSigner(s3Config.cloudFrontConfig.get)
val expiration = Instant.now().plusSeconds(s3Config.cloudFrontConfig.get.timeout.toSeconds)
val uri = signer.getSignedURL("bar")
val query = uri.query()
//A signed url is of format
//https://<domain-name>/<object key>?Expires=xxx&Signature=xxx&Key-Pair-Id=xxx
uri.scheme shouldBe "https"
uri.path.tail shouldBe Path("bar")
query.get("Expires") shouldBe Some(expiration.getEpochSecond.toString)
query.get("Signature") shouldBe defined
query.get("Key-Pair-Id").value shouldBe keyPairId
}
}
| openwhisk/openwhisk | tests/src/test/scala/org/apache/openwhisk/core/database/s3/CloudFrontSignerTests.scala | Scala | apache-2.0 | 3,208 |
package com.toscaruntime.it
import com.toscaruntime.it.TestConstant._
import com.toscaruntime.it.steps.CsarsSteps._
import com.toscaruntime.it.steps.DeploymentsSteps._
import org.scalatest.MustMatchers
/**
* Deployment images specs
*
* @author Minh Khang VU
*/
class DeploymentsSpec extends AbstractSpec with MustMatchers {
info("As a user I want to be able to create/ list / delete deployment images")
feature("Create/ list / delete deployment images") {
scenario("Wordpress") {
Given("I download and install all necessary types for wordpress deployment")
installNormativeTypesAndProviders()
downloadZipFileAndExtract("https://github.com/alien4cloud/samples/archive/master.zip", tempPath)
assertNoCompilationErrorsDetected(installCsar(tempPath.resolve("samples-master").resolve("apache")))
assertNoCompilationErrorsDetected(installCsar(tempPath.resolve("samples-master").resolve("mysql")))
assertNoCompilationErrorsDetected(installCsar(tempPath.resolve("samples-master").resolve("php")))
assertNoCompilationErrorsDetected(installCsar(tempPath.resolve("samples-master").resolve("wordpress")))
And("I install the abstract wordpress topology")
assertNoCompilationErrorsDetected(installCsar(tempPath.resolve("samples-master").resolve("topology-wordpress")))
When("I create deployment image for the wordpress docker topology")
val success = createDeploymentImage("wordpress")
Then("It should succeed")
success must be(true)
When("I list deployment images")
val deploymentImageList = listDeploymentImages()
Then("The result should contain wordpress")
assertDeploymentImageListContain(deploymentImageList, "wordpress")
When("I delete the deployment image")
deleteDeploymentImage("wordpress")
And("I list deployment images")
val deploymentImageListAfterDeletion = listDeploymentImages()
Then("The result should not contain wordpress")
assertDeploymentImageListNotContain(deploymentImageListAfterDeletion, "wordpress")
}
}
}
| vuminhkh/tosca-runtime | test/src/it/scala/com/toscaruntime/it/DeploymentsSpec.scala | Scala | mit | 2,092 |
package org.hammerlab.bam.check
import hammerlab.iterator._
import hammerlab.path._
import magic_rdds.zip._
import org.apache.spark.rdd.RDD
import org.hammerlab.bam.check
import org.hammerlab.bam.check.Checker.MakeChecker
import org.hammerlab.bam.check.full.error.Flags
import org.hammerlab.bam.check.indexed.BlocksAndIndexedRecords
import org.hammerlab.bam.kryo.pathSerializer
import org.hammerlab.bgzf.Pos
import org.hammerlab.bgzf.block.{ Metadata, PosIterator }
import org.hammerlab.channel.CachingChannel._
import org.hammerlab.channel.SeekableByteChannel
import org.hammerlab.kryo._
import scala.collection.mutable
trait CallPartition {
self: CheckerApp[_] ⇒
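/** For every BGZF block in this partition, emit each candidate record position paired with
 * the results of the two checkers, reading through a shared cached channel that is closed
 * when the iterator is exhausted. */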
def callPartition[
C1 <: check.Checker[Boolean],
Call2,
C2 <: check.Checker[Call2]
](
blocks: Iterator[Metadata]
)(
implicit
makeChecker1: MakeChecker[Boolean, C1],
makeChecker2: MakeChecker[Call2, C2]
): Iterator[(Pos, (Boolean, Call2))] = {
val ch = SeekableByteChannel(path).cache
val checker1 = makeChecker1(ch)
val checker2 = makeChecker2(ch)
blocks
.flatMap {
block ⇒
compressedSizeAccumulator.add(block.compressedSize)
PosIterator(block)
}
.map {
pos ⇒
pos →
(
checker1(pos),
checker2(pos)
)
}
.finish(ch.close())
}
def vsIndexed[Call, C <: Checker[Call]](
implicit makeChecker: MakeChecker[Call, C]
): RDD[(Pos, (Boolean, Call))] = {
val BlocksAndIndexedRecords(blocks, records) = BlocksAndIndexedRecords()
blocks
.zippartitions(records) {
(blocks, setsIter) ⇒
implicit val records = setsIter.next()
callPartition[indexed.Checker, Call, C](blocks)
}
}
}
object CallPartition
extends spark.Registrar(
cls[Path],
cls[mutable.WrappedArray.ofRef[_]],
cls[mutable.WrappedArray.ofInt],
cls[Flags]
)
| ryan-williams/spark-bam | cli/src/main/scala/org/hammerlab/bam/check/CallPartition.scala | Scala | apache-2.0 | 1,962 |
package test002
import scalikejdbc._
import skinny.dbmigration.DBSeeds
trait CreateTables extends DBSeeds { self: Connection =>
override val dbSeedsAutoSession = NamedAutoSession('test002)
addSeedSQL(
sql"""
create table account (
name varchar(128) not null)
"""
)
runIfFailed(sql"select count(1) from account")
}
| seratch/skinny-framework | orm/src/test/scala/test002/CreateTables.scala | Scala | mit | 332 |
package com.sksamuel.elastic4s.indexes
import com.sksamuel.elastic4s.testkit.DockerTests
import org.scalatest.{Matchers, WordSpec}
import scala.util.Try
class FlushIndexRequestTest extends WordSpec with Matchers with DockerTests {
private val indexname = "flushindextest"
Try {
client.execute {
deleteIndex(indexname)
}
}
client.execute {
createIndex(indexname).mappings(
mapping("pasta").fields(
textField("name")
)
)
}.await
"flush index" should {
"acknowledge" in {
client.execute {
flushIndex(indexname)
}.await.result.shards.successful > 0 shouldBe true
}
}
}
| Tecsisa/elastic4s | elastic4s-tests/src/test/scala/com/sksamuel/elastic4s/indexes/FlushIndexRequestTest.scala | Scala | apache-2.0 | 656 |
/***********************************************************************
* Copyright (c) 2013-2018 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.process.analytic
import com.typesafe.scalalogging.LazyLogging
import org.geotools.data.Query
import org.geotools.data.collection.ListFeatureCollection
import org.geotools.data.simple.{SimpleFeatureCollection, SimpleFeatureSource}
import org.geotools.process.factory.{DescribeParameter, DescribeProcess, DescribeResult}
import org.geotools.util.NullProgressListener
import org.locationtech.geomesa.features.ScalaSimpleFeature
import org.locationtech.geomesa.index.iterators.StatsScan
import org.locationtech.geomesa.index.stats.HasGeoMesaStats
import org.locationtech.geomesa.process.analytic.MinMaxProcess.MinMaxVisitor
import org.locationtech.geomesa.process.{FeatureResult, GeoMesaProcess, GeoMesaProcessVisitor}
import org.locationtech.geomesa.utils.collection.SelfClosingIterator
import org.locationtech.geomesa.utils.geotools.GeometryUtils
import org.locationtech.geomesa.utils.stats.Stat
import org.opengis.feature.Feature
import org.opengis.feature.simple.SimpleFeature
@DescribeProcess(
title = "Min/Max Process",
description = "Gets attribute bounds for a data set"
)
class MinMaxProcess extends GeoMesaProcess with LazyLogging {
@DescribeResult(description = "Output feature collection")
def execute(
@DescribeParameter(
name = "features",
description = "The feature set on which to query")
features: SimpleFeatureCollection,
@DescribeParameter(
name = "attribute",
description = "The attribute to gather bounds for")
attribute: String,
@DescribeParameter(
name = "cached",
description = "Return cached values, if available",
min = 0, max = 1)
cached: java.lang.Boolean = null
): SimpleFeatureCollection = {
require(attribute != null, "Attribute is a required field")
logger.debug(s"Attempting min/max process on type ${features.getClass.getName}")
val visitor = new MinMaxVisitor(features, attribute, Option(cached).forall(_.booleanValue()))
features.accepts(visitor, new NullProgressListener)
visitor.getResult.results
}
}
object MinMaxProcess {
class MinMaxVisitor(features: SimpleFeatureCollection, attribute: String, cached: Boolean)
extends GeoMesaProcessVisitor with LazyLogging {
private lazy val stat: Stat = Stat(features.getSchema, Stat.MinMax(attribute))
private var resultCalc: FeatureResult = _
// non-optimized visit
override def visit(feature: Feature): Unit = stat.observe(feature.asInstanceOf[SimpleFeature])
override def getResult: FeatureResult = {
if (resultCalc != null) {
resultCalc
} else {
createResult(stat.toJson)
}
}
override def execute(source: SimpleFeatureSource, query: Query): Unit = {
logger.debug(s"Running Geomesa min/max process on source type ${source.getClass.getName}")
source.getDataStore match {
case ds: HasGeoMesaStats =>
resultCalc = ds.stats.getAttributeBounds[Any](source.getSchema, attribute, query.getFilter, !cached) match {
case None => createResult("{}")
case Some(mm) => createResult(mm.toJson)
}
case ds =>
logger.warn(s"Running unoptimized min/max query on ${ds.getClass.getName}")
SelfClosingIterator(features.features).foreach(visit)
}
}
}
private def createResult(stat: String): FeatureResult = {
val sf = new ScalaSimpleFeature(StatsScan.StatsSft, "", Array(stat, GeometryUtils.zeroPoint))
FeatureResult(new ListFeatureCollection(StatsScan.StatsSft, Array[SimpleFeature](sf)))
}
}
| jahhulbert-ccri/geomesa | geomesa-process/geomesa-process-vector/src/main/scala/org/locationtech/geomesa/process/analytic/MinMaxProcess.scala | Scala | apache-2.0 | 4,235 |
package hu.frankdavid.diss.actor
import hu.frankdavid.diss.expression._
import scala.collection.mutable
import hu.frankdavid.diss.util.JobList
import hu.frankdavid.diss.DataTable
import hu.frankdavid.diss.DataTable.UpdateResult
import hu.frankdavid.diss.expression.Cell
import hu.frankdavid.diss.network.NetworkHandler
import scala.concurrent.{Promise, ExecutionContext, Future, future}
import ExecutionContext.Implicits.global
class CalculatorManager(socket: WebSocketHandler, table: DataTable, network: NetworkHandler) {
socket.calculatorManager = this
network.calculatorManager = this
private var jobs = new JobList()
private var jobsRunning: Future[Unit] = null
private var promises = mutable.HashMap[Expression, Promise[Any]]()
private val expressionsUnderProgress = new mutable.HashSet[Expression]()
var load = 0
def calculate(expression: Expression): Future[Any] = {
synchronized {
jobs += expression
val p = Promise[Any]()
promises(expression) = p
issueJobs()
p.future
}
}
def get(expression: HasValue) = table.get(expression)
def getAllCells() = {
table.bindings.map {
case (cell, hasValue) => (cell, (hasValue, table.get(hasValue)))
}.toList
}
def put(expression: Expression, value: Any, processUpdate: Boolean = true) {
val result = table.put(expression, value)
processUpdateResult(result)
}
def bind(cell: Cell, expression: HasValue, processUpdate: Boolean = true) {
socket.cellBindingChanged(cell, expression)
val result = table.bind(cell, expression)
if (processUpdate) {
network.sendBindingChanged(cell, expression)
processUpdateResult(result)
issueJobs()
}
}
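/** Repeatedly merge jobs cheaper than targetCost that have a single Expression parameter
 * into a CompoundExpression together with that parent, until no further merges apply. */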
def mergeJobs(targetCost: Long) {
var changed = false
do {
changed = false
jobs.foreach {
job =>
if (job.cost(table) < targetCost && job.parameters.length == 1) {
job.parameters(0) match {
case parent: Expression =>
jobs.remove(parent)
jobs.remove(job)
jobs.add(new CompoundExpression(List(parent, job)))
changed = true
case _ =>
}
}
}
} while (changed)
}
def parametersReady(expression: Expression): Boolean = {
for (p <- expression.parameters) {
p match {
case e: Expression if expressionsUnderProgress.contains(e) => return false
case _ =>
}
}
true
}
def issueJobs() {
if (jobsRunning == null) {
jobsRunning = future {
issueJobsSync()
}
jobsRunning.onComplete {
case _ =>
jobsRunning = null
}
}
}
private def issueJobsSync() {
while (this.jobs.size > 0) {
val jobs = this.jobs
this.jobs = new JobList()
mergeJobs(200)
sortJobsTopologically()
jobs.foreach {
job =>
if (parametersReady(job)) {
jobs.remove(job)
if (job.cost(table) > 2000) {
evaluateExpressionAsync(job)
} else {
val result = job.evaluateAndSave(table)
processUpdateResult(result)
}
}
}
this.jobs ++= jobs
}
resolvePromises()
}
private def evaluateExpressionAsync(expression: Expression) {
expressionsUnderProgress += expression
future {
val result = expression.evaluateAndSave(table)
processUpdateResult(result)
}.andThen {
case _ => expressionsUnderProgress -= expression
}
}
private def resolvePromises() {
val promises = this.promises
this.promises = new mutable.HashMap[Expression, Promise[Any]]()
promises.foreach {
case (expression, promise) => future {
promise.success(table.get(expression).orNull)
}
}
}
def processUpdateResult(updateResult: UpdateResult) {
updateResult.notifiedExpressions.foreach {
case c: Cell => table.get(c).foreach {
value =>
socket.cellValueChanged(c, value)
}
case expr =>
table.resolveExpression(expr).map(calculate)
}
}
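/** Depth-first topological sort of the pending jobs along the table's listener graph,
 * failing fast when a circular reference is detected. */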
def sortJobsTopologically() {
val visited = new mutable.HashSet[HasValue]()
val visiting = new mutable.HashSet[HasValue]()
val sorted = new JobList()
def visit(expression: HasValue) {
visiting += expression
table.listeners.getOrElse(expression, Set()).foreach {
e =>
if (e != expression && !visited.contains(e)) {
if (visiting.contains(e))
sys.error("Circular reference discovered")
visit(e)
}
}
visiting -= expression
visited += expression
expression match {
case e: Expression => sorted += e
case _ =>
}
}
if (jobs.size > 0) {
jobs.foreach(visit)
jobs.clear()
jobs ++= sorted.reverseIterator
}
}
//
// def resolveExpression(expression: HasValue): Expression = {
// expression match {
// case c: Cell if bindings.contains(c) => resolveExpression(bindings(c))
// case e: Expression => e
// case _ => throw new IllegalArgumentException("Can't resolve " + expression.toString)
// }
// }
}
| frankdavid/diss | src/main/scala/hu/frankdavid/diss/actor/CalculatorManager.scala | Scala | apache-2.0 | 5,245 |
class Foo {
def foo[@specialized(Int) T](x: T) = new Object {
private final val myEdges = List(1, 2 , 3)
def boo: Unit = {
myEdges
}
}
}
| yusuke2255/dotty | tests/untried/pos/spec-private.scala | Scala | bsd-3-clause | 161 |
/*
* Copyright 2016 The BigDL Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intel.analytics.bigdl.utils.tf
import java.io.{File => JFile}
import java.nio.ByteOrder
import java.util.UUID
import com.google.protobuf.ByteString
import com.intel.analytics.bigdl.dataset.{DistributedDataSet, MiniBatch}
import com.intel.analytics.bigdl.nn._
import com.intel.analytics.bigdl.optim.{DistriOptimizer, Trigger}
import com.intel.analytics.bigdl.tensor.{Storage, Tensor}
import com.intel.analytics.bigdl.utils._
import org.apache.log4j.{Level, Logger}
import org.apache.spark.SparkContext
import org.apache.spark.rdd.RDD
import com.intel.analytics.bigdl.numeric.NumericFloat
import org.tensorflow.framework.{DataType, NodeDef, TensorProto, TensorShapeProto}
import scala.collection.mutable
import scala.sys.process._
import scala.math._
import scala.reflect.ClassTag
object TensorflowLoaderSpec {
private val data1 = Array(0.0f, 1.0f, 0.0f, 1.0f, 0.0f, 1.0f, 0.0f, 1.0f, 0.0f, 1.1f)
private val data2 = Array(0.0f, 1.0f, 0.0f, 1.0f, 0.0f, 1.0f, 0.0f, 1.0f, 0.0f, 1.1f)
private val input1: Tensor[Float] = Tensor[Float](Storage[Float](data1))
private val input2: Tensor[Float] = Tensor[Float](Storage[Float](data2))
private val nodeNumber = 4
private val coreNumber = 4
Engine.init(nodeNumber, coreNumber, true)
private val batchSize = 2 * coreNumber
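// Build one MiniBatch per partition index: even rows get (input1, label 0.0), odd rows get (input2, label 0.1).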
private val prepareData: Int => (MiniBatch[Float]) = index => {
val input = Tensor[Float]().resize(batchSize, 10)
val target = Tensor[Float]().resize(batchSize)
var i = 0
while (i < batchSize) {
if (i % 2 == 0) {
target.setValue(i + 1, 0.0f)
input.select(1, i + 1).copy(input1)
} else {
target.setValue(i + 1, 0.1f)
input.select(1, i + 1).copy(input2)
}
i += 1
}
MiniBatch(input, target)
}
}
class TensorflowLoaderSpec extends TensorflowSpecHelper{
Logger.getLogger("org").setLevel(Level.WARN)
Logger.getLogger("akka").setLevel(Level.WARN)
import TensorflowLoaderSpec._
var sc: SparkContext = null
var dataSet: DistributedDataSet[MiniBatch[Float]] = null
override def doBefore(): Unit = {
val conf = Engine.createSparkConf().setAppName("RDDOptimizerSpec")
.setMaster("local[1]")
sc = SparkContext.getOrCreate(conf)
val rdd = sc.parallelize(1 to (256 * 4), 4).map(prepareData)
dataSet = new DistributedDataSet[MiniBatch[Float]] {
override def originRDD(): RDD[_] = rdd
override def data(train : Boolean): RDD[MiniBatch[Float]] = rdd
override def size(): Long = 256 * nodeNumber
override def shuffle(): Unit = {}
}
Engine.model.setPoolSize(1)
System.setProperty("bigdl.enableNHWC", "true")
}
override def doAfter(): Unit = {
if (sc != null) {
sc.stop()
}
System.setProperty("bigdl.enableNHWC", "false")
}
"TensorFlow loader" should "read a list of nodes from pb file" in {
val resource = getClass().getClassLoader().getResource("tf")
val path = processPath(resource.getPath()) + JFile.separator + "test.pb"
val results = TensorflowLoader.parse(path)
results.size() should be(14)
}
"TensorFlow loader" should "be able to build a TF graph" in {
val resource = getClass().getClassLoader().getResource("tf")
val path = processPath(resource.getPath()) + JFile.separator + "test.pb"
val results = TensorflowLoader.parse(path)
val (tfGraph, _, _) = TensorflowLoader.buildTFGraph(results, Seq("output"))
tfGraph.size should be(15) // there's a dummy output
val topSort = tfGraph.topologySort // It can do topology sort
topSort.length should be(15)
topSort(0).element should be(null)
topSort(1).element.getName should be("output")
topSort(2).element.getName should be("MatMul_1")
topSort(3).element.getName should be("Variable_3/read")
topSort(4).element.getName should be("Variable_3")
topSort(5).element.getName should be("Tanh")
topSort(6).element.getName should be("Variable_2/read")
topSort(7).element.getName should be("Variable_2")
topSort(8).element.getName should be("BiasAdd")
topSort(9).element.getName should be("MatMul")
topSort(10).element.getName should be("Variable_1/read")
topSort(11).element.getName should be("Variable_1")
topSort(12).element.getName should be("Placeholder")
topSort(13).element.getName should be("Variable/read")
topSort(14).element.getName should be("Variable")
}
"TensorFlow loader" should "be able to build a TF sub graph" in {
val resource = getClass().getClassLoader().getResource("tf")
val path = processPath(resource.getPath()) + JFile.separator + "test.pb"
val results = TensorflowLoader.parse(path)
val (tfGraph, _, _) = TensorflowLoader.buildTFGraph(results, Seq("output"),
(node: NodeDef) => node.getName == "Tanh")
tfGraph.size should be(9) // there's a dummy output
val topSort = tfGraph.topologySort // It can do topology sort
topSort.length should be(9)
topSort(0).element should be(null)
topSort(1).element.getName should be("output")
topSort(2).element.getName should be("MatMul_1")
topSort(3).element.getName should be("Variable_3/read")
topSort(4).element.getName should be("Variable_3")
topSort(5).element.getName should be("Tanh")
topSort(6).element.getName should be("Variable_2/read")
topSort(7).element.getName should be("Variable_2")
topSort(8).element.getName should be("input0")
}
"TensorFlow loader" should "be able to build a BigDL graph" in {
val resource = getClass().getClassLoader().getResource("tf")
val path = processPath(resource.getPath()) + JFile.separator + "test.pb"
val model = TensorflowLoader.load(path, Seq("Placeholder"), Seq("output"),
ByteOrder.LITTLE_ENDIAN)
val container = model.asInstanceOf[Graph[Float]]
container.modules.length should be(4)
RandomGenerator.RNG.setSeed(100)
val input = Tensor[Float](4, 1).rand()
val output1 = container.forward(input)
val model2 = Sequential[Float]()
val fc1 = Linear[Float](1, 10)
fc1.parameters()._1(0).fill(0.2f)
fc1.parameters()._1(1).fill(0.1f)
model2.add(fc1).add(Tanh())
val fc2 = Linear[Float](10, 1)
fc2.parameters()._1(0).fill(0.2f)
fc2.parameters()._1(1).fill(0.1f)
model2.add(fc2)
val output2 = model2.forward(input)
output1 should be(output2)
}
"TensorFlow loader" should "throw exception if input contain duplicate names" in {
val resource = getClass().getClassLoader().getResource("tf")
val path = processPath(resource.getPath()) + JFile.separator + "test.pb"
intercept[IllegalArgumentException] {
val model = TensorflowLoader.load(path, Seq("Placeholder", "Placeholder"), Seq("output"),
ByteOrder.LITTLE_ENDIAN)
}
}
"TensorFlow loader" should "throw exception if input contain conflict names" in {
val resource = getClass().getClassLoader().getResource("tf")
val path = processPath(resource.getPath()) + JFile.separator + "test.pb"
intercept[IllegalArgumentException] {
val model = TensorflowLoader.load(path, Seq("Placeholder", "Placeholder:0"), Seq("output"),
ByteOrder.LITTLE_ENDIAN)
}
}
"TensorFlow loader" should "throw exception if input location is incorrect" in {
val resource = getClass().getClassLoader().getResource("tf")
val path = processPath(resource.getPath()) + JFile.separator + "test.pb"
intercept[IllegalArgumentException] {
val model = TensorflowLoader.load(path, Seq("MatMul:2"), Seq("output"),
ByteOrder.LITTLE_ENDIAN)
}
}
"TensorFlow loader" should "be able to build a BigDL graph with specify input location" in {
val resource = getClass().getClassLoader().getResource("tf")
val path = processPath(resource.getPath()) + JFile.separator + "test.pb"
val model = TensorflowLoader.load(path, Seq("MatMul:0"), Seq("output"),
ByteOrder.LITTLE_ENDIAN)
val container = model.asInstanceOf[Graph[Float]]
container.modules.length should be(4)
RandomGenerator.RNG.setSeed(100)
val input = Tensor[Float](4, 1).rand()
val output1 = container.forward(input)
val model2 = Sequential[Float]()
val fc1 = Linear[Float](1, 10)
fc1.parameters()._1(0).fill(0.2f)
fc1.parameters()._1(1).fill(0.1f)
model2.add(fc1).add(Tanh())
val fc2 = Linear[Float](10, 1)
fc2.parameters()._1(0).fill(0.2f)
fc2.parameters()._1(1).fill(0.1f)
model2.add(fc2)
val output2 = model2.forward(input)
output1 should be(output2)
}
"TensorFlow loader" should "be able to build a BigDL graph from a subset of a tf graph" in {
val resource = getClass().getClassLoader().getResource("tf")
val path = processPath(resource.getPath()) + JFile.separator + "test.pb"
val model = TensorflowLoader.load(path, Seq("Tanh"), Seq("output"),
ByteOrder.LITTLE_ENDIAN)
val container = model.asInstanceOf[Graph[Float]]
container.modules.length should be(3)
RandomGenerator.RNG.setSeed(100)
val input = Tensor[Float](4, 10).rand()
val output1 = container.forward(input)
val model2 = Sequential[Float]()
model2.add(Tanh())
val fc2 = Linear[Float](10, 1)
fc2.parameters()._1(0).fill(0.2f)
fc2.parameters()._1(1).fill(0.1f)
model2.add(fc2)
val output2 = model2.forward(input)
output1 should be(output2)
}
"Shared weights" should "be the same instance" in {
tfCheck()
val modelName = "share_weight"
// Generate command and prepare the temp folder
val s = JFile.separator
val modelsFolder = processPath(getClass().getClassLoader().getResource("tf").getPath()) +
s + "models"
val modelScript = modelsFolder + s + s"$modelName.py"
val tmpLocation = java.io.File.createTempFile("tensorflowLoaderTest" + UUID.randomUUID(),
modelName)
tmpLocation.delete()
tmpLocation.mkdir()
require(runPython(s"$modelScript $tmpLocation"), "error when run the model script")
// Load the model and input/output tensors
val modelFile = tmpLocation + s + "model.pb"
val model = TensorflowLoader.load(modelFile, Seq("Placeholder"), Seq("output"),
ByteOrder.LITTLE_ENDIAN)
val container = model.asInstanceOf[Graph[Float]]
val l1 = container.modules(1).asInstanceOf[Linear[Float]]
val l2 = container.modules(3).asInstanceOf[Linear[Float]]
assert(l1.weight eq l2.weight)
assert(l1.bias eq l2.bias)
}
"Shared weights" should "be the same after running optimizer" in {
tfCheck()
val modelName = "share_weight"
// Generate command and prepare the temp folder
val s = JFile.separator
val modelsFolder = processPath(getClass().getClassLoader().getResource("tf").getPath()) +
s + "models"
val modelScript = modelsFolder + s + s"$modelName.py"
val tmpLocation = java.io.File.createTempFile("tensorflowLoaderTest" + UUID.randomUUID(),
modelName)
tmpLocation.delete()
tmpLocation.mkdir()
require(runPython(s"$modelScript $tmpLocation"), "error when run the model script")
// Load the model and input/output tensors
val modelFile = tmpLocation + s + "model.pb"
val model = TensorflowLoader.load(modelFile, Seq("Placeholder"), Seq("output"),
ByteOrder.LITTLE_ENDIAN)
val container = model.asInstanceOf[Graph[Float]]
val optimizer = new DistriOptimizer[Float](container, dataSet, new MSECriterion[Float]())
.setState(T("learningRate" -> 20.0))
.setEndWhen(Trigger.maxEpoch(1))
optimizer.optimize()
val l1 = container.modules(1).asInstanceOf[Linear[Float]]
val l2 = container.modules(3).asInstanceOf[Linear[Float]]
assert(l1.weight == l2.weight)
assert(l1.bias == l2.bias)
}
"static simple rnn " should "have the same result as tensorflow" in {
val output = Seq("output:0")
val comparePairs = testModel("rnn", output, backward = true)
for (i <- output.indices) {
val (tf, bigdl) = comparePairs(i)
tf.almostEqual(bigdl, 1e-5) should be(true)
}
for (i <- output.length until comparePairs.length) {
val (tf, bigdl) = comparePairs(i)
tf.almostEqual(bigdl, 1e-3) should be(true)
}
}
"static lstm rnn " should "have the same result as tensorflow" in {
val output = Seq("output:0")
val comparePairs = testModel("rnn_lstm", output, backward = true)
for (i <- output.indices) {
val (tf, bigdl) = comparePairs(i)
tf.almostEqual(bigdl, 1e-5) should be(true)
}
for (i <- output.length until comparePairs.length) {
val (tf, bigdl) = comparePairs(i)
tf.almostEqual(bigdl, 1e-2) should be(true)
}
}
"hand coded lstm rnn " should "have the same result as tensorflow" in {
val output = Seq("output:0")
val comparePairs = testModel("decoder", output, backward = true)
for (i <- output.indices) {
val (tf, bigdl) = comparePairs(i)
tf.almostEqual(bigdl, 1e-5) should be(true)
}
for (i <- output.length until comparePairs.length) {
val (tf, bigdl) = comparePairs(i)
tf.almostEqual(bigdl, 1e-2) should be(true)
}
}
"TensorFlow control dep" should "be load correctly" in {
val output = Seq("output:0")
val comparePairs = testModel("control_dep", output, backward = true)
for (i <- output.indices) {
val (tf, bigdl) = comparePairs(i)
tf.almostEqual(bigdl, 1e-7) should be(true)
}
for (i <- output.length until comparePairs.length) {
val (tf, bigdl) = comparePairs(i)
tf.almostEqual(bigdl, 1e-3) should be(true)
}
}
"Tensorflow load " should "be able to handle multiple edges" in {
val output = Seq("output:0")
val comparePairs = testModel("two_edge", output, backward = true)
for (i <- output.indices) {
val (tf, bigdl) = comparePairs(i)
tf.almostEqual(bigdl, 1e-7) should be(true)
}
for (i <- output.length until comparePairs.length) {
val (tf, bigdl) = comparePairs(i)
tf.almostEqual(bigdl, 1e-3) should be(true)
}
}
"Tensorflow batchnorm nhwc" should "be loaded correctly" in {
val output = Seq("output:0")
val comparePairs = testModel("batch_norm_nhwc", output, backward = true)
for (i <- output.indices) {
val (tf, bigdl) = comparePairs(i)
tf.almostEqual(bigdl, 1e-4) should be(true)
}
for (i <- output.length until comparePairs.length) {
val (tf, bigdl) = comparePairs(i)
tf.almostEqual(bigdl, 1e-3) should be(true)
}
}
"Tensorflow batchnorm nchw" should "be loaded correctly" in {
val output = Seq("output:0")
val comparePairs = testModel("batch_norm_nchw", output, backward = true)
for (i <- output.indices) {
val (tf, bigdl) = comparePairs(i)
tf.almostEqual(bigdl, 1e-4) should be(true)
}
for (i <- output.length until comparePairs.length) {
val (tf, bigdl) = comparePairs(i)
tf.almostEqual(bigdl, 1e-3) should be(true)
}
}
"Tensorflow lenet" should "be load correctly" in {
val output = Seq("LeNet/pool2/MaxPool:0")
val comparePairs = testModel("lenet", output, backward = true)
for (i <- output.indices) {
val (tf, bigdl) = comparePairs(i)
tf.almostEqual(bigdl, 1e-6) should be(true)
}
for (i <- output.length until comparePairs.length) {
val (tf, bigdl) = comparePairs(i)
tf.almostEqual(bigdl, 1e-4) should be(true)
}
}
"Tensorflow conv1d" should "be load correctly" in {
val output = Seq("output:0")
val comparePairs = testModel("temporal_convolution", output, backward = false)
for (i <- output.indices) {
val (tf, bigdl) = comparePairs(i)
tf.almostEqual(bigdl, 1e-6) should be(true)
}
}
"Tensorflow Alexnet" should "be load correctly" in {
val output = Seq("alexnet_v2/fc8/squeezed:0")
val comparePairs = testModel("alexnet", output, backward = true)
for (i <- output.indices) {
val (tf, bigdl) = comparePairs(i)
tf.almostEqual(bigdl, 1e-6) should be(true)
}
for (i <- output.length until comparePairs.length) {
val (tf, bigdl) = comparePairs(i)
tf.almostEqual(bigdl, 1e-4) should be(true)
}
}
"TensorFlow vgg_a" should "be load correctly" in {
val output = Seq("vgg_a/fc8/squeezed:0")
val comparePairs = testModel("vgga", output, backward = true)
for (i <- output.indices) {
val (tf, bigdl) = comparePairs(i)
tf.almostEqual(bigdl, 1e-7) should be(true)
}
for (i <- output.length until comparePairs.length) {
val (tf, bigdl) = comparePairs(i)
tf.almostEqual(bigdl, 1e-3) should be(true)
}
}
"TensorFlow vgg_16" should "be load correctly" in {
val output = Seq("vgg_16/fc8/squeezed:0")
val comparePairs = testModel("vgg16", output, backward = true)
for (i <- output.indices) {
val (tf, bigdl) = comparePairs(i)
tf.almostEqual(bigdl, 1e-7) should be(true)
}
for (i <- output.length until comparePairs.length) {
val (tf, bigdl) = comparePairs(i)
tf.almostEqual(bigdl, 1e-2) should be(true)
}
}
"TensorFlow vgg_19" should "be load correctly" in {
val output = Seq("vgg_19/fc8/squeezed:0")
val comparePairs = testModel("vgg19", output, backward = true)
for (i <- output.indices) {
val (tf, bigdl) = comparePairs(i)
tf.almostEqual(bigdl, 1e-7) should be(true)
}
for (i <- output.length until comparePairs.length) {
val (tf, bigdl) = comparePairs(i)
tf.almostEqual(bigdl, 1e-2) should be(true)
}
}
"TensorFlow overfeat" should "be load correctly" in {
val output = Seq("overfeat/fc8/squeezed:0")
val comparePairs = testModel("overfeat", output, backward = true)
for (i <- output.indices) {
val (tf, bigdl) = comparePairs(i)
tf.almostEqual(bigdl, 1e-6) should be(true)
}
for (i <- output.length until comparePairs.length) {
val (tf, bigdl) = comparePairs(i)
tf.almostEqual(bigdl, 1e-3) should be(true)
}
}
"TensorFlow inception_v3" should "be load correctly" in {
val output = Seq("InceptionV3/Logits/SpatialSqueeze:0")
val comparePairs = testModel("inception_v3", output, true)
for (i <- output.indices) {
val (tf, bigdl) = comparePairs(i)
tf.almostEqual(bigdl, 1e-6) should be(true)
}
for (i <- output.length until comparePairs.length) {
val (tf, bigdl) = comparePairs(i)
tf.almostEqual(bigdl, 1e-2) should be(true)
}
}
"TensorFlow resnet_v1" should "be load correctly" in {
val output = Seq("resnet_v1_101/SpatialSqueeze:0")
val comparePairs = testModel("resnet_v1", output, backward = true)
for (i <- output.indices) {
val (tf, bigdl) = comparePairs(i)
tf.almostEqual(bigdl, 1e-6) should be(true)
}
for (i <- output.length until comparePairs.length) {
val (tf, bigdl) = comparePairs(i)
tf.almostEqual(bigdl, 1e-1) should be(true)
}
}
"TensorFlow inception_resnet_v2" should "be load correctly" in {
val output = Seq("InceptionResnetV2/Logits/Logits/BiasAdd:0",
"InceptionResnetV2/AuxLogits/Logits/BiasAdd:0")
val comparePairs = testModel("inception_resnet_v2", output, backward = true)
for (i <- output.indices) {
val (tf, bigdl) = comparePairs(i)
tf.almostEqual(bigdl, 1e-6) should be(true)
}
for (i <- output.length until comparePairs.length) {
val (tf, bigdl) = comparePairs(i)
tf.almostEqual(bigdl, 1e-1) should be(true)
}
}
"TensorArray operations" should "be load correctly" in {
val output = Seq("scatter_and_gather:0", "split_and_concat:0", "write_and_read:0", "size1:0",
"size2:0", "unstack_and_stack:0")
val comparePairs = testModel("tensor_array", output, backward = false)
for (i <- output.indices) {
val (tf, bigdl) = comparePairs(i)
tf.almostEqual(bigdl, 1e-6) should be(true)
}
}
"dynamic rnn" should "be load correctly" in {
val output = Seq("rnn_loss:0")
val comparePairs = testModel("dynamic_rnn", output, backward = false)
for (i <- output.indices) {
val (tf, bigdl) = comparePairs(i)
tf.almostEqual(bigdl, 1e-3) should be(true)
}
}
"dynamic rnn grad" should "be load correctly" in {
val output = Seq("gradOutput:0")
val comparePairs = testModel("dynamic_rnn_grad", output, backward = false)
for (i <- output.indices) {
val (tf, bigdl) = comparePairs(i)
tf.almostEqual(bigdl, 1e-3) should be(true)
}
}
"dynamic lstm" should "be load correctly" in {
val output = Seq("lstm_loss:0")
val comparePairs = testModel("dynamic_lstm", output, backward = false)
for (i <- output.indices) {
val (tf, bigdl) = comparePairs(i)
tf.almostEqual(bigdl, 1e-2) should be(true)
}
}
"dynamic lstm grad" should "be load correctly" in {
val output = Seq("gradOutput:0")
val comparePairs = testModel("dynamic_lstm_grad", output, backward = false)
for (i <- output.indices) {
val (tf, bigdl) = comparePairs(i)
tf.almostEqual(bigdl, 1e-2) should be(true)
}
}
"dynamic gru" should "be load correctly" in {
val output = Seq("gru_loss:0")
val comparePairs = testModel("dynamic_gru", output, backward = false)
for (i <- output.indices) {
val (tf, bigdl) = comparePairs(i)
tf.almostEqual(bigdl, 1e-2) should be(true)
}
}
"dynamic gru grad" should "be load correctly" in {
val output = Seq("gradOutput:0")
val comparePairs = testModel("dynamic_gru_grad", output, backward = false)
for (i <- output.indices) {
val (tf, bigdl) = comparePairs(i)
tf.almostEqual(bigdl, 1e-2) should be(true)
}
}
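/**
 * Run the python script for `modelName` to dump a frozen graph and reference tensors,
 * load the graph with TensorflowLoader, and return (tensorflow, bigdl) tensor pairs for
 * the forward outputs and, when `backward` is true, for the variable gradients as well.
 */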
private def testModel(
modelName: String,
endPoints: Seq[String],
backward: Boolean): Seq[(Tensor[Float], Tensor[Float])] = {
tfCheck()
// Generate command and prepare the temp folder
val s = JFile.separator
val modelsFolder = processPath(getClass().getClassLoader().getResource("tf").getPath()) +
s + "models"
val modelScript = modelsFolder + s + s"$modelName.py"
val tmpLocation = java.io.File.createTempFile("tensorflowLoaderTest" + UUID.randomUUID(),
modelName)
tmpLocation.delete()
tmpLocation.mkdir()
if (backward) {
require(runPython(s"$modelScript $tmpLocation ${endPoints.mkString(",")} True"),
"error when run the model script")
} else {
require(runPython(s"$modelScript $tmpLocation ${endPoints.mkString(",")} False"),
"error when run the model script")
}
// Load the model and input/output tensors
import collection.JavaConverters._
val modelFile = tmpLocation + s + "model.pb"
val tfNodes = TensorflowLoader.parse(modelFile)
// filter node for gradient computing
val (tfGraph, inputs, _) =
TensorflowLoader.buildTFGraph(tfNodes, endPoints.map(_.split(":")(0)),
(node: NodeDef) => node.getName == "input_node")
val context = new Context[Float]()
val model = TensorflowLoader.buildBigDLModel(tfGraph, inputs.toSeq.map(_._2).flatten,
endPoints.map(_.split(":")(0)), ByteOrder.LITTLE_ENDIAN, "", Some(context), backward)
// Compare the tensor contents
val tfInputTensor = tfNodes.asScala.filter(_.getName == "input")(0)
.getAttrMap.get("value").getTensor
val tfOutputTensors = (0 until endPoints.length).map(
i => tfNodes.asScala.filter(_.getName == s"output$i")(0).getAttrMap.get("value").getTensor)
val input = TensorflowToBigDL.toTensor(tfInputTensor,
ByteOrder.LITTLE_ENDIAN)
val bigdlOutputs = if (endPoints.length == 1) {
Seq(model.forward(input).toTensor)
} else {
val t = model.forward(input).toTable
(1 to endPoints.length).map(t[Tensor[Float]](_))
}
val comparePair = new mutable.ArrayBuffer[(Tensor[Float], Tensor[Float])]()
val forwardPairs = tfOutputTensors.zip(bigdlOutputs).map { x =>
val tensor = TensorflowToBigDL.toTensor(x._1, ByteOrder.LITTLE_ENDIAN)
.asInstanceOf[Tensor[Float]]
(tensor, x._2)
}
comparePair ++= forwardPairs
println(s"Compare ${comparePair.length} pairs of output in this graph")
if (backward) {
// get gradient input of tensorflow
val tfGradInputs = (0 until endPoints.length).map{
i =>
val t = tfNodes.asScala.filter(_.getName == s"grad_input$i")(0)
.getAttrMap.get("value").getTensor
val tensor = TensorflowToBigDL.toTensor(t, ByteOrder.LITTLE_ENDIAN)
tensor
}
val gradInputs = if (endPoints.length == 1) {
tfGradInputs(0)
} else {
val gradInputsTable = T()
tfGradInputs.foreach {
case output =>
gradInputsTable.insert[Tensor[_]](output)
}
gradInputsTable
}
// check shape equality here
for (i <- 0 until endPoints.length) {
bigdlOutputs(i).size() should be(tfGradInputs(i).size())
}
// find all gradients tensor of variables in tensorflow graph
val tfGradTensorsMap = context.tensorNames().map{
node =>
val t = tfNodes.asScala.filter(_.getName.contains(node + "_grad"))(0)
t.getName ->
TensorflowToBigDL
.toTensor(t.getAttrMap.get("value").getTensor, ByteOrder.LITTLE_ENDIAN)
.asInstanceOf[Tensor[Float]]
}.toMap
// do backward
model.backward(input, gradInputs)
val pairs = context.tensorNames().map { x =>
val name = s"${x}_grad"
var tensor = tfGradTensorsMap.get(name).orNull
var (_, grad, trans) = context(x)
trans match {
case Some(transpose) =>
for ((firstDim, secondDIm) <- transpose) {
tensor = tensor.transpose(firstDim, secondDIm)
}
tensor = tensor.contiguous()
case None =>
}
(tensor, grad)
}.toSeq.filter(_._1 != null)
comparePair ++= pairs
println(s"Compare ${pairs.length} pairs of gradient in this graph")
}
tmpLocation.deleteOnExit()
comparePair
}
private def processPath(path: String): String = {
if (path.contains(":")) {
path.substring(1)
} else {
path
}
}
}
| wzhongyuan/BigDL | spark/dl/src/test/scala/com/intel/analytics/bigdl/utils/tf/TensorflowLoaderSpec.scala | Scala | apache-2.0 | 26,782 |
import org.junit.runner._
import org.openqa.selenium.phantomjs.PhantomJSDriver
import org.specs2.mutable._
import org.specs2.runner._
import play.api.test._
/**
* add your integration spec here.
* An integration test will fire up a whole play application in a real (or headless) browser
*/
@RunWith(classOf[JUnitRunner])
class IntegrationSpec extends Specification {
"Application" should {
"work from within a browser" in new WithPhantomJs {
browser.goTo("http://localhost:" + port)
browser.pageSource must contain("GitHubスカウターオンライン")
}
}
}
| saturday06/github-scouter.net | test/IntegrationSpec.scala | Scala | mit | 593 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.util
import java.io.{File, IOException}
import java.text.SimpleDateFormat
import java.util
import scala.collection.JavaConverters._
import scala.collection.mutable
import scala.collection.mutable.ListBuffer
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.Path
import org.apache.hadoop.mapred.JobConf
import org.apache.hadoop.mapreduce.Job
import org.apache.spark.sql.execution.command.{AlterPartitionModel, DataMapField, Field, PartitionerField}
import org.apache.carbondata.core.constants.CarbonCommonConstants
import org.apache.carbondata.core.datamap.Segment
import org.apache.carbondata.core.datastore.block.{SegmentProperties, TableBlockInfo}
import org.apache.carbondata.core.datastore.filesystem.CarbonFile
import org.apache.carbondata.core.datastore.impl.FileFactory
import org.apache.carbondata.core.metadata.{AbsoluteTableIdentifier, CarbonTableIdentifier, SegmentFileStore}
import org.apache.carbondata.core.metadata.schema.PartitionInfo
import org.apache.carbondata.core.metadata.schema.partition.PartitionType
import org.apache.carbondata.core.metadata.schema.table.CarbonTable
import org.apache.carbondata.core.mutate.CarbonUpdateUtil
import org.apache.carbondata.core.readcommitter.TableStatusReadCommittedScope
import org.apache.carbondata.core.statusmanager.SegmentStatusManager
import org.apache.carbondata.core.util.CarbonUtil
import org.apache.carbondata.core.util.path.CarbonTablePath
import org.apache.carbondata.hadoop.CarbonInputSplit
import org.apache.carbondata.hadoop.api.{CarbonInputFormat, CarbonTableInputFormat}
import org.apache.carbondata.hadoop.util.CarbonInputFormatUtil
import org.apache.carbondata.processing.loading.model.CarbonLoadModel
import org.apache.carbondata.spark.util.CommonUtil
object PartitionUtils {
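/**
 * Parse a comma-separated LIST partition definition into value groups, keeping
 * parenthesised values together, e.g. "(1, 2), 3" => List(List("1", "2"), List("3")).
 */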
def getListInfo(originListInfo: String): List[List[String]] = {
var listInfo = ListBuffer[List[String]]()
var templist = ListBuffer[String]()
val arr = originListInfo.split(",")
.map(_.trim())
var groupEnd = true
val iter = arr.iterator
while (iter.hasNext) {
val value = iter.next()
if (value.startsWith("(")) {
templist += value.replace("(", "").trim()
groupEnd = false
} else if (value.endsWith(")")) {
templist += value.replace(")", "").trim()
listInfo += templist.toList
templist.clear()
groupEnd = true
} else {
if (groupEnd) {
templist += value
listInfo += templist.toList
templist.clear()
} else {
templist += value
}
}
}
listInfo.toList
}
/**
* verify the add/split information and update the partitionInfo:
* 1. update rangeInfo/listInfo
* 2. update partitionIds
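* For example, adding to the default partition (id 0) of a RANGE table with
* rangeInfo ["2017", "2018"] using splitInfo ["2019"] yields rangeInfo ["2017", "2018", "2019"].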
*/
def updatePartitionInfo(partitionInfo: PartitionInfo, partitionIdList: List[Int],
partitionId: Int, splitInfo: List[String], timestampFormatter: SimpleDateFormat,
dateFormatter: SimpleDateFormat): Unit = {
val columnDataType = partitionInfo.getColumnSchemaList.get(0).getDataType
val index = partitionIdList.indexOf(partitionId)
if (index < 0) {
throw new IllegalArgumentException("Invalid Partition Id " + partitionId +
"\n Use show partitions table_name to get the list of valid partitions")
}
if (partitionInfo.getPartitionType == PartitionType.RANGE) {
val rangeInfo = partitionInfo.getRangeInfo.asScala.toList
val newRangeInfo = partitionId match {
case 0 => rangeInfo ++ splitInfo
case _ => rangeInfo.take(index - 1) ++ splitInfo ++
rangeInfo.takeRight(rangeInfo.size - index)
}
CommonUtil.validateRangeInfo(newRangeInfo, columnDataType,
timestampFormatter, dateFormatter)
partitionInfo.setRangeInfo(newRangeInfo.asJava)
} else if (partitionInfo.getPartitionType == PartitionType.LIST) {
val originList = partitionInfo.getListInfo.asScala.map(_.asScala.toList).toList
if (partitionId != 0) {
val targetListInfo = partitionInfo.getListInfo.get(index - 1)
CommonUtil.validateSplitListInfo(targetListInfo.asScala.toList, splitInfo, originList)
} else {
CommonUtil.validateAddListInfo(splitInfo, originList)
}
val addListInfo = PartitionUtils.getListInfo(splitInfo.mkString(","))
val newListInfo = partitionId match {
case 0 => originList ++ addListInfo
case _ => originList.take(index - 1) ++ addListInfo ++
originList.takeRight(originList.size - index)
}
partitionInfo.setListInfo(newListInfo.map(_.asJava).asJava)
}
if (partitionId == 0) {
partitionInfo.addPartition(splitInfo.size)
} else {
partitionInfo.splitPartition(index, splitInfo.size)
}
}
/**
* Used for alter table partition commands to get segmentProperties on the Spark side
* @param identifier
* @param segmentId
* @param oldPartitionIdList Task id group before partition info is changed
* @return
*/
def getSegmentProperties(identifier: AbsoluteTableIdentifier, segmentId: String,
partitionIds: List[String], oldPartitionIdList: List[Int],
partitionInfo: PartitionInfo,
carbonTable: CarbonTable): SegmentProperties = {
val tableBlockInfoList =
getPartitionBlockList(
identifier,
segmentId,
partitionIds,
oldPartitionIdList,
partitionInfo,
carbonTable)
val footer = CarbonUtil.readMetadatFile(tableBlockInfoList.get(0))
val segmentProperties = new SegmentProperties(footer.getColumnInTable,
footer.getSegmentInfo.getColumnCardinality)
segmentProperties
}
def getPartitionBlockList(identifier: AbsoluteTableIdentifier, segmentId: String,
partitionIds: List[String], oldPartitionIdList: List[Int],
partitionInfo: PartitionInfo,
carbonTable: CarbonTable): java.util.List[TableBlockInfo] = {
val jobConf = new JobConf(new Configuration)
val job = new Job(jobConf)
val format = CarbonInputFormatUtil
.createCarbonTableInputFormat(identifier, partitionIds.asJava, job)
CarbonInputFormat.setTableInfo(job.getConfiguration, carbonTable.getTableInfo)
val splits = format.getSplitsOfOneSegment(job, segmentId,
oldPartitionIdList.map(_.asInstanceOf[Integer]).asJava, partitionInfo)
val blockList = splits.asScala.map(_.asInstanceOf[CarbonInputSplit])
val tableBlockInfoList = CarbonInputSplit.createBlocks(blockList.asJava)
tableBlockInfoList
}
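/**
 * Delete the carbondata and index files that belonged to the partition before the alter
 * operation (those whose file-name timestamp differs from the current fact timestamp),
 * then merge the segment file and update the table status accordingly.
 */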
@throws(classOf[IOException])
def deleteOriginalCarbonFile(alterPartitionModel: AlterPartitionModel,
identifier: AbsoluteTableIdentifier,
partitionIds: List[String], dbName: String, tableName: String,
partitionInfo: PartitionInfo): Unit = {
val carbonLoadModel = alterPartitionModel.carbonLoadModel
val segmentId = alterPartitionModel.segmentId
val oldPartitionIds = alterPartitionModel.oldPartitionIds
val newTime = carbonLoadModel.getFactTimeStamp
val tablePath = carbonLoadModel.getTablePath
val tableBlockInfoList =
getPartitionBlockList(identifier, segmentId, partitionIds, oldPartitionIds,
partitionInfo, carbonLoadModel.getCarbonDataLoadSchema.getCarbonTable).asScala
val pathList: util.List[String] = new util.ArrayList[String]()
tableBlockInfoList.foreach{ tableBlockInfo =>
val path = tableBlockInfo.getFilePath
val timestamp = CarbonTablePath.DataFileUtil.getTimeStampFromFileName(path)
if (timestamp.toLong != newTime) {
// add carbondata file
pathList.add(path)
// add index file
val version = tableBlockInfo.getVersion
val taskNo = CarbonTablePath.DataFileUtil.getTaskNo(path)
val batchNo = CarbonTablePath.DataFileUtil.getBatchNoFromTaskNo(taskNo)
val taskId = CarbonTablePath.DataFileUtil.getTaskIdFromTaskNo(taskNo)
val bucketNumber = CarbonTablePath.DataFileUtil.getBucketNo(path)
val indexFilePath =
new Path(new Path(path).getParent,
CarbonTablePath.getCarbonIndexFileName(taskId,
bucketNumber.toInt,
batchNo,
timestamp,
segmentId)).toString
        // indexFilePath could be duplicated when multiple data files relate to one index file
if (indexFilePath != null && !pathList.contains(indexFilePath)) {
pathList.add(indexFilePath)
}
}
}
val files: util.List[File] = new util.ArrayList[File]()
for (path <- pathList.asScala) {
val file = new File(path)
files.add(file)
}
CarbonUtil.deleteFiles(files.asScala.toArray)
if (!files.isEmpty) {
val carbonTable = alterPartitionModel.carbonLoadModel.getCarbonDataLoadSchema.getCarbonTable
val updatedSegFile: String = mergeAndUpdateSegmentFile(alterPartitionModel,
identifier,
segmentId,
carbonTable,
files.asScala)
val segmentFiles = Seq(new Segment(alterPartitionModel.segmentId, updatedSegFile, null))
.asJava
if (!CarbonUpdateUtil.updateTableMetadataStatus(
new util.HashSet[Segment](Seq(new Segment(alterPartitionModel.segmentId,
null, null)).asJava),
carbonTable,
alterPartitionModel.carbonLoadModel.getFactTimeStamp.toString,
true,
new util.ArrayList[Segment](0),
new util.ArrayList[Segment](segmentFiles), "")) {
        throw new IOException("Data update failed due to failure in table status update.")
}
}
}
/**
* Used to extract PartitionerFields for aggregation datamaps.
* This method will keep generating partitionerFields until the sequence of
* partition column is broken.
*
   * For example: if x, y, z are partition columns in the main table, then a child table will be
   * partitioned only if it has one of List("x,y,z", "x,y", "x") as its projection columns.
*
*
*/
def getPartitionerFields(allPartitionColumn: Seq[String],
fieldRelations: mutable.LinkedHashMap[Field, DataMapField]): Seq[PartitionerField] = {
def generatePartitionerField(partitionColumn: List[String],
partitionerFields: Seq[PartitionerField]): Seq[PartitionerField] = {
partitionColumn match {
case head :: tail =>
// Collect the first relation which matched the condition
val validRelation = fieldRelations.zipWithIndex.collectFirst {
case ((field, dataMapField), index) if
dataMapField.columnTableRelationList.getOrElse(Seq()).nonEmpty &&
head.equals(dataMapField.columnTableRelationList.get.head.parentColumnName) &&
dataMapField.aggregateFunction.isEmpty =>
(PartitionerField(field.name.get,
field.dataType,
field.columnComment), allPartitionColumn.indexOf(head))
}
if (validRelation.isDefined) {
val (partitionerField, index) = validRelation.get
            // if a relation is found then check whether the number of partitionerFields already
            // found equals the index of this element.
            // If x with index 1 is found then there should be exactly 1 element already found.
            // If z with index 2 comes directly after x then this check fails, as 1 element was
            // skipped in between: the index would be 2 while the number of elements found is
            // only 1. In that case return an empty sequence so that the aggregate table is not
            // partitioned on any column.
if (index == partitionerFields.length) {
generatePartitionerField(tail, partitionerFields :+ partitionerField)
} else {
Seq.empty
}
} else {
            // if not found then continue searching through the rest of the elements, because the
            // rest of the elements can also decide whether the table has to be partitioned or not.
generatePartitionerField(tail, partitionerFields)
}
case Nil =>
// if end of list then return fields.
partitionerFields
}
}
generatePartitionerField(allPartitionColumn.toList, Seq.empty)
}
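  // Illustrative walk-through of the prefix rule implemented above (the column names are
  // hypothetical, not taken from a real table definition): with main-table partition columns
  // Seq("x", "y", "z"), a child table projecting x and y matches x at index 0 and y at index 1,
  // so PartitionerFields for x and y are produced; a child table projecting y but not x finds y
  // at index 1 while no field has matched yet (length 0), so Seq.empty is returned and the child
  // table stays unpartitioned.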
private def mergeAndUpdateSegmentFile(alterPartitionModel: AlterPartitionModel,
identifier: AbsoluteTableIdentifier,
segmentId: String,
carbonTable: CarbonTable, filesToBeDelete: Seq[File]) = {
val metadataDetails =
SegmentStatusManager.readTableStatusFile(
CarbonTablePath.getTableStatusFilePath(carbonTable.getTablePath))
val segmentFile =
metadataDetails.find(_.getLoadName.equals(segmentId)).get.getSegmentFile
var allSegmentFiles: Seq[CarbonFile] = Seq.empty[CarbonFile]
val file = SegmentFileStore.writeSegmentFile(
carbonTable,
alterPartitionModel.segmentId,
System.currentTimeMillis().toString)
if (segmentFile != null) {
allSegmentFiles ++= FileFactory.getCarbonFile(
SegmentFileStore.getSegmentFilePath(carbonTable.getTablePath, segmentFile)) :: Nil
}
val updatedSegFile = {
val carbonFile = FileFactory.getCarbonFile(
SegmentFileStore.getSegmentFilePath(carbonTable.getTablePath, file))
allSegmentFiles ++= carbonFile :: Nil
val mergedSegFileName = SegmentFileStore.genSegmentFileName(
segmentId,
alterPartitionModel.carbonLoadModel.getFactTimeStamp.toString)
val tmpFile = mergedSegFileName + "_tmp"
val segmentStoreFile = SegmentFileStore.mergeSegmentFiles(
tmpFile,
CarbonTablePath.getSegmentFilesLocation(carbonTable.getTablePath),
allSegmentFiles.toArray)
val indexFiles = segmentStoreFile.getLocationMap.values().asScala.head.getFiles
filesToBeDelete.foreach(f => indexFiles.remove(f.getName))
SegmentFileStore.writeSegmentFile(
segmentStoreFile,
CarbonTablePath.getSegmentFilesLocation(carbonTable.getTablePath) +
CarbonCommonConstants.FILE_SEPARATOR + mergedSegFileName + CarbonTablePath.SEGMENT_EXT)
carbonFile.delete()
FileFactory.getCarbonFile(
SegmentFileStore.getSegmentFilePath(
carbonTable.getTablePath, tmpFile + CarbonTablePath.SEGMENT_EXT)).delete()
mergedSegFileName + CarbonTablePath.SEGMENT_EXT
}
updatedSegFile
}
}
| jatin9896/incubator-carbondata | integration/spark-common/src/main/scala/org/apache/spark/util/PartitionUtils.scala | Scala | apache-2.0 | 15,096 |
package springnz.sparkplug.executor
import akka.actor.ActorRef
import springnz.sparkplug.executor.MessageTypes.JobRequest
import springnz.sparkplug.core.SparkOperation
object MessageTypes {
case object ClientReady
case object ServerReady
case object ShutDown
case object CancelAllJobs
case class ServerError(reason: Throwable)
case class JobRequest(factoryClassName: String, data: Option[Any] = None)
case class JobSuccess(jobRequest: JobRequest, response: Any)
case class JobFailure(jobRequest: JobRequest, reason: Throwable)
class SparkplugException(message: String) extends Exception(message)
}
object InternalMessageTypes {
case class RoutedRequest(jobRequest: JobRequest, originator: ActorRef)
}
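
// A minimal sketch of how a caller might interpret replies built from the message types above.
// Everything here is illustrative only; the factory class name in the comment below is
// hypothetical and not part of this project.
object MessageTypesUsageSketch {
  import MessageTypes._

  // e.g. executor ! JobRequest("com.example.WordCountFactory"), then handle the eventual reply:
  def describe(reply: Any): String = reply match {
    case JobSuccess(request, result) => s"job ${request.factoryClassName} returned $result"
    case JobFailure(request, reason) => s"job ${request.factoryClassName} failed: ${reason.getMessage}"
    case ServerError(reason)         => s"server error: ${reason.getMessage}"
    case other                       => s"unexpected reply: $other"
  }
}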
| springnz/sparkplug | sparkplug-executor/src/main/scala/springnz/sparkplug/executor/MessageTypes.scala | Scala | mit | 728 |
package pep_094
object Solution {
val MAX = 1000000000
  // Almost-equilateral triangles have integer sides (a, a, b) with b = a - 1 or b = a + 1.
  // a is generated odd, so the base b is even and the half-base b / 2 is exact; the area
  // (b / 2) * h is then integral exactly when the height h is integral.
  val ats = for {
    a <- Iterator.from(3, 2).takeWhile(_ < MAX / 3).map(_.toLong)
    b <- Iterator(a - 1, a + 1)
    b2 = b / 2
    hh = a * a - b2 * b2 // squared height above the base b
    h = math.sqrt(hh)
    if h.isWhole()
    if h.toLong * h.toLong == hh // NEVER trust a Double with more than 15 digits
  } yield a + a + b // the perimeter of a qualifying triangle
  def solve() = ats.takeWhile(_ < MAX).sum // sum of perimeters below MAX
}
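
// Why the exact Long round-trip check above matters: a small sketch, not part of the original
// solution. The constant below (2^53 + 1) is a hypothetical value chosen only to show that a
// Double loses integer precision past 15-16 digits, which is why `isWhole` alone is not enough.
object DoublePrecisionDemo {
  def main(args: Array[String]): Unit = {
    val big = 9007199254740993L     // 2^53 + 1, not exactly representable as a Double
    val asDouble = big.toDouble     // silently rounds to 9007199254740992.0
    println(asDouble.isWhole)       // true, even though precision was lost
    println(asDouble.toLong == big) // false: the round-trip comparison exposes the loss
  }
}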
| filippovitale/pe | pe-solution/src/main/scala/pep_094/Solution.scala | Scala | mit | 404 |
package nasa.nccs.utilities
import java.util.jar.JarFile
import com.joestelmach.natty
import ucar.nc2.time.CalendarDate
import scala.collection.JavaConversions._
import scala.collection.JavaConverters._
import scala.collection.mutable
import org.slf4j.Logger
object cdsutils {
def flatlist[T]( values: Option[T]* ): List[T] = values.flatten.toList
def getInstance[T]( cls: Class[T] ) = cls.getConstructor().newInstance()
def findNonNull[T]( values: T* ): Option[T] = values.toList.find( _ != null )
def cdata(obj: Any): String = "<![CDATA[\\n " + obj.toString + "\\n]]>"
def isValid(obj: Any): Boolean = Option(obj) match { case Some(x) => true; case None => false }
def getProjectJars: Array[JarFile] = {
import java.io.File
val cpitems = System.getProperty("java.class.path").split(File.pathSeparator)
for ( cpitem <- cpitems; fileitem = new File(cpitem); if fileitem.isFile && fileitem.getName.toLowerCase.endsWith(".jar") ) yield new JarFile(fileitem)
}
def ptime[R]( label: String)(block: => R): R = {
val t0 = System.nanoTime()
val result = block
val t1 = System.nanoTime()
println( "%s: Time = %.4f s".format( label, (t1-t0)/1.0E9 ))
result
}
def time[R](logger:Logger, label: String)(block: => R): R = {
val t0 = System.nanoTime()
val result = block
val t1 = System.nanoTime()
logger.debug( "%s: Time = %.4f s".format( label, (t1-t0)/1.0E9 ))
result
}
def getJarAttribute(jarFile: JarFile, attribute_name: String ): String = {
val manifest = jarFile.getManifest
if( isValid(manifest) ) manifest.getMainAttributes.getValue(attribute_name) else ""
}
def getClassesFromJar(jarFile: JarFile): Iterator[Class[_]] = {
import java.net.{URL, URLClassLoader}, java.util.jar.JarEntry
val cloader: URLClassLoader = URLClassLoader.newInstance(Array(new URL("jar:file:" + jarFile.getName + "!/")))
for (je: JarEntry <- jarFile.entries; ename = je.getName; if ename.endsWith(".class");
cls = cloader.loadClass(ename.substring(0, ename.length - 6).replace('/', '.')) ) yield cls
}
object dateTimeParser {
import com.joestelmach.natty
private val parser = new natty.Parser()
def parse(input: String): CalendarDate = {
val caldates = mutable.ListBuffer[CalendarDate]()
val groups = parser.parse(input).toList
for (group: natty.DateGroup <- groups; date: java.util.Date <- group.getDates.toList) caldates += CalendarDate.of(date)
assert( caldates.size == 1, " DateTime Parser Error: parsing '%s'".format(input) )
caldates.head
}
}
// def loadExtensionModule( jar_file: String, module: Class ): Unit = {
// var classLoader = new java.net.URLClassLoader( Array(new java.io.File( jar_file ).toURI.toURL ), this.getClass.getClassLoader)
// var clazzExModule = classLoader.loadClass(module.GetClass.GetName + "$") // the suffix "$" is for Scala "object",
// try {
// //"MODULE$" is a trick, and I'm not sure about "get(null)"
// var module = clazzExModule.getField("MODULE$").get(null).asInstanceOf[module]
// } catch {
// case e: java.lang.ClassCastException =>
// printf(" - %s is not Module\\n", clazzExModule)
// }
//
// }
}
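
// A small usage sketch for the timing helpers in cdsutils above; the label and the workload are
// arbitrary examples, not taken from the surrounding project.
object timingUsageSketch extends App {
  val logger = org.slf4j.LoggerFactory.getLogger("timingUsageSketch")
  // ptime prints the elapsed time and returns the block's result unchanged
  val total = cdsutils.ptime("sum 1..1000000") { (1L to 1000000L).sum }
  // time logs the elapsed time at debug level instead of printing it
  cdsutils.time(logger, "same sum, logged") { (1L to 1000000L).sum }
  println(total)
}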
object dateParseTest extends App {
val caldate:CalendarDate = cdsutils.dateTimeParser.parse( "10/10/1998 5:00 GMT")
println( caldate.toString )
}
| nasa-nccs-cds/esgfWpsApi | src/main/scala/nasa/nccs/utilities/generic.scala | Scala | gpl-2.0 | 3,405 |
package sangria.util
import language.postfixOps
import scala.concurrent.{Await, Future}
import scala.concurrent.duration._
trait AwaitSupport {
implicit class WithAwait[T](f: Future[T]) {
def await = Await.result(f, 2 seconds)
}
}
| narahari92/sangria | src/test/scala/sangria/util/AwaitSupport.scala | Scala | apache-2.0 | 242 |
/*
* Copyright 2001-2013 Artima, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.scalatest
import org.scalactic.{Equality, NormalizingEquality, Every}
import org.scalactic.StringNormalizations._
import SharedHelpers._
import FailureMessages.decorateToStringValue
import Matchers._
import exceptions.TestFailedException
class EveryShouldContainSpec extends Spec {
object `a List` {
val xs: Every[String] = Every("hi", "hi", "hi")
val caseLists: Every[String] = Every("tell", "them", "Hi")
object `when used with contain (value) syntax` {
def `should do nothing if valid, else throw a TFE with an appropriate error message` {
xs should contain ("hi")
val e1 = intercept[TestFailedException] {
xs should contain ("ho")
}
e1.message.get should be (Resources.didNotContainExpectedElement(decorateToStringValue(xs), "\\"ho\\""))
e1.failedCodeFileName.get should be ("EveryShouldContainSpec.scala")
e1.failedCodeLineNumber.get should be (thisLineNumber - 4)
}
def `should use the implicit Equality in scope` {
xs should contain ("hi")
intercept[TestFailedException] {
xs should contain ("ho")
}
implicit val e = new Equality[String] {
def areEqual(a: String, b: Any): Boolean = a != b
}
xs should contain ("ho")
intercept[TestFailedException] {
xs should contain ("hi")
}
}
def `should use an explicitly provided Equality` {
intercept[TestFailedException] {
caseLists should contain ("HI")
}
(caseLists should contain ("HI")) (decided by defaultEquality afterBeing lowerCased)
(caseLists should contain ("HI")) (after being lowerCased)
(caseLists should contain ("HI ")) (after being lowerCased and trimmed)
implicit val e = new Equality[String] {
def areEqual(a: String, b: Any): Boolean = a != b
}
(xs should contain ("hi")) (decided by defaultEquality[String])
}
@Ignore def `should minimize normalization if an implicit NormalizingEquality is in scope` {
intercept[TestFailedException] {
caseLists should contain ("HI")
}
var normalizedInvokedCount = 0
implicit val e = new NormalizingEquality[String] {
def normalized(s: String): String = {
normalizedInvokedCount += 1
s.toLowerCase
}
def normalizedCanHandle(b: Any): Boolean = b.isInstanceOf[String]
def normalizedOrSame(b: Any): Any =
b match {
case s: String => normalized(s)
case _ => b
}
}
caseLists should contain ("HI")
normalizedInvokedCount should be (4)
}
}
object `when used with not contain value syntax` {
def `should do nothing if valid, else throw a TFE with an appropriate error message` {
xs should not contain "ho"
val e3 = intercept[TestFailedException] {
xs should not contain "hi"
}
e3.message.get should be (Resources.containedExpectedElement(decorateToStringValue(xs), "\\"hi\\""))
e3.failedCodeFileName.get should be ("EveryShouldContainSpec.scala")
e3.failedCodeLineNumber.get should be (thisLineNumber - 4)
}
def `should use the implicit Equality in scope` {
xs should not contain "ho"
intercept[TestFailedException] {
xs should not contain "hi"
}
implicit val e = new Equality[String] {
def areEqual(a: String, b: Any): Boolean = a != b
}
xs should not contain "hi"
intercept[TestFailedException] {
xs should not contain "ho"
}
}
def `should use an explicitly provided Equality` {
caseLists should not contain "HI"
caseLists should not contain "HI "
(caseLists should not contain "HI ") (decided by defaultEquality afterBeing lowerCased)
(caseLists should not contain "HI ") (after being lowerCased)
intercept[TestFailedException] {
(caseLists should not contain "HI") (decided by defaultEquality afterBeing lowerCased)
}
intercept[TestFailedException] {
(caseLists should not contain "HI ") (after being lowerCased and trimmed)
}
}
@Ignore def `should minimize normalization if an implicit NormalizingEquality is in scope` {
caseLists should not contain "HI"
var normalizedInvokedCount = 0
implicit val e = new NormalizingEquality[String] {
def normalized(s: String): String = {
normalizedInvokedCount += 1
s.toLowerCase
}
def normalizedCanHandle(b: Any): Boolean = b.isInstanceOf[String]
def normalizedOrSame(b: Any): Any =
b match {
case s: String => normalized(s)
case _ => b
}
}
intercept[TestFailedException] {
caseLists should not contain "HI"
}
normalizedInvokedCount should be (4)
}
}
object `when used with not (contain (value)) syntax` {
def `should do nothing if valid, else throw a TFE with an appropriate error message` {
xs should not (contain ("ho"))
val e3 = intercept[TestFailedException] {
xs should not (contain ("hi"))
}
e3.message.get should be (Resources.containedExpectedElement(decorateToStringValue(xs), "\\"hi\\""))
e3.failedCodeFileName.get should be ("EveryShouldContainSpec.scala")
e3.failedCodeLineNumber.get should be (thisLineNumber - 4)
}
def `should use the implicit Equality in scope` {
xs should not (contain ("ho"))
intercept[TestFailedException] {
xs should not (contain ("hi"))
}
implicit val e = new Equality[String] {
def areEqual(a: String, b: Any): Boolean = a != b
}
xs should not (contain ("hi"))
intercept[TestFailedException] {
xs should not (contain ("ho"))
}
}
def `should use an explicitly provided Equality` {
caseLists should not (contain ("HI"))
caseLists should not (contain ("HI "))
(caseLists should not (contain ("HI "))) (decided by defaultEquality afterBeing lowerCased)
(caseLists should not (contain ("HI "))) (after being lowerCased)
intercept[TestFailedException] {
(caseLists should not (contain ("HI"))) (decided by defaultEquality afterBeing lowerCased)
}
intercept[TestFailedException] {
(caseLists should not (contain ("HI"))) (after being lowerCased)
}
intercept[TestFailedException] {
(caseLists should not (contain ("HI "))) (after being lowerCased and trimmed)
}
}
@Ignore def `should minimize normalization if an implicit NormalizingEquality is in scope` {
caseLists should not (contain ("HI"))
var normalizedInvokedCount = 0
implicit val e = new NormalizingEquality[String] {
def normalized(s: String): String = {
normalizedInvokedCount += 1
s.toLowerCase
}
def normalizedCanHandle(b: Any): Boolean = b.isInstanceOf[String]
def normalizedOrSame(b: Any): Any =
b match {
case s: String => normalized(s)
case _ => b
}
}
intercept[TestFailedException] {
caseLists should not (contain ("HI"))
}
normalizedInvokedCount should be (4)
}
}
object `when used with (not contain value) syntax` {
def `should do nothing if valid, else throw a TFE with an appropriate error message` {
xs should (not contain "ho")
val e3 = intercept[TestFailedException] {
xs should (not contain "hi")
}
e3.message.get should be (Resources.containedExpectedElement(decorateToStringValue(xs), "\\"hi\\""))
e3.failedCodeFileName.get should be ("EveryShouldContainSpec.scala")
e3.failedCodeLineNumber.get should be (thisLineNumber - 4)
}
def `should use the implicit Equality in scope` {
xs should (not contain "ho")
intercept[TestFailedException] {
xs should (not contain "hi")
}
implicit val e = new Equality[String] {
def areEqual(a: String, b: Any): Boolean = a != b
}
xs should (not contain "hi")
intercept[TestFailedException] {
xs should (not contain "ho")
}
}
def `should use an explicitly provided Equality` {
caseLists should (not contain "HI")
caseLists should (not contain "HI ")
(caseLists should (not contain "HI ")) (decided by defaultEquality afterBeing lowerCased)
(caseLists should (not contain "HI ")) (after being lowerCased)
intercept[TestFailedException] {
(caseLists should (not contain "HI")) (decided by defaultEquality afterBeing lowerCased)
}
intercept[TestFailedException] {
(caseLists should (not contain "HI")) (after being lowerCased)
}
intercept[TestFailedException] {
(caseLists should (not contain "HI ")) (after being lowerCased and trimmed)
}
}
@Ignore def `should minimize normalization if an implicit NormalizingEquality is in scope` {
caseLists should (not contain "HI")
var normalizedInvokedCount = 0
implicit val e = new NormalizingEquality[String] {
def normalized(s: String): String = {
normalizedInvokedCount += 1
s.toLowerCase
}
def normalizedCanHandle(b: Any): Boolean = b.isInstanceOf[String]
def normalizedOrSame(b: Any): Any =
b match {
case s: String => normalized(s)
case _ => b
}
}
intercept[TestFailedException] {
caseLists should (not contain "HI")
}
normalizedInvokedCount should be (4)
}
}
object `when used with shouldNot contain value syntax` {
def `should do nothing if valid, else throw a TFE with an appropriate error message` {
xs shouldNot contain ("ho")
val e3 = intercept[TestFailedException] {
xs shouldNot contain ("hi")
}
e3.message.get should be (Resources.containedExpectedElement(decorateToStringValue(xs), "\\"hi\\""))
e3.failedCodeFileName.get should be ("EveryShouldContainSpec.scala")
e3.failedCodeLineNumber.get should be (thisLineNumber - 4)
}
def `should use the implicit Equality in scope` {
xs shouldNot contain ("ho")
intercept[TestFailedException] {
xs shouldNot contain ("hi")
}
implicit val e = new Equality[String] {
def areEqual(a: String, b: Any): Boolean = a != b
}
xs shouldNot contain ("hi")
intercept[TestFailedException] {
xs shouldNot contain ("ho")
}
}
def `should use an explicitly provided Equality` {
caseLists shouldNot contain ("HI")
caseLists shouldNot contain ("HI ")
(caseLists shouldNot contain ("HI ")) (decided by defaultEquality afterBeing lowerCased)
(caseLists shouldNot contain ("HI ")) (after being lowerCased)
intercept[TestFailedException] {
(caseLists shouldNot contain ("HI")) (decided by defaultEquality afterBeing lowerCased)
}
intercept[TestFailedException] {
(caseLists shouldNot contain ("HI ")) (after being lowerCased and trimmed)
}
}
@Ignore def `should minimize normalization if an implicit NormalizingEquality is in scope` {
caseLists shouldNot contain ("HI")
var normalizedInvokedCount = 0
implicit val e = new NormalizingEquality[String] {
def normalized(s: String): String = {
normalizedInvokedCount += 1
s.toLowerCase
}
def normalizedCanHandle(b: Any): Boolean = b.isInstanceOf[String]
def normalizedOrSame(b: Any): Any =
b match {
case s: String => normalized(s)
case _ => b
}
}
intercept[TestFailedException] {
caseLists shouldNot contain ("HI")
}
normalizedInvokedCount should be (4)
}
}
object `when used with shouldNot (contain (value)) syntax` {
def `should do nothing if valid, else throw a TFE with an appropriate error message` {
xs shouldNot (contain ("ho"))
val e3 = intercept[TestFailedException] {
xs shouldNot (contain ("hi"))
}
e3.message.get should be (Resources.containedExpectedElement(decorateToStringValue(xs), "\\"hi\\""))
e3.failedCodeFileName.get should be ("EveryShouldContainSpec.scala")
e3.failedCodeLineNumber.get should be (thisLineNumber - 4)
}
def `should use the implicit Equality in scope` {
xs shouldNot (contain ("ho"))
intercept[TestFailedException] {
xs shouldNot (contain ("hi"))
}
implicit val e = new Equality[String] {
def areEqual(a: String, b: Any): Boolean = a != b
}
xs shouldNot (contain ("hi"))
intercept[TestFailedException] {
xs shouldNot (contain ("ho"))
}
}
def `should use an explicitly provided Equality` {
caseLists shouldNot (contain ("HI"))
caseLists shouldNot (contain ("HI "))
(caseLists shouldNot (contain ("HI "))) (decided by defaultEquality afterBeing lowerCased)
(caseLists shouldNot (contain ("HI "))) (after being lowerCased)
intercept[TestFailedException] {
(caseLists shouldNot (contain ("HI"))) (decided by defaultEquality afterBeing lowerCased)
}
intercept[TestFailedException] {
(caseLists shouldNot (contain ("HI"))) (after being lowerCased)
}
intercept[TestFailedException] {
(caseLists shouldNot (contain ("HI "))) (after being lowerCased and trimmed)
}
}
@Ignore def `should minimize normalization if an implicit NormalizingEquality is in scope` {
caseLists shouldNot (contain ("HI"))
var normalizedInvokedCount = 0
implicit val e = new NormalizingEquality[String] {
def normalized(s: String): String = {
normalizedInvokedCount += 1
s.toLowerCase
}
def normalizedCanHandle(b: Any): Boolean = b.isInstanceOf[String]
def normalizedOrSame(b: Any): Any =
b match {
case s: String => normalized(s)
case _ => b
}
}
intercept[TestFailedException] {
caseLists shouldNot (contain ("HI"))
}
normalizedInvokedCount should be (4)
}
}
}
object `a collection of Lists` {
val list123s: Every[Every[Int]] = Every(Every(1, 2, 3), Every(1, 2, 3), Every(1, 2, 3))
val lists: Every[Every[Int]] = Every(Every(1, 2, 3), Every(1, 2, 3), Every(4, 5, 6))
val hiLists: Every[Every[String]] = Every(Every("hi"), Every("hi"), Every("hi"))
object `when used with contain (value) syntax` {
def `should do nothing if valid, else throw a TFE with an appropriate error message` {
all (list123s) should contain (1)
atLeast (2, lists) should contain (1)
atMost (2, lists) should contain (4)
no (lists) should contain (7)
val e1 = intercept[TestFailedException] {
all (lists) should contain (1)
}
e1.failedCodeFileName.get should be ("EveryShouldContainSpec.scala")
e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
e1.message should be (Some("'all' inspection failed, because: \\n" +
" at index 2, Many(4, 5, 6) did not contain element 1 (EveryShouldContainSpec.scala:" + (thisLineNumber - 5) + ") \\n" +
"in Many(Many(1, 2, 3), Many(1, 2, 3), Many(4, 5, 6))"))
val e2 = intercept[TestFailedException] {
all (lists) should not contain (4)
}
e2.failedCodeFileName.get should be ("EveryShouldContainSpec.scala")
e2.failedCodeLineNumber.get should be (thisLineNumber - 3)
e2.message should be (Some("'all' inspection failed, because: \\n" +
" at index 2, Many(4, 5, 6) contained element 4 (EveryShouldContainSpec.scala:" + (thisLineNumber - 5) + ") \\n" +
"in Many(Many(1, 2, 3), Many(1, 2, 3), Many(4, 5, 6))"))
val e3 = intercept[TestFailedException] {
all (lists) should contain (1)
}
e3.failedCodeFileName.get should be ("EveryShouldContainSpec.scala")
e3.failedCodeLineNumber.get should be (thisLineNumber - 3)
e3.message should be (Some("'all' inspection failed, because: \\n" +
" at index 2, Many(4, 5, 6) did not contain element 1 (EveryShouldContainSpec.scala:" + (thisLineNumber - 5) + ") \\n" +
"in Many(Many(1, 2, 3), Many(1, 2, 3), Many(4, 5, 6))"))
}
def `should use the implicit Equality in scope` {
intercept[TestFailedException] {
all (hiLists) should contain ("ho")
}
implicit val e = new Equality[String] {
def areEqual(a: String, b: Any): Boolean = a != b
}
all (hiLists) should contain ("ho")
intercept[TestFailedException] {
all (hiLists) should contain ("hi")
}
}
def `should use an explicitly provided Equality` {
intercept[TestFailedException] {
all (hiLists) should contain ("HI")
}
intercept[TestFailedException] {
all (hiLists) should contain ("HI ")
}
(all (hiLists) should contain ("HI")) (decided by defaultEquality afterBeing lowerCased)
(all (hiLists) should contain ("HI ")) (after being trimmed and lowerCased)
}
@Ignore def `should minimize normalization if an implicit NormalizingEquality is in scope` {
val hiHeHoLists: List[List[String]] = List(List("hi", "he", "ho"), List("hi", "he", "ho"), List("hi", "he", "ho"))
intercept[TestFailedException] {
all (hiHeHoLists) should contain ("HO")
}
var normalizedInvokedCount = 0
implicit val e = new NormalizingEquality[String] {
def normalized(s: String): String = {
normalizedInvokedCount += 1
s.toLowerCase
}
def normalizedCanHandle(b: Any): Boolean = b.isInstanceOf[String]
def normalizedOrSame(b: Any): Any =
b match {
case s: String => normalized(s)
case _ => b
}
}
all (hiHeHoLists) should contain ("HO")
normalizedInvokedCount should be (12)
}
}
object `when used with not contain value syntax` {
def `should do nothing if valid, else throw a TFE with an appropriate error message` {
all (list123s) should not contain 4
atLeast (2, lists) should not contain 4
atMost (2, lists) should not contain 4
no (list123s) should not contain 1 // I will recommend against double negatives, but we should test it
val e1 = intercept[TestFailedException] {
all (lists) should not contain 6
}
e1.failedCodeFileName.get should be ("EveryShouldContainSpec.scala")
e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
e1.message should be (Some("'all' inspection failed, because: \\n" +
" at index 2, Many(4, 5, 6) contained element 6 (EveryShouldContainSpec.scala:" + (thisLineNumber - 5) + ") \\n" +
"in Many(Many(1, 2, 3), Many(1, 2, 3), Many(4, 5, 6))"))
}
def `should use the implicit Equality in scope` {
all (hiLists) should not contain "ho"
intercept[TestFailedException] {
all (hiLists) should not contain "hi"
}
implicit val e = new Equality[String] {
def areEqual(a: String, b: Any): Boolean = a != b
}
all (hiLists) should not contain "hi"
intercept[TestFailedException] {
all (hiLists) should not contain "ho"
}
}
def `should use an explicitly provided Equality` {
all (hiLists) should not contain "HI"
all (hiLists) should not contain "HI "
intercept[TestFailedException] {
(all (hiLists) should not contain "HI") (decided by defaultEquality afterBeing lowerCased)
}
intercept[TestFailedException] {
(all (hiLists) should not contain "HI ") (after being trimmed and lowerCased)
}
}
}
object `when used with not (contain (value)) syntax` {
def `should do nothing if valid, else throw a TFE with an appropriate error message` {
all (list123s) should not (contain (4))
atLeast (2, lists) should not (contain (4))
atMost (2, lists) should not (contain (4))
no (list123s) should not (contain (1)) // I will recommend against double negatives, but we should test it
val e1 = intercept[TestFailedException] {
all (lists) should not (contain (6))
}
e1.failedCodeFileName.get should be ("EveryShouldContainSpec.scala")
e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
e1.message should be (Some("'all' inspection failed, because: \\n" +
" at index 2, Many(4, 5, 6) contained element 6 (EveryShouldContainSpec.scala:" + (thisLineNumber - 5) + ") \\n" +
"in Many(Many(1, 2, 3), Many(1, 2, 3), Many(4, 5, 6))"))
}
def `should use the implicit Equality in scope` {
all (hiLists) should not (contain ("ho"))
intercept[TestFailedException] {
all (hiLists) should not (contain ("hi"))
}
implicit val e = new Equality[String] {
def areEqual(a: String, b: Any): Boolean = a != b
}
all (hiLists) should not (contain ("hi"))
intercept[TestFailedException] {
all (hiLists) should not (contain ("ho"))
}
}
def `should use an explicitly provided Equality` {
all (hiLists) should not (contain ("HI"))
all (hiLists) should not (contain ("HI "))
intercept[TestFailedException] {
(all (hiLists) should not (contain ("HI"))) (decided by defaultEquality afterBeing lowerCased)
}
intercept[TestFailedException] {
(all (hiLists) should not (contain ("HI "))) (after being trimmed and lowerCased)
}
}
}
object `when used with (not contain value) syntax` {
def `should do nothing if valid, else throw a TFE with an appropriate error message` {
all (list123s) should (not contain 4)
atLeast (2, lists) should (not contain 4)
atMost (2, lists) should (not contain 4)
no (list123s) should (not contain 1) // I will recommend against double negatives, but we should test it
val e1 = intercept[TestFailedException] {
all (lists) should (not contain 6)
}
e1.failedCodeFileName.get should be ("EveryShouldContainSpec.scala")
e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
e1.message should be (Some("'all' inspection failed, because: \\n" +
" at index 2, Many(4, 5, 6) contained element 6 (EveryShouldContainSpec.scala:" + (thisLineNumber - 5) + ") \\n" +
"in Many(Many(1, 2, 3), Many(1, 2, 3), Many(4, 5, 6))"))
}
def `should use the implicit Equality in scope` {
all (hiLists) should (not contain "ho")
intercept[TestFailedException] {
all (hiLists) should (not contain "hi")
}
implicit val e = new Equality[String] {
def areEqual(a: String, b: Any): Boolean = a != b
}
all (hiLists) should (not contain "hi")
intercept[TestFailedException] {
all (hiLists) should (not contain "ho")
}
}
def `should use an explicitly provided Equality` {
all (hiLists) should (not contain "HI")
all (hiLists) should (not contain "HI ")
intercept[TestFailedException] {
(all (hiLists) should (not contain "HI")) (decided by defaultEquality afterBeing lowerCased)
}
intercept[TestFailedException] {
(all (hiLists) should (not contain "HI ")) (after being trimmed and lowerCased)
}
}
}
object `when used with shouldNot contain value syntax` {
def `should do nothing if valid, else throw a TFE with an appropriate error message` {
all (list123s) shouldNot contain (4)
atLeast (2, lists) shouldNot contain (4)
atMost (2, lists) shouldNot contain (4)
no (list123s) shouldNot contain (1) // I will recommend against double negatives, but we should test it
val e1 = intercept[TestFailedException] {
all (lists) shouldNot contain (6)
}
e1.failedCodeFileName.get should be ("EveryShouldContainSpec.scala")
e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
e1.message should be (Some("'all' inspection failed, because: \\n" +
" at index 2, Many(4, 5, 6) contained element 6 (EveryShouldContainSpec.scala:" + (thisLineNumber - 5) + ") \\n" +
"in Many(Many(1, 2, 3), Many(1, 2, 3), Many(4, 5, 6))"))
}
def `should use the implicit Equality in scope` {
all (hiLists) shouldNot contain ("ho")
intercept[TestFailedException] {
all (hiLists) shouldNot contain ("hi")
}
implicit val e = new Equality[String] {
def areEqual(a: String, b: Any): Boolean = a != b
}
all (hiLists) shouldNot contain ("hi")
intercept[TestFailedException] {
all (hiLists) shouldNot contain ("ho")
}
}
def `should use an explicitly provided Equality` {
all (hiLists) shouldNot contain ("HI")
all (hiLists) shouldNot contain ("HI ")
intercept[TestFailedException] {
(all (hiLists) shouldNot contain ("HI")) (decided by defaultEquality afterBeing lowerCased)
}
intercept[TestFailedException] {
(all (hiLists) shouldNot contain ("HI ")) (after being trimmed and lowerCased)
}
}
}
object `when used with shouldNot (contain (value)) syntax` {
def `should do nothing if valid, else throw a TFE with an appropriate error message` {
all (list123s) shouldNot (contain (4))
atLeast (2, lists) shouldNot (contain (4))
atMost (2, lists) shouldNot (contain (4))
no (list123s) shouldNot (contain (1)) // I will recommend against double negatives, but we should test it
val e1 = intercept[TestFailedException] {
all (lists) shouldNot (contain (6))
}
e1.failedCodeFileName.get should be ("EveryShouldContainSpec.scala")
e1.failedCodeLineNumber.get should be (thisLineNumber - 3)
e1.message should be (Some("'all' inspection failed, because: \\n" +
" at index 2, Many(4, 5, 6) contained element 6 (EveryShouldContainSpec.scala:" + (thisLineNumber - 5) + ") \\n" +
"in Many(Many(1, 2, 3), Many(1, 2, 3), Many(4, 5, 6))"))
}
def `should use the implicit Equality in scope` {
all (hiLists) shouldNot (contain ("ho"))
intercept[TestFailedException] {
all (hiLists) shouldNot (contain ("hi"))
}
implicit val e = new Equality[String] {
def areEqual(a: String, b: Any): Boolean = a != b
}
all (hiLists) shouldNot (contain ("hi"))
intercept[TestFailedException] {
all (hiLists) shouldNot (contain ("ho"))
}
}
def `should use an explicitly provided Equality` {
all (hiLists) shouldNot (contain ("HI"))
all (hiLists) shouldNot (contain ("HI "))
intercept[TestFailedException] {
(all (hiLists) shouldNot (contain ("HI"))) (decided by defaultEquality afterBeing lowerCased)
}
intercept[TestFailedException] {
(all (hiLists) shouldNot (contain ("HI "))) (after being trimmed and lowerCased)
}
}
}
}
}
| SRGOM/scalatest | scalatest-test/src/test/scala/org/scalatest/EveryShouldContainSpec.scala | Scala | apache-2.0 | 28,885 |
package com.giyeok.jparser.study.parsergen
import com.giyeok.jparser.NGrammar
import com.giyeok.jparser.examples.metalang.SimpleGrammars
import com.giyeok.jparser.metalang.MetaGrammar
import com.giyeok.jparser.parsergen.deprecated.{SimpleGen, SimpleGenGen}
import com.giyeok.jparser.utils.{AbstractEdge, AbstractGraph}
import com.giyeok.jparser.visualize.FigureGenerator.Spacing
import com.giyeok.jparser.visualize.{AbstractZestGraphWidget, BasicVisualizeResources, Interactable, NodeFigureGenerators}
import org.eclipse.draw2d.{ColorConstants, Figure, LineBorder}
import org.eclipse.swt.SWT
import org.eclipse.swt.events.{KeyEvent, KeyListener}
import org.eclipse.swt.layout.FillLayout
import org.eclipse.swt.widgets.{Composite, Display, Shell}
import org.eclipse.zest.core.viewers.GraphViewer
import org.eclipse.zest.core.widgets.{Graph, GraphConnection, ZestStyles}
class TopologyViewer(val simpleGen: SimpleGen) {
private val grammar = simpleGen.grammar
case class TopologyEdge(start: Int, end: Int, finishable: Boolean) extends AbstractEdge[Int]
class TopologyGraph(val nodes: Set[Int],
val edges: Set[TopologyEdge],
val edgesByStart: Map[Int, Set[TopologyEdge]],
val edgesByEnd: Map[Int, Set[TopologyEdge]])
extends AbstractGraph[Int, TopologyEdge, TopologyGraph] {
override def createGraph(nodes: Set[Int], edges: Set[TopologyEdge], edgesByStart: Map[Int, Set[TopologyEdge]], edgesByEnd: Map[Int, Set[TopologyEdge]]): TopologyGraph =
new TopologyGraph(nodes, edges, edgesByStart, edgesByEnd)
}
class SimpleGenGraphViewerWidget(parent: Composite, style: Int, val fig: NodeFigureGenerators[Figure])
extends Composite(parent, style)
with AbstractZestGraphWidget[Int, TopologyEdge, TopologyGraph]
with Interactable[Int, TopologyEdge, TopologyGraph] {
override val graphViewer: GraphViewer = new GraphViewer(this, style)
override val graphCtrl: Graph = graphViewer.getGraphControl
setLayout(new FillLayout())
override def createFigure(node: Int): Figure = {
val nodeFig = fig.fig.verticalFig(Spacing.Medium, Seq(
fig.fig.textFig(s"$node", fig.appear.default)
) ++ (simpleGen.nodes(node) map { k => fig.symbol.symbolPointerFig(grammar, k.symbolId, k.pointer) }))
nodeFig.setBackgroundColor(ColorConstants.buttonLightest)
nodeFig.setOpaque(true)
nodeFig.setBorder(new LineBorder(ColorConstants.darkGray))
nodeFig.setSize(nodeFig.getPreferredSize())
nodeFig
}
override def createConnection(edge: TopologyEdge): GraphConnection = {
val conn = new GraphConnection(graphCtrl, ZestStyles.CONNECTIONS_DIRECTED, nodesMap(edge.start), nodesMap(edge.end))
if (edge.finishable) {
conn.setText("finishable")
}
val count = edgesMap.keySet count { e => Set(e.start, e.end) == Set(edge.start, edge.end) }
conn.setCurveDepth(count * 20)
conn.setData(edge)
conn
}
}
def start(): Unit = {
val display = new Display()
val shell = new Shell(display)
val graphViewer = new SimpleGenGraphViewerWidget(shell, SWT.NONE, BasicVisualizeResources.nodeFigureGenerators)
var graph = new TopologyGraph(Set(), Set(), Map(), Map())
simpleGen.nodes.keys foreach { n => graph = graph.addNode(n) }
simpleGen.existables.edges foreach { e => graph = graph.addEdgeSafe(TopologyEdge(e.start, e.end, finishable = false)) }
simpleGen.finishableEdges foreach { e => graph = graph.addEdgeSafe(TopologyEdge(e._1, e._2, finishable = true)) }
graphViewer.addGraph(graph)
graphViewer.applyLayout(true)
graphViewer.addKeyListener(new KeyListener {
override def keyPressed(e: KeyEvent): Unit = {
e.keyCode match {
case 'R' | 'r' =>
graphViewer.applyLayout(true)
case _ => // do nothing
}
}
override def keyReleased(e: KeyEvent): Unit = {}
})
shell.setLayout(new FillLayout)
shell.open()
while (!shell.isDisposed()) {
if (!display.readAndDispatch()) {
display.sleep()
}
}
display.dispose()
}
}
object TopologyViewer {
def main(args: Array[String]): Unit = {
val grammar = NGrammar.fromGrammar(SimpleGrammars.arrayGrammar.toGrammar(MetaGrammar.translateForce))
new TopologyViewer(new SimpleGenGen(grammar).generateGenerator()).start()
}
}
| Joonsoo/moon-parser | study/src/main/scala/com/giyeok/jparser/study/parsergen/TopologyViewer.scala | Scala | mit | 4,751 |
package com.bgfurfeature.config
import scala.collection.mutable.ArrayBuffer
/**
* com.bgfurfeature.config
* Created by C.J.YOU on 2019/12/13.
*/
object Conf {
// parameters configuration
val nGram = 3
val updateFreq = 300000 // 5min
// api configuration
val segmentorHost = "http://localhost:8282"
// spark configuration
val master = "spark://localhost:7077"
val localDir = "/Users/Program/scala/data/tmp"
val perMaxRate = "5"
val interval = 3 // seconds
val parallelNum = "15"
val executorMem = "1G"
val concurrentJobs = "5"
val coresMax = "3"
// kafka configuration
val brokers = "localhost:9091,localhost:9092"
val zk = "localhost:2181"
val group = "wordFreqGroup"
val topics = "test"
// mysql configuration
val mysqlConfig = Map("url" -> "jdbc:mysql://localhost:3306/word_freq?characterEncoding=UTF-8", "username" -> "root", "password" -> "root")
val maxPoolSize = 5
val minPoolSize = 2
// user behavior
type DT = (String, ArrayBuffer[(Long, Long)])
  // Kafka record field layout
val INDEX_LOG_TIME = 0
val INDEX_LOG_USER = 1
val INDEX_LOG_ITEM = 2
val SEPERATOR = "\\t"
  // Window configuration
val INDEX_TIEMSTAMP = 1
val MAX_CNT = 25
val EXPIRE_DURATION = 60 * 60 * 24 * 3
var windowSize = 72 * 3600
// redis config
val RECORD_SZ = 2
var redisIp = "127.0.0.1"
var redisPort = 6379
var passwd = ""
val checkpointDir = "/Users/xiaolitao/Program/scala/userBehaviorStatistic/tmp"
val streamIntervel = 3
val partitionNumber = 2
val batchSize = 64
}
| bgfurfeature/AI | src/main/scala/com/bgfurfeature/config/Conf.scala | Scala | apache-2.0 | 1,532 |
/* Copyright 2015 Matt Silbernagel
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package services
import akka.actor.Actor
import models._
import play.api.Play.current
import play.api.db.slick.DB
import play.Logger
import java.sql.SQLException
import scala.compat.Platform
import scala.util.Random
case class ChildAndStandard(childId: Long, standardId: Long)
case class CreateAssessment(parentUid: Long, childId: Long, standardId: Long)
case class ScoreAssessment(assessmentId: Long, studentId: Long, questionId: Long, standardId: Long, score: Long)
case class AssessmentQuestion(assessment: Assesment, question: Question, picture: Option[Upload])
case class TestAssessment(e: String)
class AssessmentActor extends Actor {
private val random = new Random
def receive = {
case childAndStandard: ChildAndStandard => {
val asses = DB.withSession { implicit s =>
Assesments.create(Assesment(None, childAndStandard.childId, Platform.currentTime, None))
}
sender ! None
}
case te: TestAssessment => {
sender ! "Hello"
}
case assessment: CreateAssessment => {
sender ! create(assessment)
}
case scoreAssessment: ScoreAssessment => {
sender ! score(scoreAssessment)
}
case _ => sender ! akka.actor.Status.Failure(new Exception("nothing sent to do"))
}
/**
   * Finds a random question in the database based on the education level of the child
   * and the requested standard, restricted to questions the child has not yet been scored on.
*/
def create(createAssessment: CreateAssessment): Option[AssessmentQuestion] = {
DB.withSession { implicit s =>
//PersonService.isChildOf[AssessmentQuestion](createAssessment.parentUid,
//createAssessment.childId, cid => {
// this is a new assessment, so create a new assessment record
val assessment = Assesments.create(Assesment(None, createAssessment.childId, Platform.currentTime, None))
val question = chooseQuestion(
createAssessment.childId, createAssessment.standardId, true, None) getOrElse (Question(Some(-1L), "", None, None, None))
val pic = QuestionUploads.findUploadByQuestion(question.id.get);
Some(AssessmentQuestion(assessment, question, pic))
// })
}
}
def score(scoreAssessment: ScoreAssessment): Option[AssessmentQuestion] = {
DB.withSession { implicit s =>
      // Get the activity for this question...
Questions.findActivity(scoreAssessment.questionId) match {
case Some(activity) => {
val hws = activity.id map { activityId =>
// Create the score in the database...
val score = Scores.create(
Score(None, scoreAssessment.studentId, None, None, Some(activityId), Some(scoreAssessment.score), Platform.currentTime)
)
AssessmentHistories.create(AssessmentHistory(None, scoreAssessment.assessmentId, activityId, score.id.getOrElse(0)))
AssessmentHistories.findByAssessmentWithScore(scoreAssessment.assessmentId)
}
hws match {
case Some(history) => determineNextQuestion(history, history.length - 1, scoreAssessment)
case _ => None
}
}
case _ => None
}
}
}
def determineNextQuestion(history: List[AssessmentHistoryWithScore], place: Int, scoreAssessment: ScoreAssessment): Option[AssessmentQuestion] = {
DB.withSession { implicit s =>
val lastScore = history(place).score
val lastScoreValue = history(place).score map (_.score.get) getOrElse 0
if (lastThreeQuestionsScored(history)) {
None
} else {
// if the current score is not three, find a new question to ask
Assesments.find(scoreAssessment.assessmentId) match {
case Some(asses) => {
chooseQuestion(scoreAssessment.studentId, scoreAssessment.standardId, true, lastScore) map { question =>
val pic = QuestionUploads.findUploadByQuestion(question.id.get);
Some(AssessmentQuestion(asses, question, pic))
} getOrElse None
}
case _ => None
}
}
}
}
/**
* Find out if the last three scores are equal to three
*/
def lastThreeQuestionsScored(history: List[AssessmentHistoryWithScore]): Boolean = {
if (history.length < 3) {
return false;
} else {
val listIndex = history.length - 1
val lastScore = history(listIndex).score map (_.score.get) getOrElse 0
val secondLastScore = history(listIndex - 1).score map (_.score.get) getOrElse 0
val thirdLastScore = history(listIndex - 2).score map (_.score.get) getOrElse 0
return (lastScore == 3 && secondLastScore == 3 && thirdLastScore == 3)
}
}
/**
* This does the work of choosing the next question to ask
*/
def chooseQuestion(studentId: Long, standardId: Long, isRandom: Boolean, lastScored: Option[Score]): Option[Question] = {
DB.withSession { implicit s =>
val activities = Activities.filterByStandardLevelCategory(studentId, standardId, "question")
.filter(activity => {
activity.id map { aid =>
Scores.findByActivityAndStudent(aid, studentId) match {
case Some(activ) => false
case _ => true
}
} getOrElse true
})
if (activities.length < 1) {
Logger.debug("no questions to ask.. sorry")
None
} else {
        // if lastScored is set, then we need to filter again to only the statements that are appropriate
Logger.debug(s"lastScore: $lastScored");
val updatedActivities = lastScored.map { score =>
// get the score and activity and determine which statement it was scored for...
Scores.find(score.id getOrElse (0)) match {
case Some(sc) => {
Logger.debug(s"sc : $sc")
Logger.debug(s"finding acitivity with statements by id: ${sc.activityId.get}")
val currentSequence: Long = Activities.findWithStatements(sc.activityId.get) match {
case Some(aws) => {
aws.statements(0).sequence.getOrElse(1L)
}
case None => 0
}
Logger.debug(s"last scored was ${sc.score}");
Logger.debug(s"current statement sequence is $currentSequence")
val sequenceToUse = sc.score match {
case Some(1) => if (currentSequence > 1) currentSequence - 2 else 1
case Some(2) => if (currentSequence > 0) currentSequence - 1 else 1
case Some(3) => currentSequence
case Some(4) => currentSequence + 1
case Some(5) => currentSequence + 2
case _ => 1
}
Logger.debug(s"sequence to use is $sequenceToUse")
Statements.findBySequence(sequenceToUse, standardId, studentId) map { statement =>
Logger.debug(s"statement to use $statement")
activities.filter(activity => {
Activities.includesStatement(statement.id.get, activity.id.get)
})
} getOrElse {
activities
}
}
case None => activities
}
} getOrElse activities
        // choose a random activity and get the question for it...
if (isRandom) {
updatedActivities(random.nextInt(updatedActivities.length)).resourceId map (x =>
Questions.findByResourceId(x) getOrElse Question(Some(-1L), "", None, None, None)
)
} else {
None
}
}
}
}
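  // Illustration of the sequence adjustment above (the scores and sequence numbers are
  // hypothetical): if the previous question came from a statement with sequence 4, a score of 5
  // moves the search up to sequence 6, a score of 3 stays at 4, and a score of 1 steps back to 2;
  // the next question is then drawn only from activities tied to that statement.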
private def nextQuestion(assessment: Assesment, studentId: Long, standardId: Long): JsonQuestion = {
DB.withSession { implicit s =>
try {
// Find the student and his/her grade level
val (student: Person, edLevel: EducationLevel) = People.findWithEducationLevel(studentId);
// Find all statements in students grade level
val statements: List[Statement] = StatementLevels.findStatements(edLevel.id.get);
// Get all questions available to students grade level
val questions = for {
statement <- statements
q = Questions.findByStatement(statement.id.get)
} yield q._1
val flatQuestions = questions.flatten
        // filter out the questions already asked and answered
val filteredQuestions = flatQuestions.filter(q => {
Scores.findByQuestionAndStudent(q.id.get, studentId) match {
case Some(questionScore) => false
case None => true
}
})
// now choose a random one to ask
// in the future, this needs to be smarter but MVP is asking a random question
val finalQuestion = filteredQuestions(random.nextInt(filteredQuestions.length))
Questions.convertToJsonQuestion(finalQuestion, None, None)
} catch {
case e: java.util.NoSuchElementException => JsonQuestion(Some(-1), "No questions available!", None, None, None, None, None)
case e2: SQLException => {
// Let's back out of our assessment since we can't ask any questions
Assesments.delete(assessment.id.get)
JsonQuestion(Some(-1), e2.getMessage(), None, None, None, None, None)
}
}
}
}
}
| silbermm/proximal | app/services/AssessmentActor.scala | Scala | apache-2.0 | 9,888 |
package software.egger.jirapaymosync
import com.atlassian.jira.rest.client.api.domain.Issue
import software.egger.jirapaymosync.paymo.Task
class TaskListMatcher(jiraIssues: Iterable[Issue])
{
def matchWith(paymoTasks: Iterable[Task]): Iterable[TaskMatch] =
{
val matching = for (issue <- jiraIssues)
yield new TaskMatch(Some(issue), paymoTasks find (_.getName startsWith issue.getKey + " "))
val matchingTasks = matching.filter(_.paymoTask.isDefined).map(_.paymoTask.get)
val nonMatchingTasks = paymoTasks filterNot (matchingTasks.toList contains _)
matching ++ (nonMatchingTasks map (task => new TaskMatch(None, Some(task))))
}
}
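
// Matching rule sketch (the issue key and task names below are hypothetical): a Jira issue with
// key "AB-12" pairs with a Paymo task named "AB-12 Fix login" because the task name starts with
// the key followed by a space; Paymo tasks matching no issue come back as
// TaskMatch(None, Some(task)) so callers can treat them as orphans.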
| eggeral/jira-paymo-sync | src/main/scala/software/egger/jirapaymosync/TaskListMatcher.scala | Scala | apache-2.0 | 662 |
package com.enkidu.lignum.parsers.commons
import org.parboiled2
import org.parboiled2.{ CharPredicate, Rule0 }
import scala.language.implicitConversions
trait AbstractParser extends parboiled2.Parser with SpecialCharacters with Keywords{
protected implicit def str2Rule(s: String): Rule0 =
    if (s.endsWith("  ")) // two trailing spaces: the matched string must be followed by whitespace
      rule { str(s.dropRight(2)) ~ oneOrMore(spacing) }
    else if (s.endsWith(" ")) // one trailing space: optional whitespace may follow
      rule { str(s.dropRight(1)) ~ whitespace }
else rule { str(s) }
protected def whitespace: Rule0 = rule { zeroOrMore(spacing) }
private def spacing: Rule0 = rule { anyOf(" \t\u000C") | newLine | eolComment | traditionalComment }
private def newLine: Rule0 = rule { "\r\n" | '\n' }
private def traditionalComment: Rule0 = rule { "/*" ~ zeroOrMore(!("*/") ~ CharPredicate.All) ~ "*/" }
private def eolComment: Rule0 = rule { "//" ~ zeroOrMore(!("\n" | "\r\n") ~ CharPredicate.All) ~ ("\n" | "\r\n") }
protected def identifierChar = rule { CharPredicate.AlphaNum | "$" | "_" }
protected def identifierStart = rule { CharPredicate.Alpha | "$" | "_" }
protected def octalNumber: Rule0 = rule('0' ~ oneOrMore(octalDigit))
protected def decimalNumber: Rule0 = rule('0' | CharPredicate('1' to '9') ~ zeroOrMore(decimalDigit))
protected def hexNumber: Rule0 = rule("0" ~ ("x" | "X") ~ oneOrMore(hexDigit))
protected def binaryNumber: Rule0 = rule("0" ~ ("b" | "B") ~ oneOrMore(binaryDigit))
protected def octalDigit: Rule0 = rule(CharPredicate('0' to '7'))
protected def decimalDigit: Rule0 = rule(CharPredicate.Digit)
protected def hexDigit: Rule0 = rule(CharPredicate.HexDigit)
protected def binaryDigit: Rule0 = rule(CharPredicate('0', '1'))
protected def escapeSequence: Rule0 = rule { ("\\" ~ anyOf("btnfr\"'\\")) | octalEscape }
protected def octalEscape: Rule0 = rule {
"\\" ~ {
(CharPredicate('0' to '3') ~ octalDigit ~ octalDigit) |
octalDigit ~ optional(octalDigit)
}
}
}
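
// Grammar-string convention implied by str2Rule above (an assumption inferred from the dropRight
// counts, not documented elsewhere): writing "import  " with two trailing spaces demands at least
// one spacing token after the keyword, "import " with a single trailing space allows the optional
// `whitespace` rule, and a bare "import" consumes no whitespace at all.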
| marek1840/java-parser | src/main/scala/com/enkidu/lignum/parsers/commons/AbstractParser.scala | Scala | mit | 1,955 |
/*
* Copyright © 2014 Teo Klestrup, Carl Dybdahl
*
* This file is part of Republix.
*
* Republix is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Republix is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Republix. If not, see <http://www.gnu.org/licenses/>.
*/
package republix
import java.awt._
import java.awt.event._
import javax.swing._
package object ui {
def on(body: => Unit): ActionListener = new ActionListener() {
def actionPerformed(ev: ActionEvent) = { body }
}
def debug(comp: JComponent): Unit = {
comp.setBorder(BorderFactory.createLineBorder(Color.BLACK, 3))
}
def groupLayout(owner: JComponent): GroupLayout = {
val layout = new GroupLayout(owner)
layout.setAutoCreateGaps(true)
layout.setAutoCreateContainerGaps(true)
layout
}
def swing(body: => Unit) = SwingUtilities.invokeLater (new Runnable { def run() = {
body
}})
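	// Usage sketch for the helpers above (the component and model names are hypothetical):
	//   refreshButton.addActionListener(on { model.refresh() })
	//   swing { statusLabel.setText("done") } // marshal the UI update onto the Swing event thread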
} | teozkr/republix-online | republix/src/main/scala/republix/ui/package.scala | Scala | agpl-3.0 | 1,356 |
package models
import fr.njin.playoauth.common.OAuth
import fr.njin.playoauth.common.domain.OauthClient
import org.joda.time._
import scalikejdbc._
import scalikejdbc.async._
import scala.concurrent.Future
import java.util.UUID
/**
* User: bathily
* Date: 01/10/13
*/
case class App(pid: Long,
ownerId: Long,
owner: Option[User] = None,
id: String,
secret: String,
name: String,
description: String,
uri: String,
iconUri: Option[String],
redirectUris: Option[Seq[String]],
isWebApp: Boolean,
isNativeApp: Boolean,
createdAt: DateTime) extends ShortenedNames with OauthClient {
val icon = iconUri.getOrElse(controllers.routes.Assets.at("images/default.png").url)
def save()(implicit session: AsyncDBSession, cxt: EC): Future[App] = App.save(this)
def destroy()(implicit session: AsyncDBSession, cxt: EC): Future[App] = App.destroy(this)
val redirectUri: Option[String] = redirectUris.flatMap(_.headOption)
val authorized: Boolean = true
val allowedResponseType: Seq[String] = (isWebApp || isNativeApp match {
case true => Seq(OAuth.ResponseType.Token)
case _ => Seq.empty
}) ++ Seq(OAuth.ResponseType.Code)
val allowedGrantType: Seq[String] = Seq(OAuth.GrantType.AuthorizationCode, OAuth.GrantType.ClientCredentials, OAuth.GrantType.RefreshToken)
val issuedAt: Long = createdAt.getMillis
}
object App extends SQLSyntaxSupport[App] with ShortenedNames {
override val columnNames: Seq[String] = Seq("pid", "owner_id", "id", "secret", "name", "description", "uri", "icon_uri",
"redirect_uris", "is_web_app", "is_native_app", "created_at")
def apply(a: SyntaxProvider[App])(rs: WrappedResultSet): App = apply(a.resultName)(rs)
def apply(a: ResultName[App])(rs: WrappedResultSet): App = new App(
pid = rs.long(a.pid),
ownerId = rs.long(a.ownerId),
id = rs.string(a.id),
secret = rs.string(a.secret),
name = rs.string(a.name),
description = rs.string(a.description),
uri = rs.string(a.uri),
iconUri = rs.stringOpt(a.iconUri),
redirectUris = rs.stringOpt(a.redirectUris).map(_.split(",")),
//TODO Check the doc for boolean mapping
isWebApp = rs.int(a.isWebApp) == 1,
isNativeApp = rs.int(a.isNativeApp) == 1,
createdAt = rs.jodaDateTime(a.createdAt)
)
def apply(a: SyntaxProvider[App], u: SyntaxProvider[User])(rs: WrappedResultSet): App =
apply(a)(rs).copy(owner = Some(User(u)(rs)))
lazy val a = App.syntax("a")
private val u = User.u
def find(id: Long)(implicit session: AsyncDBSession, cxt: EC): Future[Option[App]] = {
withSQL {
select
.from[App](App as a)
.leftJoin(User as u).on(a.ownerId, u.id)
.where.eq(a.pid, id)
}.map(App(a, u)).single.future
}
def find(id: String)(implicit session: AsyncDBSession, cxt: EC): Future[Option[App]] = {
withSQL {
select
.from[App](App as a)
.leftJoin(User as u).on(a.ownerId, u.id)
.where.eq(a.id, id)
}.map(App(a, u)).single.future
}
def findForOwner(owner: User)(implicit session: AsyncDBSession, cxt: EC): Future[List[App]] =
findForOwner(owner.id)
def findForOwner(ownerId: Long)(implicit session: AsyncDBSession, cxt: EC): Future[List[App]] = {
withSQL {
select
.from[App](App as a)
.leftJoin(User as u).on(a.ownerId, u.id)
.where.eq(a.ownerId, ownerId)
.orderBy(a.id)
}.map(App(a, u)).list.future
}
def create(owner: User, id: String = UUID.randomUUID().toString, secret: String = UUID.randomUUID().toString,
name: String, description: String, uri: String, iconUri: Option[String],
redirectUris: Option[Seq[String]], isWebApp: Boolean, isNativeApp: Boolean, createdAt: DateTime = DateTime.now())
(implicit session: AsyncDBSession, ctx: EC): Future[App] = {
withSQL {
insert.into(App).namedValues(
column.ownerId -> owner.id,
column.id -> id,
column.secret -> secret,
column.name -> name,
column.description -> description,
column.uri -> uri,
column.iconUri -> iconUri,
column.redirectUris -> redirectUris.map(_.mkString(",")),
column.isWebApp -> isWebApp,
column.isNativeApp -> isNativeApp,
column.createdAt -> createdAt
)
}.updateAndReturnGeneratedKey().future.map(App(_, owner.id, Option(owner), id, secret, name, description, uri, iconUri, redirectUris, isWebApp, isNativeApp, createdAt))
}
def save(app: App)(implicit session: AsyncDBSession, cxt: EC): Future[App] = {
withSQL {
update(App).set(
column.name -> app.name,
column.description -> app.description,
column.uri -> app.uri,
column.iconUri -> app.iconUri,
column.redirectUris -> app.redirectUris.map(_.mkString(",")),
column.isWebApp -> app.isWebApp,
column.isNativeApp -> app.isNativeApp
).where.eq(column.pid, app.pid)
}.update.future.map(_ => app)
}
def destroy(app: App)(implicit session: AsyncDBSession, cxt: EC): Future[App] = {
withSQL {
delete.from(App)
.where.eq(column.pid, app.pid)
}.update.future.map(_ => app)
}
} | njin-fr/play-oauth | play-oauth-server/app/models/App.scala | Scala | apache-2.0 | 5,336 |
package org.bitcoins.wallet
import org.bitcoins.asyncutil.AsyncUtil
import org.bitcoins.core.currency._
import org.bitcoins.server.BitcoindRpcBackendUtil
import org.bitcoins.testkit.wallet.{
BitcoinSWalletTest,
WalletAppConfigWithBitcoindNewestFixtures
}
import org.bitcoins.testkitcore.util.TestUtil.bech32Address
import scala.concurrent.duration.DurationInt
class BitcoindBlockPollingTest
extends WalletAppConfigWithBitcoindNewestFixtures {
it must "properly setup and poll blocks from bitcoind" in {
walletAppConfigWithBitcoind =>
val bitcoind = walletAppConfigWithBitcoind.bitcoind
implicit val walletAppConfig = walletAppConfigWithBitcoind.walletAppConfig
val amountToSend = Bitcoins.one
for {
// Setup wallet
tmpWallet <-
BitcoinSWalletTest.createDefaultWallet(bitcoind, bitcoind, None)
wallet =
BitcoindRpcBackendUtil.createWalletWithBitcoindCallbacks(bitcoind,
tmpWallet,
None)
// Assert wallet is empty
isEmpty <- wallet.isEmpty()
_ = assert(isEmpty)
// Send to wallet
addr <- wallet.getNewAddress()
_ <- bitcoind.sendToAddress(addr, amountToSend)
// assert wallet hasn't seen it yet
firstBalance <- wallet.getBalance()
_ = assert(firstBalance == Satoshis.zero)
// Setup block polling
_ <- BitcoindRpcBackendUtil.startBitcoindBlockPolling(wallet,
bitcoind,
1.second)
_ <- bitcoind.generateToAddress(6, bech32Address)
// Wait for it to process
_ <- AsyncUtil.awaitConditionF(
() => wallet.getBalance().map(_ > Satoshis.zero),
1.second)
balance <- wallet.getConfirmedBalance()
//clean up
_ <- wallet.walletConfig.stop()
} yield assert(balance == amountToSend)
}
}
| bitcoin-s/bitcoin-s | wallet-test/src/test/scala/org/bitcoins/wallet/BitcoindBlockPollingTest.scala | Scala | mit | 2,086 |
class t5054_q2 {
/**
* A simple comment
*
* @param lost a lost parameter
* @usecase def test(): Int
*/
final def test(implicit lost: Int): Int = lost
}
| lrytz/scala | test/scaladoc/resources/t5054_q2.scala | Scala | apache-2.0 | 178 |
package dockarium.controller
import com.greencatsoft.angularjs.Controller
/**
* Created by becker on 2/19/15.
*/
object ServerVersionCtrl extends Controller {
override def initialize(): Unit = {
super.initialize()
}
}
| dockarium-io/dockarium-io | client/src/main/scala/dockarium/controller/ServerVersionCtrl.scala | Scala | apache-2.0 | 230 |
package edu.gemini.ags.gems.mascot.util
import breeze.linalg._
import edu.gemini.ags.gems.mascot.util.MatrixUtil._
import YUtils.{dif, abs, divide, digitize}
import YUtils.{cosh, sinh, add, grow}
import YUtils.pcen
/*
* Cubic spline interpolator.
*
* Ported from the Yorick original:
* Copyright (c) 2005, The Regents of the University of California.
* All rights reserved.
* This file is part of yorick (http://yorick.sourceforge.net).
* Read the accompanying LICENSE file for details.
*/
object Spline {
//func spline(dydx, y, x, xp, dydx1=, dydx0=)
///* DOCUMENT dydx= spline(y, x)
// -or- yp= spline(dydx, y, x, xp)
// -or- yp= spline(y, x, xp)
// computes the cubic spline curve passing through the points (X, Y).
//
// With two arguments, Y and X, spline returns the derivatives DYDX at
// the points, an array of the same length as X and Y. The DYDX values
// are chosen so that the piecewise cubic function returned by the four
// argument call will have a continuous second derivative.
//
// The X array must be strictly monotonic; it may either increase or
// decrease.
//
// The values Y and the derivatives DYDX uniquely determine a piecewise
// cubic function, whose value is returned in the four argument form.
// In this form, spline is analogous to the piecewise linear interpolator
// interp; usually you will regard it as a continuous function of its
// fourth argument, XP. The first argument, DYDX, will normally have
// been computed by a previous call to the two argument spline function.
// However, this need not be the case; another DYDX will generate a
// piecewise cubic function with continuous first derivative, but a
// discontinuous second derivative. For XP outside the extreme values
// of X, spline is linear (if DYDX1 or DYDX0 keywords were specified,
// the function will NOT have continuous second derivative at the
// endpoint).
//
// The XP array may have any dimensionality; the result YP will have
// the same dimensions as XP.
//
// If you only want the spline evaluated at a single set of XP, use the
// three argument form. This is equivalent to:
// yp= spline(spline(y,x), y, x, xp)
//
// The keywords DYDX1 and DYDX0 can be used to set the values of the
// returned DYDX(1) and DYDX(0) -- the first and last values of the
// slope, respectively. If either is not specified or nil, the slope at
// that end will be chosen so that the second derivative is zero there.
//
// The function tspline (tensioned spline) gives an interpolation
// function which lies between spline and interp, at the cost of
// requiring you to specify another parameter (the tension).
//
// SEE ALSO: interp, tspline
// */
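  // Illustrative usage sketch (added for clarity; not part of the original Yorick port). It shows
  // the two-argument and four-argument forms described in the DOCUMENT comment above on a tiny,
  // made-up data set; the names xs, ys and xInterp are hypothetical, any strictly monotonic xs works.
  private def splineUsageExample(): DenseVector[Double] = {
    val xs = DenseVector(0.0, 1.0, 2.0, 3.0)    // strictly increasing abscissae
    val ys = DenseVector(0.0, 1.0, 0.0, 1.0)    // ordinates at the knots
    val dydx = spline(ys, xs)                   // two-argument form: derivatives at the knots
    val xInterp = DenseVector(0.5, 1.5, 2.5)    // points to interpolate at
    spline(dydx, ys, xs, xInterp)               // four-argument form: interpolated values
  }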
/**
* spline(y,x) form.
*/
def spline(y: DenseVector[Double], x: DenseVector[Double]): DenseVector[Double] = {
// dx= x(dif);
// dy= y(dif);
// diag= (2./dx)(pcen);
// if (numberof(x)>2) diag(2:-1)*= 2.;
// rhs= (3.*dy/(dx*dx))(pcen);
// if (numberof(x)>2) rhs(2:-1)*= 2.;
// dx= 1./dx;
// if (is_void(dydx1)) {
// if (is_void(dydx0)) {
// return TDsolve(dx, diag, dx, rhs); /* simple natural spline */
// } else {
// dx= dx(1:-1);
// rhs= rhs(1:-1);
// rhs(0)-= dydx0*dx(0);
// return grow(TDsolve(dx, diag(1:-1), dx, rhs), dydx0);
// }
// } else {
// dydx1= double(dydx1);
// if (is_void(dydx0)) {
// dx= dx(2:0);
// rhs= rhs(2:0);
// rhs(1)-= dydx1*dx(1);
// return grow(dydx1, TDsolve(dx, diag(2:0), dx, rhs));
// } else {
// if (numberof(x)==2) return double([dydx1, dydx0]);
// dx= dx(2:-1);
// rhs= rhs(2:-1);
// rhs(1)-= dydx1*dx(1);
// rhs(0)-= dydx0*dx(0);
// return grow(dydx1, TDsolve(dx, diag(2:-1), dx, rhs), dydx0);
// }
// }
// val dx = dif(x)
// val dy = dif(y);
// val diag = pcen(divide(2.0, dx));
// if (x.size > 2) diag(1 until diag.size - 1) :*= 2.
// val rhs = pcen((dy * 3.) :/ (dx :* dx));
// if (x.size > 2) rhs(1 until rhs.size - 1) :*= 2.
// val dx2 = divide(1.0, dx);
//
// // Note: Since the Mascot code doesn't use the 2 optional parameters, the code dealing
// // with those has been left out here.
// tdSolve(dx2, diag, dx2, rhs); /* simple natural spline */
// XXX Since this function is a hotspot for performance, it was reimplemented in Java for better speed
// HotSpots.spline(y.toArray, x.toArray).asVector;
DenseVector(HotSpots.spline(y.toArray, x.toArray))
}
/**
* spline(y,x, xp) form
*/
def spline(y: DenseVector[Double], x: DenseVector[Double], xp: DenseVector[Double]): DenseVector[Double] = {
// dydx= spline(y,x,dydx1=dydx1,dydx0=dydx0);
// val dydx = spline(y, x);
// spline(dydx, y, x, xp)
// XXX Since this function is a hotspot for performance, it was reimplemented in Java for better speed
val ax = x.toArray
val ay = y.toArray
// HotSpots.spline(HotSpots.spline(ay, ax), ay, ax, xp.toArray).asVector
DenseVector(HotSpots.spline(HotSpots.spline(ay, ax), ay, ax, xp.toArray))
}
  /**
   * Variant of spline(y, x, xp) that forwards an additional maxValue bound to
   * HotSpots.splineMax when evaluating the interpolant at the points xp.
   */
def splineMax(y: DenseVector[Double], x: DenseVector[Double], xp: DenseVector[Double], maxValue: Double): DenseVector[Double] = {
// dydx= spline(y,x,dydx1=dydx1,dydx0=dydx0);
// val dydx = spline(y, x);
// spline(dydx, y, x, xp)
// XXX Since this function is a hotspot for performance, it was reimplemented in Java for better speed
val ax = x.toArray
val ay = y.toArray
DenseVector(HotSpots.splineMax(HotSpots.spline(ay, ax), ay, ax, xp.toArray, maxValue))
}
/**
* spline(dydx,y,x,xp) form
*/
def spline(pdydx: DenseVector[Double], py: DenseVector[Double], px: DenseVector[Double], xp: DenseVector[Double]): DenseVector[Double] = {
// l= digitize(xp, x); /* index of lower boundary of interval containing xp */
// u= l+1;
//
// /* extend x, y, dydx so that l and u can be used as index lists */
// dx= x(0)-x(1);
// x= grow(x(1)-dx, x, x(0)+dx);
// y= grow(y(1)-dydx(1)*dx, y, y(0)+dydx(0)*dx);
// dydx= grow(dydx(1), dydx, dydx(0));
//
// xl= x(l);
// dx= double(x(u)-xl);
// yl= y(l);
// dy= y(u)-yl;
// dl= dydx(l);
// du= dydx(u);
// dydx= dy/dx;
// return poly(xp-xl, yl, dl, (3.*dydx-du-2.*dl)/dx, (du+dl-2.*dydx)/(dx*dx));
// val l = digitize(xp, px); /* index of lower boundary of interval containing xp */
// val u = l + 1;
//
// /* extend x, y, dydx so that l and u can be used as index lists */
// val dx = px(px.size - 1) - px(0);
// val x = grow(px(0) - dx, px, px(px.size - 1) + dx)
// val y = grow(py(0) - pdydx(0) * dx, py, py(py.size - 1) + pdydx(pdydx.size - 1) * dx);
// val dydx = grow(pdydx(0), pdydx, pdydx(pdydx.size - 1));
//
// val xl = x(l);
// val dx2 = x(u) :- xl;
// val yl = y(l);
// val dy = y(u) :- yl;
// val dl = dydx(l);
// val du = dydx(u);
// val dydx2 = dy :/ dx2;
// poly(xp :- xl, yl, dl, (dydx2 * 3.0 :- du :- dl * 2.0) :/ dx2, (du :+ dl :- dydx2 * 2.0) :/ (dx2 :* dx2));
// XXX Since this function is a hotspot for performance, it was reimplemented in Java for better speed
DenseVector(HotSpots.spline(pdydx.toArray, py.toArray, px.toArray, xp.toArray))
}
//func tspline(tension, d2ydx2, y, x, xp, dydx1=, dydx0=)
///* DOCUMENT d2ydx2= tspline(tension, y, x)
// -or- yp= tspline(tension, d2ydx2, y, x, xp)
// -or- yp= tspline(tension, y, x, xp)
// computes a tensioned spline curve passing through the points (X, Y).
//
// The first argument, TENSION, is a positive number which determines
// the "tension" in the spline. In a cubic spline, the second derivative
// of the spline function varies linearly between the points X. In the
// tensioned spline, the curvature is concentrated near the points X,
// falling off at a rate proportional to the tension. Between the points
// of X, the function varies as:
// y= C1*exp(k*x) + C2*exp(-k*x) + C3*x + C4
// The parameter k is proportional to the TENSION; for k->0, the function
// reduces to the cubic spline (a piecewise cubic function), while for
// k->infinity, the function reduces to the piecewise linear function
// connecting the points. The TENSION argument may either be a scalar
// value, in which case, k will be TENSION*(numberof(X)-1)/(max(X)-min(X))
// in every interval of X, or TENSION may be an array of length one less
// than the length of X, in which case the parameter k will be
// abs(TENSION/X(dif)), possibly varying from one interval to the next.
// You can use a variable tension to flatten "bumps" in one interval
// without affecting nearby intervals. Internally, tspline forces
// k*X(dif) to lie between 0.01 and 100.0 in every interval, independent
// of the value of TENSION. Typically, the most dramatic variation
// occurs between TENSION of 1.0 and 10.0.
//
// With three arguments, Y and X, spline returns the derivatives D2YDX2 at
// the points, an array of the same length as X and Y. The D2YDX2 values
// are chosen so that the tensioned spline function returned by the five
// argument call will have a continuous first derivative.
//
// The X array must be strictly monotonic; it may either increase or
// decrease.
//
// The values Y and the derivatives D2YDX2 uniquely determine a tensioned
// spline function, whose value is returned in the five argument form.
// In this form, tspline is analogous to the piecewise linear interpolator
// interp; usually you will regard it as a continuous function of its
// fifth (or fourth) argument, XP.
//
// The XP array may have any dimensionality; the result YP will have
// the same dimensions as XP.
//
// The D2YDX2 argument will normally have been computed by a previous call
// to the three argument tspline function. If you will be computing the
// values of the spline function for many sets of XP, use this five
// argument form.
//
// If you only want the tspline evaluated at a single set of XP, use the
// four argument form. This is equivalent to:
// yp= tspline(tension, tspline(tension,y,x), y, x, xp)
//
// The keywords DYDX1 and DYDX0 can be used to set the values of the
// returned DYDX(1) and DYDX(0) -- the first and last values of the
// slope, respectively. If either is not specified or nil, the slope at
// that end will be chosen so that the second derivative is zero there.
//
// The function tspline (tensioned spline) gives an interpolation
// function which lies between spline and interp, at the cost of
// requiring you to specify another parameter (the tension).
//
// SEE ALSO: interp, tspline
// */
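  // Illustrative usage sketch (added for clarity; not part of the original Yorick port). The
  // tensioned spline is used like spline, with a leading tension parameter as described above;
  // the tension value 5.0 and the xs/ys/xInterp data below are made up for illustration only.
  private def tsplineUsageExample(): DenseVector[Double] = {
    val xs = DenseVector(0.0, 1.0, 2.0, 3.0)
    val ys = DenseVector(0.0, 1.0, 0.0, 1.0)
    val d2ydx2 = tspline(5.0, ys, xs)        // three-argument form: second derivatives at the knots
    val xInterp = DenseVector(0.5, 1.5, 2.5)
    tspline(5.0, d2ydx2, ys, xs, xInterp)    // five-argument form: interpolated values
  }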
/* tspline(tension, y,x) form */
def tspline(tension: Double, y: DenseVector[Double], x: DenseVector[Double]): DenseVector[Double] = {
// dx= x(dif);
// dy= y(dif);
// if (numberof(tension)==numberof(dx)) k= tension/abs(dx);
// else k= tension*numberof(dx)/(max(x)-min(x));
// k= max(min(k, 100./abs(dx)), 0.01/abs(dx));
// kdx= k*dx;
// skdx= sinh(kdx);
// diag= (cosh(kdx)/skdx-1./kdx)/k;
// diag= diag(pcen);
// if (numberof(x)>2) diag(2:-1)*= 2.;
// offd= (1./kdx-1./skdx)/k;
// ddydx= (dy/dx)(dif);
// if (is_void(dydx1)) {
// if (is_void(dydx0)) {
// if (numberof(x)==2) return [0., 0.];
// diag= diag(2:-1);
// offd= offd(2:-1);
// return grow(0., TDsolve(offd, diag, offd, ddydx), 0.);
// } else {
// dydx0-= dy(0)/dx(0);
// if (numberof(x)==2) return [0., dydx0/diag(0)];
// diag= diag(2:0);
// offd= offd(2:0);
// ddydx= grow(ddydx, dydx0);
// return grow(0., TDsolve(offd, diag, offd, ddydx));
// }
// } else {
// dydx1= dy(1)/dx(1) - dydx1;
// if (is_void(dydx0)) {
// if (numberof(x)==2) return [dydx1/diag(1), 0.];
// diag= diag(1:-1);
// offd= offd(1:-1);
// ddydx= grow(dydx1, ddydx);
// return grow(TDsolve(offd, diag, offd, ddydx), 0.);
// } else {
// dydx0-= dy(0)/dx(0);
// if (numberof(x)==2) return [dydx1/diag(1), dydx0/diag(0)];
// ddydx= grow(dydx1, ddydx, dydx0);
// return TDsolve(offd, diag, offd, ddydx);
// }
// }
val dx = dif(x)
val dy = dif(y)
// porting note: here tension is always a single value and x.size > 2
val k0 = tension * dx.size / (max(x) - min(x))
val k = YUtils.max(YUtils.min(k0, divide(100.0, abs(dx))), divide(0.01, abs(dx)))
val kdx = k :* dx
val skdx = sinh(kdx)
val diag = pcen(((cosh(kdx) :/ skdx) - divide(1.0, kdx)) :/ k)
if (x.size > 2) diag(1 until diag.size - 1) *= 2.0
val offd = (divide(1.0, kdx) - divide(1.0, skdx)) :/ k
val ddydx = dif(dy :/ dx)
// Note: Since the Mascot code doesn't use the 2 optional parameters, the code dealing
// with those has been left out here.
if (x.size == 2) {
DenseVector(0.0, 0.0)
} else {
val diag2 = diag(1 until diag.size - 1)
val offd2 = offd(1 until offd.size - 1)
grow(0.0, tdSolve(offd2, diag2, offd2, ddydx), 0.0)
}
}
/* tspline(tension, y,x,xp) form */
def tspline(tension: Double, y: DenseVector[Double], x: DenseVector[Double], xp: DenseVector[Double]): DenseVector[Double] = {
// d2ydx2= tspline(tension, y,x,dydx1=dydx1,dydx0=dydx0)
val d2ydx2 = tspline(tension, y, x)
tspline(tension, d2ydx2, y, x, xp)
}
/**
* tspline(tension, d2ydx2,y,x,xp) form.
*/
def tspline(tension: Double, pd2ydx2: DenseVector[Double], py: DenseVector[Double], px: DenseVector[Double], xp: DenseVector[Double]): DenseVector[Double] = {
// l= digitize(xp, x); /* index of lower boundary of interval containing xp */
// u= l+1;
//
// /* extend x so that l and u can be used as index lists --
// be careful not to make new intervals larger than necessary */
// n= numberof(x)-1; /* number of original intervals */
// dxavg= (x(0)-x(1))/n;
// if (dxavg>0.) {
// dx0= max(max(xp)-x(0), dxavg);
// dx1= max(x(1)-min(xp), dxavg);
// } else {
// dx0= min(min(xp)-x(0), dxavg);
// dx1= min(x(1)-max(xp), dxavg);
// }
// x= grow(x(1)-dx1, x, x(0)+dx0);
//
// /* compute k so that sinh(k*dx) is safe to compute */
// dx= x(dif);
// if (numberof(tension)==n) {
// k= grow(0., tension, 0.)/abs(dx);
// } else {
// k= tension/abs(dxavg);
// }
// k= max(min(k, 100./abs(dx)), 0.01/abs(dx));
//
// /* extend y carefully so that linear extrapolation happens automatically */
// k1= k(2);
// k0= k(-1);
// dydx1= (y(2)-y(1))/dx1;
// kdx= k1*dx1;
// d2u= d2ydx2(2);
// d2l= d2ydx2(1);
// dydx1+= ((d2u-d2l*cosh(kdx))/sinh(kdx) - (d2u-d2l)/kdx)/k1;
// dydx0= (y(0)-y(-1))/dx0;
// kdx= k0*dx0;
// d2u= d2ydx2(0);
// d2l= d2ydx2(-1);
// dydx0+= ((d2u*cosh(kdx)-d2l)/sinh(kdx) - (d2u-d2l)/kdx)/k0;
//
// y= grow(y(1)-dydx1*dx1, y, y(0)+dydx0*dx0);
// d2ydx2= grow(0., d2ydx2, 0.);
//
// /* begin interpolation */
// xu= x(u);
// xl= x(l);
// dx= xu-xl;
// dxl= xp-xl;
// yl= y(l);
// dydx= (y(u)-yl)/dx;
//
// km2= 1./(k*k);
// km2(1)= 0.;
// km2(0)= 0.;
// km2= km2(l);
// k= k(l);
// skdx= sinh(k*dx);
//
// d2u= d2ydx2(u);
// d2l= d2ydx2(l);
// d3= km2*(d2u-d2l)/dx;
//
// d2ydx2= d2u*sinh(k*dxl)/skdx + d2l*(sinh(k*(xu-xp))/skdx-1.);
//
// return yl + km2*d2ydx2 + (dydx-d3)*dxl;
    // Porting note: the parameters are prefixed with 'p' because the Yorick original reassigns them.
    // Locals that the original reassigns (sometimes with a different type) are introduced here as
    // fresh vals with a numeric suffix instead, e.g. val dx = ...; val dx_2 = ...
val l = digitize(xp, px).toSeq // index of lower boundary of interval containing xp
val u = add(l, 1)
// extend x so that l and u can be used as index lists --
// be careful not to make new intervals larger than necessary
val n = px.size - 1 // number of original intervals
val dxavg = (px(px.size - 1) - px(0)) / n
val dx0 = if (dxavg > 0.0) math.max(max(xp) - px(px.size - 1), dxavg) else math.min(min(xp) - px(px.size - 1), dxavg)
val dx1 = if (dxavg > 0.0) math.max(px(0) - min(xp), dxavg) else math.min(px(0) - max(xp), dxavg)
val x = grow(px(0) - dx1, px, px(px.size - 1) + dx0)
// compute k so that sinh(k*dx) is safe to compute
val dx = dif(x)
// Porting note: here tension is always a single value and x.size > 2.
val k = YUtils.max(YUtils.min(tension / math.abs(dxavg), divide(100.0, abs(dx))), divide(0.01, abs(dx)))
// extend y carefully so that linear extrapolation happens automatically
val k1 = k(1)
val k0 = k(k.size - 2)
val dydx1 = (py(1) - py(0)) / dx1
val kdx = k1 * dx1
val d2u = pd2ydx2(1)
val d2l = pd2ydx2(0)
val dydx1_2 = dydx1 + ((d2u - d2l * math.cosh(kdx)) / math.sinh(kdx) - (d2u - d2l) / kdx) / k1
val dydx0 = (py(py.size - 1) - py(py.size - 2)) / dx0
val kdx_2 = k0 * dx0
val d2u_2 = pd2ydx2(pd2ydx2.size - 1)
val d2l_2 = pd2ydx2(pd2ydx2.size - 2)
val dydx0_2 = dydx0 + ((d2u_2 * math.cosh(kdx_2) - d2l_2) / math.sinh(kdx_2) - (d2u_2 - d2l_2) / kdx_2) / k0
val y = grow(py(0) - dydx1_2 * dx1, py, py(py.size - 1) + dydx0_2 * dx0)
val d2ydx2 = grow(0.0, pd2ydx2, 0.0)
val xu = x(u).toDenseVector
val xl = x(l).toDenseVector
val dx_2 = xu :- xl
val dxl = xp - xl
val yl = y(l).toDenseVector
val dydx = (y(u).toDenseVector - yl) :/ dx_2
val km2 = divide(1.0, k :* k)
km2(0) = 0.0
km2(km2.size - 1) = 0.0
val km2_2 = km2(l).toDenseVector
val k_2 = k(l).toDenseVector
val skdx = sinh(k_2 :* dx_2)
val d2u_3 = d2ydx2(u).toDenseVector
val d2l_3 = d2ydx2(l).toDenseVector
val d3 = (km2_2 :* (d2u_3 - d2l_3)) :/ dx_2
val d2ydx2_2 = ((d2u_3 :* sinh(k_2 :* dxl)) :/ skdx) + (d2l_3 :* ((sinh(k_2 :* (xu - xp)) :/ skdx) - 1.0))
yl + (km2_2 :* d2ydx2_2) + ((dydx - d3) :* dxl)
}
}
| fnussber/ocs | bundle/edu.gemini.ags/src/main/scala/edu/gemini/ags/gems/mascot/util/Spline.scala | Scala | bsd-3-clause | 18,979 |
package cs4r.labs.learningscala.adventofcode
object AdventOfCode16 extends App {
val puzzleInput =
"""Sue 1: cars: 9, akitas: 3, goldfish: 0
|Sue 2: akitas: 9, children: 3, samoyeds: 9
|Sue 3: trees: 6, cars: 6, children: 4
|Sue 4: trees: 4, vizslas: 4, goldfish: 9
|Sue 5: akitas: 9, vizslas: 7, cars: 5
|Sue 6: vizslas: 6, goldfish: 6, akitas: 3
|Sue 7: pomeranians: 5, samoyeds: 0, perfumes: 10
|Sue 8: cars: 10, pomeranians: 7, goldfish: 8
|Sue 9: trees: 2, vizslas: 7, samoyeds: 6
|Sue 10: perfumes: 5, pomeranians: 4, children: 9
|Sue 11: vizslas: 5, perfumes: 8, cars: 10
|Sue 12: children: 10, cars: 6, perfumes: 5
|Sue 13: cats: 4, samoyeds: 7, pomeranians: 8
|Sue 14: perfumes: 6, goldfish: 10, children: 7
|Sue 15: perfumes: 4, pomeranians: 3, cars: 6
|Sue 16: perfumes: 7, cars: 9, pomeranians: 6
|Sue 17: goldfish: 3, cars: 6, vizslas: 7
|Sue 18: perfumes: 6, cars: 7, goldfish: 3
|Sue 19: trees: 0, akitas: 3, pomeranians: 8
|Sue 20: goldfish: 6, trees: 2, akitas: 6
|Sue 21: pomeranians: 9, akitas: 9, samoyeds: 9
|Sue 22: vizslas: 2, cars: 9, perfumes: 5
|Sue 23: goldfish: 10, samoyeds: 8, children: 9
|Sue 24: akitas: 4, goldfish: 1, vizslas: 5
|Sue 25: goldfish: 10, trees: 8, perfumes: 6
|Sue 26: vizslas: 5, akitas: 8, trees: 1
|Sue 27: trees: 3, cars: 6, perfumes: 2
|Sue 28: goldfish: 8, trees: 7, akitas: 10
|Sue 29: children: 5, trees: 1, goldfish: 10
|Sue 30: vizslas: 3, perfumes: 8, akitas: 3
|Sue 31: cars: 6, children: 10, perfumes: 7
|Sue 32: cars: 10, perfumes: 3, goldfish: 10
|Sue 33: perfumes: 9, vizslas: 3, akitas: 4
|Sue 34: perfumes: 10, vizslas: 7, children: 8
|Sue 35: cars: 5, perfumes: 5, vizslas: 9
|Sue 36: trees: 9, cars: 9, akitas: 7
|Sue 37: samoyeds: 9, perfumes: 2, cars: 10
|Sue 38: akitas: 7, cars: 5, trees: 5
|Sue 39: goldfish: 8, trees: 9, cars: 10
|Sue 40: trees: 0, cats: 1, pomeranians: 1
|Sue 41: pomeranians: 6, perfumes: 9, samoyeds: 1
|Sue 42: vizslas: 6, akitas: 3, pomeranians: 1
|Sue 43: vizslas: 2, perfumes: 3, pomeranians: 6
|Sue 44: akitas: 5, pomeranians: 0, vizslas: 10
|Sue 45: vizslas: 4, goldfish: 1, cars: 5
|Sue 46: cars: 4, vizslas: 8, cats: 0
|Sue 47: cats: 5, children: 8, pomeranians: 2
|Sue 48: vizslas: 3, perfumes: 6, cats: 0
|Sue 49: akitas: 7, perfumes: 0, trees: 7
|Sue 50: trees: 4, akitas: 10, vizslas: 2
|Sue 51: goldfish: 10, cars: 9, trees: 4
|Sue 52: cars: 5, children: 9, perfumes: 0
|Sue 53: vizslas: 5, cars: 3, cats: 8
|Sue 54: cars: 5, akitas: 1, goldfish: 10
|Sue 55: akitas: 10, vizslas: 2, cars: 6
|Sue 56: cats: 6, trees: 0, cars: 4
|Sue 57: vizslas: 1, akitas: 1, samoyeds: 7
|Sue 58: samoyeds: 6, vizslas: 1, akitas: 7
|Sue 59: akitas: 9, cars: 8, vizslas: 1
|Sue 60: cars: 6, vizslas: 7, goldfish: 0
|Sue 61: pomeranians: 5, akitas: 6, vizslas: 2
|Sue 62: samoyeds: 2, cats: 8, goldfish: 7
|Sue 63: vizslas: 10, goldfish: 7, samoyeds: 9
|Sue 64: perfumes: 2, trees: 1, akitas: 6
|Sue 65: cars: 8, perfumes: 10, vizslas: 9
|Sue 66: akitas: 8, vizslas: 8, perfumes: 8
|Sue 67: goldfish: 7, cars: 9, samoyeds: 9
|Sue 68: perfumes: 2, children: 7, akitas: 1
|Sue 69: perfumes: 7, vizslas: 9, akitas: 1
|Sue 70: samoyeds: 3, vizslas: 1, trees: 1
|Sue 71: vizslas: 8, goldfish: 7, trees: 9
|Sue 72: goldfish: 8, cars: 6, trees: 9
|Sue 73: perfumes: 5, cars: 10, samoyeds: 7
|Sue 74: pomeranians: 4, perfumes: 3, cars: 5
|Sue 75: samoyeds: 1, perfumes: 1, pomeranians: 1
|Sue 76: goldfish: 4, cats: 6, akitas: 7
|Sue 77: perfumes: 5, akitas: 4, vizslas: 8
|Sue 78: perfumes: 4, cats: 3, children: 4
|Sue 79: vizslas: 5, pomeranians: 9, samoyeds: 7
|Sue 80: cars: 3, samoyeds: 5, pomeranians: 7
|Sue 81: vizslas: 2, samoyeds: 4, perfumes: 2
|Sue 82: trees: 1, akitas: 10, vizslas: 9
|Sue 83: vizslas: 0, akitas: 2, samoyeds: 5
|Sue 84: perfumes: 5, vizslas: 7, children: 8
|Sue 85: cats: 3, children: 2, trees: 0
|Sue 86: cars: 3, perfumes: 2, goldfish: 2
|Sue 87: trees: 1, akitas: 7, vizslas: 0
|Sue 88: trees: 1, akitas: 2, samoyeds: 1
|Sue 89: cars: 4, vizslas: 8, akitas: 1
|Sue 90: perfumes: 5, cats: 3, vizslas: 0
|Sue 91: samoyeds: 7, cats: 6, goldfish: 8
|Sue 92: samoyeds: 10, cats: 0, cars: 7
|Sue 93: cars: 6, akitas: 7, samoyeds: 2
|Sue 94: perfumes: 0, goldfish: 6, trees: 9
|Sue 95: cars: 6, pomeranians: 2, samoyeds: 8
|Sue 96: cars: 2, trees: 9, samoyeds: 4
|Sue 97: goldfish: 5, trees: 1, children: 0
|Sue 98: akitas: 9, goldfish: 7, children: 6
|Sue 99: goldfish: 9, akitas: 0, pomeranians: 0
|Sue 100: samoyeds: 6, children: 8, vizslas: 5
|Sue 101: vizslas: 6, cars: 5, goldfish: 4
|Sue 102: vizslas: 6, akitas: 2, perfumes: 6
|Sue 103: samoyeds: 3, akitas: 7, children: 4
|Sue 104: cars: 3, perfumes: 10, cats: 6
|Sue 105: vizslas: 9, pomeranians: 0, cars: 1
|Sue 106: cats: 6, samoyeds: 8, pomeranians: 5
|Sue 107: cars: 7, trees: 4, akitas: 10
|Sue 108: perfumes: 3, vizslas: 1, goldfish: 9
|Sue 109: trees: 6, cars: 8, goldfish: 5
|Sue 110: pomeranians: 2, children: 1, vizslas: 7
|Sue 111: akitas: 0, vizslas: 8, cars: 0
|Sue 112: goldfish: 3, vizslas: 6, akitas: 2
|Sue 113: akitas: 10, pomeranians: 7, perfumes: 7
|Sue 114: cars: 10, cats: 2, vizslas: 8
|Sue 115: akitas: 8, trees: 1, vizslas: 2
|Sue 116: vizslas: 2, akitas: 7, perfumes: 1
|Sue 117: goldfish: 0, vizslas: 10, trees: 9
|Sue 118: trees: 3, cars: 0, goldfish: 0
|Sue 119: perfumes: 7, goldfish: 5, trees: 9
|Sue 120: children: 9, vizslas: 3, trees: 5
|Sue 121: vizslas: 1, goldfish: 7, akitas: 10
|Sue 122: perfumes: 1, cars: 6, trees: 1
|Sue 123: akitas: 2, vizslas: 0, goldfish: 7
|Sue 124: vizslas: 10, pomeranians: 7, akitas: 0
|Sue 125: perfumes: 4, cats: 5, vizslas: 2
|Sue 126: cars: 6, samoyeds: 8, akitas: 3
|Sue 127: trees: 9, goldfish: 7, akitas: 9
|Sue 128: cars: 8, trees: 0, perfumes: 2
|Sue 129: pomeranians: 7, vizslas: 2, perfumes: 6
|Sue 130: vizslas: 9, pomeranians: 3, trees: 6
|Sue 131: vizslas: 7, cars: 9, perfumes: 1
|Sue 132: akitas: 2, pomeranians: 9, vizslas: 7
|Sue 133: trees: 9, pomeranians: 10, samoyeds: 0
|Sue 134: children: 4, akitas: 10, perfumes: 4
|Sue 135: vizslas: 1, cats: 1, trees: 8
|Sue 136: samoyeds: 7, cars: 8, goldfish: 5
|Sue 137: perfumes: 0, children: 1, pomeranians: 10
|Sue 138: vizslas: 4, perfumes: 5, cars: 5
|Sue 139: trees: 2, perfumes: 8, goldfish: 0
|Sue 140: cars: 10, akitas: 5, goldfish: 7
|Sue 141: children: 4, trees: 3, goldfish: 8
|Sue 142: cars: 8, perfumes: 6, trees: 7
|Sue 143: akitas: 6, goldfish: 0, trees: 10
|Sue 144: akitas: 7, pomeranians: 10, perfumes: 10
|Sue 145: trees: 10, vizslas: 3, goldfish: 4
|Sue 146: samoyeds: 4, akitas: 3, perfumes: 6
|Sue 147: akitas: 8, perfumes: 2, pomeranians: 10
|Sue 148: cars: 2, perfumes: 0, goldfish: 8
|Sue 149: goldfish: 6, akitas: 7, perfumes: 6
|Sue 150: cars: 2, pomeranians: 5, perfumes: 4
|Sue 151: goldfish: 1, cars: 5, trees: 0
|Sue 152: pomeranians: 4, cars: 7, children: 1
|Sue 153: goldfish: 8, cars: 1, children: 10
|Sue 154: cars: 6, perfumes: 8, trees: 1
|Sue 155: akitas: 4, perfumes: 6, pomeranians: 2
|Sue 156: pomeranians: 5, cars: 4, akitas: 1
|Sue 157: cats: 5, cars: 9, goldfish: 8
|Sue 158: vizslas: 5, samoyeds: 1, children: 7
|Sue 159: vizslas: 1, perfumes: 3, akitas: 1
|Sue 160: goldfish: 10, pomeranians: 9, perfumes: 5
|Sue 161: samoyeds: 3, trees: 7, cars: 2
|Sue 162: cars: 2, pomeranians: 1, vizslas: 6
|Sue 163: vizslas: 3, perfumes: 5, akitas: 6
|Sue 164: vizslas: 1, trees: 0, akitas: 5
|Sue 165: vizslas: 5, cars: 6, pomeranians: 8
|Sue 166: cars: 10, perfumes: 2, trees: 9
|Sue 167: cars: 10, pomeranians: 6, perfumes: 4
|Sue 168: akitas: 7, trees: 10, goldfish: 7
|Sue 169: akitas: 1, perfumes: 10, cars: 10
|Sue 170: akitas: 5, samoyeds: 8, vizslas: 6
|Sue 171: children: 3, akitas: 2, vizslas: 3
|Sue 172: goldfish: 5, vizslas: 5, perfumes: 9
|Sue 173: perfumes: 5, goldfish: 10, trees: 5
|Sue 174: akitas: 5, vizslas: 2, children: 7
|Sue 175: perfumes: 5, cars: 7, samoyeds: 2
|Sue 176: cars: 8, vizslas: 10, akitas: 7
|Sue 177: perfumes: 7, children: 8, goldfish: 7
|Sue 178: cars: 1, pomeranians: 9, samoyeds: 0
|Sue 179: perfumes: 6, cars: 2, trees: 6
|Sue 180: trees: 3, vizslas: 7, children: 3
|Sue 181: vizslas: 8, samoyeds: 2, trees: 9
|Sue 182: perfumes: 3, cats: 1, children: 5
|Sue 183: akitas: 9, cats: 6, children: 3
|Sue 184: pomeranians: 9, cars: 6, perfumes: 8
|Sue 185: vizslas: 9, trees: 0, akitas: 9
|Sue 186: perfumes: 6, cars: 5, goldfish: 5
|Sue 187: perfumes: 4, cats: 7, vizslas: 2
|Sue 188: akitas: 7, cars: 4, children: 10
|Sue 189: akitas: 0, goldfish: 7, vizslas: 5
|Sue 190: akitas: 5, cars: 5, cats: 6
|Sue 191: cars: 6, children: 0, perfumes: 3
|Sue 192: cats: 2, perfumes: 10, goldfish: 7
|Sue 193: trees: 1, perfumes: 0, cars: 8
|Sue 194: perfumes: 9, children: 4, cats: 6
|Sue 195: akitas: 7, trees: 3, goldfish: 6
|Sue 196: goldfish: 8, cars: 8, samoyeds: 0
|Sue 197: cats: 0, akitas: 10, vizslas: 0
|Sue 198: goldfish: 1, perfumes: 3, cars: 8
|Sue 199: akitas: 10, vizslas: 5, samoyeds: 6
|Sue 200: pomeranians: 9, goldfish: 9, samoyeds: 7
|Sue 201: samoyeds: 0, goldfish: 7, akitas: 6
|Sue 202: vizslas: 0, goldfish: 2, akitas: 1
|Sue 203: goldfish: 3, children: 0, vizslas: 8
|Sue 204: cars: 8, trees: 2, perfumes: 2
|Sue 205: cars: 4, perfumes: 5, goldfish: 8
|Sue 206: vizslas: 3, trees: 2, akitas: 1
|Sue 207: cars: 7, goldfish: 5, trees: 1
|Sue 208: goldfish: 1, cars: 6, vizslas: 8
|Sue 209: cats: 4, trees: 1, children: 0
|Sue 210: cats: 10, children: 0, perfumes: 0
|Sue 211: cars: 4, pomeranians: 7, samoyeds: 5
|Sue 212: cars: 2, pomeranians: 10, trees: 1
|Sue 213: trees: 10, cats: 5, cars: 10
|Sue 214: perfumes: 5, trees: 1, vizslas: 1
|Sue 215: akitas: 10, vizslas: 8, samoyeds: 8
|Sue 216: vizslas: 2, cats: 5, pomeranians: 3
|Sue 217: akitas: 10, perfumes: 0, cats: 10
|Sue 218: trees: 8, cats: 5, vizslas: 2
|Sue 219: goldfish: 10, perfumes: 8, children: 2
|Sue 220: samoyeds: 9, trees: 8, vizslas: 7
|Sue 221: children: 7, trees: 6, cars: 6
|Sue 222: cats: 4, akitas: 5, pomeranians: 0
|Sue 223: trees: 8, goldfish: 2, perfumes: 8
|Sue 224: pomeranians: 9, cars: 8, akitas: 5
|Sue 225: akitas: 10, vizslas: 0, trees: 2
|Sue 226: akitas: 8, cats: 6, cars: 7
|Sue 227: trees: 1, akitas: 3, goldfish: 4
|Sue 228: pomeranians: 6, cats: 3, goldfish: 3
|Sue 229: trees: 10, perfumes: 3, vizslas: 7
|Sue 230: perfumes: 8, cars: 7, akitas: 0
|Sue 231: perfumes: 10, goldfish: 4, cars: 6
|Sue 232: goldfish: 7, trees: 3, cats: 2
|Sue 233: perfumes: 6, trees: 4, akitas: 4
|Sue 234: goldfish: 9, cats: 4, cars: 7
|Sue 235: pomeranians: 6, vizslas: 0, akitas: 6
|Sue 236: samoyeds: 5, cars: 5, children: 4
|Sue 237: vizslas: 10, cars: 4, goldfish: 4
|Sue 238: goldfish: 3, samoyeds: 7, akitas: 2
|Sue 239: cats: 8, children: 2, vizslas: 7
|Sue 240: cars: 9, perfumes: 4, trees: 9
|Sue 241: trees: 8, vizslas: 2, goldfish: 5
|Sue 242: cars: 6, trees: 3, vizslas: 3
|Sue 243: cats: 6, children: 7, cars: 4
|Sue 244: cats: 10, perfumes: 2, goldfish: 7
|Sue 245: akitas: 8, cats: 10, perfumes: 8
|Sue 246: vizslas: 8, akitas: 5, perfumes: 10
|Sue 247: goldfish: 2, vizslas: 5, akitas: 7
|Sue 248: akitas: 3, perfumes: 0, trees: 10
|Sue 249: cats: 4, vizslas: 5, pomeranians: 6
|Sue 250: children: 3, vizslas: 7, perfumes: 2
|Sue 251: cars: 0, pomeranians: 10, perfumes: 0
|Sue 252: akitas: 0, goldfish: 9, cars: 6
|Sue 253: perfumes: 7, cars: 4, samoyeds: 5
|Sue 254: akitas: 9, trees: 10, cars: 4
|Sue 255: samoyeds: 10, children: 6, akitas: 7
|Sue 256: trees: 8, goldfish: 8, perfumes: 8
|Sue 257: goldfish: 3, akitas: 2, perfumes: 6
|Sue 258: cats: 7, trees: 0, vizslas: 1
|Sue 259: perfumes: 7, cars: 7, akitas: 7
|Sue 260: goldfish: 0, vizslas: 0, samoyeds: 2
|Sue 261: vizslas: 2, children: 2, cats: 3
|Sue 262: vizslas: 2, pomeranians: 9, samoyeds: 3
|Sue 263: cats: 1, akitas: 3, vizslas: 1
|Sue 264: pomeranians: 10, trees: 2, goldfish: 7
|Sue 265: samoyeds: 5, trees: 7, perfumes: 4
|Sue 266: perfumes: 10, cars: 1, pomeranians: 3
|Sue 267: trees: 6, goldfish: 1, cars: 0
|Sue 268: cars: 6, samoyeds: 4, pomeranians: 5
|Sue 269: goldfish: 3, vizslas: 3, akitas: 3
|Sue 270: children: 5, cats: 0, cars: 4
|Sue 271: goldfish: 3, perfumes: 8, pomeranians: 7
|Sue 272: samoyeds: 6, cars: 7, perfumes: 10
|Sue 273: trees: 4, cars: 2, vizslas: 7
|Sue 274: samoyeds: 10, perfumes: 9, goldfish: 6
|Sue 275: cars: 4, trees: 2, perfumes: 7
|Sue 276: akitas: 3, perfumes: 9, cars: 9
|Sue 277: akitas: 8, vizslas: 2, cats: 6
|Sue 278: trees: 5, goldfish: 7, akitas: 3
|Sue 279: perfumes: 9, cars: 8, vizslas: 2
|Sue 280: trees: 3, vizslas: 0, children: 0
|Sue 281: cars: 7, trees: 2, cats: 5
|Sue 282: vizslas: 4, cars: 10, cats: 3
|Sue 283: akitas: 10, cats: 3, samoyeds: 9
|Sue 284: trees: 7, children: 5, goldfish: 6
|Sue 285: cars: 2, perfumes: 5, cats: 7
|Sue 286: samoyeds: 5, trees: 10, goldfish: 6
|Sue 287: goldfish: 10, perfumes: 4, trees: 7
|Sue 288: vizslas: 9, trees: 9, perfumes: 0
|Sue 289: trees: 4, goldfish: 9, vizslas: 8
|Sue 290: vizslas: 3, cars: 3, trees: 2
|Sue 291: goldfish: 2, akitas: 2, trees: 2
|Sue 292: children: 1, cars: 0, vizslas: 5
|Sue 293: trees: 5, akitas: 4, goldfish: 6
|Sue 294: akitas: 3, vizslas: 7, pomeranians: 5
|Sue 295: goldfish: 10, vizslas: 3, trees: 1
|Sue 296: cars: 2, trees: 1, akitas: 0
|Sue 297: akitas: 10, vizslas: 6, samoyeds: 2
|Sue 298: children: 5, trees: 1, samoyeds: 9
|Sue 299: perfumes: 9, trees: 6, vizslas: 1
|Sue 300: akitas: 7, pomeranians: 6, vizslas: 6
|Sue 301: cats: 7, children: 6, vizslas: 7
|Sue 302: trees: 2, vizslas: 7, samoyeds: 4
|Sue 303: goldfish: 0, samoyeds: 10, cars: 4
|Sue 304: pomeranians: 9, children: 3, vizslas: 5
|Sue 305: akitas: 8, vizslas: 4, cars: 5
|Sue 306: akitas: 0, perfumes: 2, pomeranians: 10
|Sue 307: akitas: 9, cars: 0, trees: 2
|Sue 308: vizslas: 10, goldfish: 8, akitas: 6
|Sue 309: trees: 0, cats: 6, perfumes: 2
|Sue 310: vizslas: 10, cars: 1, trees: 4
|Sue 311: goldfish: 8, perfumes: 6, cats: 3
|Sue 312: goldfish: 0, children: 1, akitas: 2
|Sue 313: pomeranians: 10, trees: 6, samoyeds: 6
|Sue 314: vizslas: 5, akitas: 4, pomeranians: 2
|Sue 315: goldfish: 7, trees: 0, akitas: 5
|Sue 316: goldfish: 4, vizslas: 5, cars: 7
|Sue 317: perfumes: 7, cats: 10, cars: 4
|Sue 318: samoyeds: 10, cars: 9, trees: 7
|Sue 319: pomeranians: 8, vizslas: 6, cars: 3
|Sue 320: cars: 4, cats: 9, akitas: 4
|Sue 321: cars: 6, trees: 2, perfumes: 6
|Sue 322: goldfish: 1, cats: 2, perfumes: 4
|Sue 323: akitas: 6, cats: 5, cars: 8
|Sue 324: cats: 4, vizslas: 9, akitas: 0
|Sue 325: children: 8, samoyeds: 9, trees: 4
|Sue 326: vizslas: 2, samoyeds: 10, perfumes: 7
|Sue 327: goldfish: 7, pomeranians: 4, akitas: 10
|Sue 328: perfumes: 8, cats: 4, akitas: 10
|Sue 329: trees: 0, cars: 9, goldfish: 3
|Sue 330: trees: 5, samoyeds: 7, perfumes: 8
|Sue 331: cars: 4, perfumes: 2, goldfish: 0
|Sue 332: vizslas: 4, pomeranians: 7, akitas: 1
|Sue 333: akitas: 4, goldfish: 3, perfumes: 0
|Sue 334: samoyeds: 3, akitas: 10, vizslas: 0
|Sue 335: goldfish: 1, akitas: 7, vizslas: 6
|Sue 336: perfumes: 1, goldfish: 1, pomeranians: 8
|Sue 337: children: 5, cars: 4, cats: 4
|Sue 338: vizslas: 5, cars: 10, cats: 3
|Sue 339: trees: 2, goldfish: 3, cars: 1
|Sue 340: trees: 10, goldfish: 6, perfumes: 2
|Sue 341: akitas: 5, trees: 6, cats: 3
|Sue 342: cars: 10, children: 8, goldfish: 0
|Sue 343: cats: 2, akitas: 0, pomeranians: 4
|Sue 344: perfumes: 1, vizslas: 3, cars: 3
|Sue 345: samoyeds: 8, cats: 5, perfumes: 8
|Sue 346: cars: 5, akitas: 10, trees: 2
|Sue 347: vizslas: 9, akitas: 9, cars: 3
|Sue 348: cars: 3, perfumes: 1, pomeranians: 9
|Sue 349: akitas: 1, cars: 4, perfumes: 0
|Sue 350: perfumes: 8, vizslas: 2, trees: 6
|Sue 351: pomeranians: 5, akitas: 9, cats: 8
|Sue 352: pomeranians: 8, vizslas: 3, goldfish: 10
|Sue 353: trees: 2, pomeranians: 0, goldfish: 6
|Sue 354: cats: 5, akitas: 7, goldfish: 6
|Sue 355: goldfish: 6, children: 4, trees: 10
|Sue 356: children: 1, trees: 3, akitas: 7
|Sue 357: trees: 2, samoyeds: 10, goldfish: 3
|Sue 358: samoyeds: 10, cats: 0, goldfish: 0
|Sue 359: perfumes: 3, children: 6, pomeranians: 1
|Sue 360: cars: 10, pomeranians: 1, samoyeds: 5
|Sue 361: samoyeds: 9, pomeranians: 7, perfumes: 6
|Sue 362: goldfish: 6, trees: 8, perfumes: 9
|Sue 363: samoyeds: 10, pomeranians: 9, children: 10
|Sue 364: perfumes: 3, goldfish: 7, cars: 9
|Sue 365: cats: 3, children: 4, samoyeds: 8
|Sue 366: trees: 0, cars: 10, vizslas: 10
|Sue 367: pomeranians: 10, children: 8, perfumes: 2
|Sue 368: cars: 5, vizslas: 0, samoyeds: 3
|Sue 369: trees: 1, goldfish: 8, cars: 8
|Sue 370: vizslas: 0, cars: 2, perfumes: 5
|Sue 371: trees: 2, cars: 3, vizslas: 8
|Sue 372: trees: 10, children: 9, cats: 1
|Sue 373: pomeranians: 3, perfumes: 1, vizslas: 0
|Sue 374: vizslas: 0, perfumes: 6, trees: 0
|Sue 375: vizslas: 7, pomeranians: 1, akitas: 10
|Sue 376: vizslas: 8, trees: 2, cars: 10
|Sue 377: perfumes: 9, cats: 5, goldfish: 5
|Sue 378: cats: 0, akitas: 10, perfumes: 9
|Sue 379: cars: 4, akitas: 1, trees: 1
|Sue 380: cars: 4, perfumes: 5, trees: 3
|Sue 381: goldfish: 3, akitas: 5, samoyeds: 9
|Sue 382: goldfish: 7, perfumes: 5, trees: 5
|Sue 383: akitas: 4, cats: 6, cars: 8
|Sue 384: children: 6, goldfish: 10, akitas: 7
|Sue 385: akitas: 7, vizslas: 5, perfumes: 10
|Sue 386: children: 7, vizslas: 10, akitas: 10
|Sue 387: goldfish: 6, akitas: 7, trees: 2
|Sue 388: vizslas: 6, trees: 1, akitas: 2
|Sue 389: cars: 5, vizslas: 3, akitas: 7
|Sue 390: vizslas: 4, cats: 8, perfumes: 7
|Sue 391: akitas: 3, trees: 0, children: 2
|Sue 392: cats: 7, cars: 3, children: 9
|Sue 393: trees: 10, vizslas: 3, goldfish: 7
|Sue 394: perfumes: 0, goldfish: 7, akitas: 4
|Sue 395: cats: 6, cars: 7, vizslas: 0
|Sue 396: vizslas: 4, perfumes: 6, goldfish: 5
|Sue 397: pomeranians: 8, trees: 1, akitas: 9
|Sue 398: goldfish: 7, pomeranians: 6, samoyeds: 9
|Sue 399: perfumes: 10, cars: 1, trees: 8
|Sue 400: trees: 0, goldfish: 9, children: 6
|Sue 401: trees: 1, cars: 6, pomeranians: 8
|Sue 402: perfumes: 9, cars: 0, vizslas: 10
|Sue 403: samoyeds: 4, akitas: 1, vizslas: 9
|Sue 404: perfumes: 0, trees: 2, cars: 4
|Sue 405: akitas: 0, perfumes: 5, samoyeds: 4
|Sue 406: akitas: 8, vizslas: 6, children: 2
|Sue 407: children: 1, trees: 8, goldfish: 10
|Sue 408: pomeranians: 4, trees: 10, cars: 9
|Sue 409: perfumes: 5, vizslas: 5, akitas: 4
|Sue 410: trees: 1, akitas: 10, vizslas: 6
|Sue 411: samoyeds: 0, goldfish: 9, perfumes: 7
|Sue 412: goldfish: 7, samoyeds: 10, trees: 1
|Sue 413: samoyeds: 0, pomeranians: 10, vizslas: 6
|Sue 414: children: 2, cars: 10, samoyeds: 2
|Sue 415: trees: 2, goldfish: 8, cars: 0
|Sue 416: samoyeds: 4, goldfish: 9, trees: 2
|Sue 417: trees: 8, akitas: 10, perfumes: 3
|Sue 418: samoyeds: 9, goldfish: 2, cars: 1
|Sue 419: akitas: 2, perfumes: 8, trees: 2
|Sue 420: children: 3, goldfish: 6, perfumes: 5
|Sue 421: akitas: 8, perfumes: 2, samoyeds: 6
|Sue 422: vizslas: 10, akitas: 4, pomeranians: 3
|Sue 423: cats: 8, perfumes: 3, trees: 4
|Sue 424: cars: 2, children: 4, pomeranians: 8
|Sue 425: pomeranians: 4, samoyeds: 2, goldfish: 4
|Sue 426: perfumes: 6, cars: 4, goldfish: 4
|Sue 427: akitas: 0, goldfish: 7, perfumes: 5
|Sue 428: perfumes: 4, cars: 3, akitas: 5
|Sue 429: trees: 0, vizslas: 0, goldfish: 1
|Sue 430: perfumes: 4, vizslas: 2, cars: 7
|Sue 431: goldfish: 7, pomeranians: 8, trees: 0
|Sue 432: goldfish: 7, children: 9, trees: 3
|Sue 433: akitas: 1, vizslas: 10, trees: 2
|Sue 434: perfumes: 2, cars: 4, goldfish: 10
|Sue 435: pomeranians: 6, vizslas: 9, trees: 1
|Sue 436: cars: 9, trees: 0, goldfish: 0
|Sue 437: trees: 1, goldfish: 1, vizslas: 8
|Sue 438: goldfish: 7, samoyeds: 8, children: 2
|Sue 439: children: 1, cats: 7, vizslas: 8
|Sue 440: cats: 2, pomeranians: 6, goldfish: 4
|Sue 441: perfumes: 7, cats: 3, vizslas: 6
|Sue 442: akitas: 4, samoyeds: 5, cars: 2
|Sue 443: akitas: 3, perfumes: 3, cats: 9
|Sue 444: perfumes: 10, akitas: 6, trees: 0
|Sue 445: cars: 5, children: 9, perfumes: 8
|Sue 446: vizslas: 10, cars: 3, perfumes: 5
|Sue 447: children: 9, perfumes: 1, cars: 10
|Sue 448: akitas: 0, goldfish: 8, trees: 3
|Sue 449: cars: 7, akitas: 8, children: 3
|Sue 450: cars: 4, akitas: 9, cats: 0
|Sue 451: perfumes: 4, samoyeds: 5, goldfish: 6
|Sue 452: perfumes: 10, akitas: 1, cars: 7
|Sue 453: trees: 1, goldfish: 3, vizslas: 6
|Sue 454: goldfish: 8, pomeranians: 6, trees: 10
|Sue 455: akitas: 5, vizslas: 8, goldfish: 10
|Sue 456: cats: 5, trees: 4, samoyeds: 0
|Sue 457: perfumes: 8, cars: 0, cats: 3
|Sue 458: akitas: 1, trees: 10, vizslas: 2
|Sue 459: vizslas: 6, akitas: 3, children: 10
|Sue 460: perfumes: 7, trees: 9, goldfish: 8
|Sue 461: children: 6, vizslas: 4, perfumes: 5
|Sue 462: vizslas: 6, akitas: 8, perfumes: 9
|Sue 463: goldfish: 8, cars: 4, trees: 10
|Sue 464: pomeranians: 8, cars: 5, vizslas: 0
|Sue 465: cats: 10, goldfish: 7, akitas: 1
|Sue 466: cats: 2, children: 1, cars: 6
|Sue 467: perfumes: 3, samoyeds: 6, cars: 0
|Sue 468: samoyeds: 10, pomeranians: 6, trees: 2
|Sue 469: children: 2, perfumes: 2, pomeranians: 4
|Sue 470: cats: 1, perfumes: 5, vizslas: 9
|Sue 471: vizslas: 5, perfumes: 2, akitas: 7
|Sue 472: samoyeds: 8, goldfish: 6, cats: 1
|Sue 473: goldfish: 10, perfumes: 9, cars: 4
|Sue 474: samoyeds: 0, cars: 4, vizslas: 4
|Sue 475: trees: 2, cars: 7, akitas: 8
|Sue 476: vizslas: 3, perfumes: 5, goldfish: 1
|Sue 477: cats: 7, cars: 4, trees: 1
|Sue 478: vizslas: 8, akitas: 3, goldfish: 0
|Sue 479: cars: 6, cats: 3, perfumes: 2
|Sue 480: goldfish: 1, children: 9, vizslas: 3
|Sue 481: pomeranians: 5, vizslas: 1, cars: 10
|Sue 482: children: 5, perfumes: 5, cats: 1
|Sue 483: perfumes: 2, goldfish: 7, trees: 6
|Sue 484: akitas: 2, goldfish: 4, perfumes: 10
|Sue 485: samoyeds: 3, goldfish: 0, akitas: 1
|Sue 486: trees: 8, vizslas: 9, goldfish: 0
|Sue 487: goldfish: 8, samoyeds: 0, trees: 0
|Sue 488: perfumes: 7, cars: 5, trees: 0
|Sue 489: vizslas: 3, pomeranians: 2, perfumes: 5
|Sue 490: cars: 5, perfumes: 5, akitas: 5
|Sue 491: children: 8, trees: 1, pomeranians: 4
|Sue 492: pomeranians: 0, akitas: 1, vizslas: 8
|Sue 493: akitas: 10, perfumes: 10, samoyeds: 8
|Sue 494: perfumes: 6, vizslas: 4, cats: 6
|Sue 495: children: 6, pomeranians: 5, samoyeds: 4
|Sue 496: vizslas: 1, trees: 5, akitas: 1
|Sue 497: vizslas: 10, perfumes: 10, pomeranians: 3
|Sue 498: samoyeds: 3, trees: 2, cars: 5
|Sue 499: cats: 6, children: 3, perfumes: 0
|Sue 500: pomeranians: 10, cats: 3, vizslas: 5""".stripMargin
val idPattern = "\\\\Sue (\\\\d+): (.*)".r
val listOfAunts = puzzleInput.split("\\n").map(line => {
val idPattern(auntId, info) = line
Aunt(auntId.toInt, info)
});
val refAunt = Aunt(100000, "children: 3, cats: 7, samoyeds: 2, pomeranians: 3, akitas: 0,vizslas: 0, goldfish: 5, trees: 3, cars: 2, perfumes: 1")
case class Aunt(val id: Int, sueInfo: String) {
val info: Map[String, Int] = sueInfo.split(",").map(pair => {
val key = pair.split(":")(0).trim
val value = pair.split(":")(1).trim
(key, value.toInt)
}).toMap
private val possibleKeys = List("children", "cats", "samoyeds", "pomeranians", "akitas", "vizslas", "goldfish", "trees", "cars", "perfumes")
    def similarity1(other: Aunt): Int =
      possibleKeys.count { key =>
        (info.get(key), other.info.get(key)) match {
          case (Some(mine), Some(theirs)) => mine == theirs
          case _ => false
        }
      }
    def similarity2(other: Aunt): Int =
      possibleKeys.count { key =>
        (info.get(key), other.info.get(key)) match {
          case (Some(ref), Some(cand)) if List("cats", "trees").contains(key) => cand > ref
          case (Some(ref), Some(cand)) if List("pomeranians", "goldfish").contains(key) => cand < ref
          case (Some(ref), Some(cand)) => ref == cand
          case _ => false
        }
      }
}
val partA = listOfAunts.map(aunt =>
(aunt, refAunt.similarity1(aunt))
).maxBy(_._2)._1
println(partA)
val partB = listOfAunts.map(aunt =>
(aunt, refAunt.similarity2(aunt))
).maxBy(_._2)._1
println(partB)
}
| Cs4r/AdventOfCode | src/main/scala/cs4r/labs/learningscala/adventofcode/AdventOfCode16.scala | Scala | gpl-3.0 | 26,997 |
package demo.components.reactsplitpane
import japgolly.scalajs.react._
import japgolly.scalajs.react.vdom.html_<^._
object ReactSplitPaneInfo {
val component = ScalaComponent
.builder[Unit]("ReactSplitPaneInfo")
.render(P => {
<.div(^.cls := "full-width-section")(
<.h3("React Split Pane :"),
<.p("Wrapper for the react-split-pane component (0.1.66)"),
<.a(
^.href := "https://github.com/tomkp/react-split-pane",
"react-split-pane on GitHub"
)
)
})
.build
def apply() = component()
}
| chandu0101/scalajs-react-components | demo/src/main/scala/demo/components/reactsplitpane/ReactSplitPaneInfo.scala | Scala | apache-2.0 | 572 |
package com.twitter.finagle.mysql.integration
import com.twitter.finagle.Mysql
import com.twitter.finagle.param
import com.twitter.finagle.tracing._
import com.twitter.util.Await
import org.junit.runner.RunWith
import org.scalatest.FunSuite
import org.scalatest.junit.JUnitRunner
@RunWith(classOf[JUnitRunner])
class MysqlBuilderTest extends FunSuite with IntegrationClient {
test("clients have granular tracing") {
Trace.enable()
var annotations: List[Annotation] = Nil
val mockTracer = new Tracer {
def record(record: Record) = {
annotations ::= record.annotation
}
def sampleTrace(traceId: TraceId): Option[Boolean] = Some(true)
}
// if we have a local instance of mysql running.
if (isAvailable) {
val username = p.getProperty("username", "<user>")
val password = p.getProperty("password", null)
val db = p.getProperty("db", "test")
val client = Mysql.client
.configured(param.Label("myclient"))
.configured(param.Tracer(mockTracer))
.withCredentials(username, password)
.withDatabase(db)
.newRichClient("localhost:3306")
Await.ready(client.query("SELECT 1"))
Await.ready(client.prepare("SELECT ?")(1))
Await.ready(client.ping())
val mysqlTraces = annotations.collect {
case Annotation.BinaryAnnotation("mysql.query", "SELECT 1") => ()
case Annotation.BinaryAnnotation("mysql.prepare", "SELECT ?") => ()
case Annotation.Message("mysql.PingRequest") => ()
}
assert(mysqlTraces.nonEmpty, "missing traces")
}
}
}
| koshelev/finagle | finagle-mysql/src/test/scala/com/twitter/finagle/mysql/integration/MysqlBuilderTest.scala | Scala | apache-2.0 | 1,633 |
/*******************************************************************************
Copyright (c) 2013-2014, S-Core, KAIST.
All rights reserved.
Use is subject to license terms.
This distribution may include materials developed by third parties.
******************************************************************************/
package kr.ac.kaist.jsaf.analysis.typing.models.Tizen
import kr.ac.kaist.jsaf.analysis.typing.AddressManager._
import kr.ac.kaist.jsaf.analysis.cfg.{CFG, CFGExpr, InternalError}
import kr.ac.kaist.jsaf.analysis.typing.domain.{BoolFalse => F, BoolTrue => T, _}
import kr.ac.kaist.jsaf.analysis.typing.models._
import kr.ac.kaist.jsaf.analysis.typing._
import kr.ac.kaist.jsaf.analysis.typing.domain.Heap
import kr.ac.kaist.jsaf.analysis.typing.domain.Context
import kr.ac.kaist.jsaf.analysis.typing.models.builtin.BuiltinArray
object TIZENseService extends Tizen {
private val name = "seService"
/* predefined locations */
val loc_obj = TIZENtizen.loc_seService
val loc_proto = newSystemRecentLoc(name + "Proto")
val loc_reader: Loc = newSystemLoc("Reader", Old)
val loc_readerarr: Loc = newSystemLoc("ReaderArr", Old)
val loc_session: Loc = newSystemLoc("Session", Old)
val loc_channel: Loc = newSystemLoc("Channel", Old)
override def getInitList(): List[(Loc, List[(String, AbsProperty)])] = List(
(loc_obj, prop_obj), (loc_proto, prop_proto), (loc_reader, prop_reader_ins), (loc_readerarr, prop_readerarr_ins),
(loc_session, prop_session_ins), (loc_channel, prop_channel_ins)
)
/* constructor or object*/
private val prop_obj: List[(String, AbsProperty)] = List(
("@class", AbsConstValue(PropValue(AbsString.alpha("Object")))),
("@proto", AbsConstValue(PropValue(ObjectValue(Value(loc_proto), F, F, F)))),
("@extensible", AbsConstValue(PropValue(T)))
)
/* prototype */
private val prop_proto: List[(String, AbsProperty)] = List(
("@class", AbsConstValue(PropValue(AbsString.alpha("CallbackObject")))),
("@proto", AbsConstValue(PropValue(ObjectValue(Value(ObjProtoLoc), F, F, F)))),
("@extensible", AbsConstValue(PropValue(T))),
("getReaders", AbsBuiltinFunc("tizen.seService.getReaders",2)),
("registerSEListener", AbsBuiltinFunc("tizen.seService.registerSEListener",1)),
("unregisterSEListener", AbsBuiltinFunc("tizen.seService.unregisterSEListener",1)),
("shutdown", AbsBuiltinFunc("tizen.seService.shutdown",0))
)
private val prop_reader_ins: List[(String, AbsProperty)] = List(
("@class", AbsConstValue(PropValue(AbsString.alpha("Object")))),
("@proto", AbsConstValue(PropValue(ObjectValue(TIZENReader.loc_proto, F, F, F)))),
("@extensible", AbsConstValue(PropValue(T))),
("isPresent", AbsConstValue(PropValue(ObjectValue(Value(BoolTop), F, T, T))))
)
private val prop_readerarr_ins: List[(String, AbsProperty)] = List(
("@class", AbsConstValue(PropValue(AbsString.alpha("Array")))),
("@proto", AbsConstValue(PropValue(ObjectValue(BuiltinArray.ProtoLoc, F, F, F)))),
("@extensible", AbsConstValue(PropValue(T))),
("length", AbsConstValue(PropValue(ObjectValue(UInt, T, F, F)))),
(Str_default_number, AbsConstValue(PropValue(ObjectValue(Value(loc_reader), T, T, T))))
)
private val prop_session_ins: List[(String, AbsProperty)] = List(
("@class", AbsConstValue(PropValue(AbsString.alpha("Object")))),
("@proto", AbsConstValue(PropValue(ObjectValue(TIZENSession.loc_proto, F, F, F)))),
("@extensible", AbsConstValue(PropValue(T))),
("isClosed", AbsConstValue(PropValue(ObjectValue(Value(BoolTop), F, T, T))))
)
private val prop_channel_ins: List[(String, AbsProperty)] = List(
("@class", AbsConstValue(PropValue(AbsString.alpha("Object")))),
("@proto", AbsConstValue(PropValue(ObjectValue(TIZENChannel.loc_proto, F, F, F)))),
("@extensible", AbsConstValue(PropValue(T))),
("isBasicChannel", AbsConstValue(PropValue(ObjectValue(Value(BoolTop), F, T, T))))
)
override def getSemanticMap(): Map[String, SemanticFun] = {
Map(
("tizen.seService.getReaders" -> (
(sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
val lset_env = h(SinglePureLocalLoc)("@env")._2._2
val set_addr = lset_env.foldLeft[Set[Address]](Set())((a, l) => a + locToAddr(l))
if (set_addr.size > 1) throw new InternalError("API heap allocation: Size of env address is " + set_addr.size)
val addr_env = (cp._1._1, set_addr.head)
val addr1 = cfg.getAPIAddress(addr_env, 0)
val addr2 = cfg.getAPIAddress(addr_env, 1)
val l_r1 = addrToLoc(addr1, Recent)
val l_r2 = addrToLoc(addr2, Recent)
val (h_1, ctx_1) = Helper.Oldify(h, ctx, addr1)
val (h_2, ctx_2) = Helper.Oldify(h_1, ctx_1, addr2)
val v_1 = getArgValue(h_2, ctx_2, args, "0")
val n_arglen = Operator.ToUInt32(getArgValue(h_2, ctx_2, args, "length"))
val es_1 =
if (v_1._2.exists((l) => Helper.IsCallable(h_2, l) <= F))
Set[WebAPIException](TypeMismatchError)
else TizenHelper.TizenExceptionBot
val o_arr = Helper.NewArrayObject(AbsNumber.alpha(1)).
update("0", PropValue(ObjectValue(Value(TIZENseService.loc_readerarr), T, T, T)))
val h_3 = h_2.update(l_r1, o_arr)
val h_4 = TizenHelper.addCallbackHandler(h_3, AbsString.alpha("ReaderArrSuccessCB"), Value(v_1._2), Value(l_r1))
val (h_5, es_2) = AbsNumber.getUIntSingle(n_arglen) match {
case Some(n) if n == 1 =>
(h_4, TizenHelper.TizenExceptionBot)
case Some(n) if n >= 2 =>
val v_2 = getArgValue(h_4, ctx_2, args, "1")
val es_2 =
if (v_2._2.exists((l) => Helper.IsCallable(h_4, l) <= F))
Set[WebAPIException](TypeMismatchError)
else TizenHelper.TizenExceptionBot
val o_arr2 = Helper.NewArrayObject(AbsNumber.alpha(1)).
update("0", PropValue(ObjectValue(Value(LocSet(TIZENtizen.loc_unknownerr)), T, T, T)))
val h_5 = h_4.update(l_r2, o_arr2)
val h_6 = TizenHelper.addCallbackHandler(h_5, AbsString.alpha("errorCB"), Value(v_2._2), Value(l_r2))
(h_6, es_2)
case _ => (HeapBot, TizenHelper.TizenExceptionBot)
}
val est = Set[WebAPIException](SecurityError)
val (h_e, ctx_e) = TizenHelper.TizenRaiseException(h, ctx, es_1 ++ es_2 ++ est)
((h_5, ctx_2), (he + h_e, ctxe + ctx_e))
}
)),
("tizen.seService.registerSEListener" -> (
(sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
val lset_env = h(SinglePureLocalLoc)("@env")._2._2
val set_addr = lset_env.foldLeft[Set[Address]](Set())((a, l) => a + locToAddr(l))
if (set_addr.size > 1) throw new InternalError("API heap allocation: Size of env address is " + set_addr.size)
val addr_env = (cp._1._1, set_addr.head)
val addr1 = cfg.getAPIAddress(addr_env, 0)
val addr2 = cfg.getAPIAddress(addr_env, 1)
val l_r1 = addrToLoc(addr1, Recent)
val l_r2 = addrToLoc(addr2, Recent)
val (h_1, ctx_1) = Helper.Oldify(h, ctx, addr1)
val (h_2, ctx_2) = Helper.Oldify(h_1, ctx_1, addr2)
val v_1 = getArgValue(h_2, ctx_2, args, "0")
val (h_3, es) = v_1._2.foldLeft((h_2, TizenHelper.TizenExceptionBot))((_he, l) => {
val v1 = Helper.Proto(_he._1, l, AbsString.alpha("onSEReady"))
val v2 = Helper.Proto(_he._1, l, AbsString.alpha("onSENotReady"))
val es1 =
if (v1._2.exists((l) => Helper.IsCallable(_he._1, l) <= F))
Set[WebAPIException](TypeMismatchError)
else TizenHelper.TizenExceptionBot
val es2 =
if (v2._2.exists((l) => Helper.IsCallable(_he._1, l) <= F))
Set[WebAPIException](TypeMismatchError)
else TizenHelper.TizenExceptionBot
val o_arr = Helper.NewArrayObject(AbsNumber.alpha(1)).
update("0", PropValue(ObjectValue(Value(TIZENseService.loc_reader), T, T, T)))
val o_arr1 = Helper.NewArrayObject(AbsNumber.alpha(1)).
update("0", PropValue(ObjectValue(Value(TIZENseService.loc_reader), T, T, T)))
val h_4 = _he._1.update(l_r1, o_arr).update(l_r2, o_arr1)
val h_5 = TizenHelper.addCallbackHandler(h_4, AbsString.alpha("SEChangeListener.onSEReady"), Value(v1._2), Value(l_r1))
val h_6 = TizenHelper.addCallbackHandler(h_5, AbsString.alpha("SEChangeListener.onSENotReady"), Value(v2._2), Value(l_r2))
(h_6, _he._2 ++ es1 ++ es2)
})
val est = Set[WebAPIException](SecurityError, UnknownError)
val (h_e, ctx_e) = TizenHelper.TizenRaiseException(h, ctx, es ++ est)
((Helper.ReturnStore(h_3, Value(UInt)), ctx_2), (he + h_e, ctxe + ctx_e))
}
)),
("tizen.seService.unregisterSEListener" -> (
(sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
val v = getArgValue(h, ctx, args, "0")
val es =
if (v._1._4 </NumTop)
Set[WebAPIException](TypeMismatchError)
else TizenHelper.TizenExceptionBot
val est = Set[WebAPIException](SecurityError, UnknownError, InvalidValuesError)
val (h_e, ctx_e) = TizenHelper.TizenRaiseException(h, ctx, es ++ est)
((h, ctx), (he + h_e, ctxe + ctx_e))
}
)),
("tizen.seService.shutdown" -> (
(sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
val est = Set[WebAPIException](SecurityError, UnknownError)
val (h_e, ctx_e) = TizenHelper.TizenRaiseException(h, ctx, est)
((h, ctx), (he + h_e, ctxe + ctx_e))
}
))
)
}
override def getPreSemanticMap(): Map[String, SemanticFun] = {Map()}
override def getDefMap(): Map[String, AccessFun] = {Map()}
override def getUseMap(): Map[String, AccessFun] = {Map()}
}
object TIZENReader extends Tizen {
private val name = "Reader"
/* predefined locations */
val loc_proto = newSystemRecentLoc(name + "Proto")
override def getInitList(): List[(Loc, List[(String, AbsProperty)])] = List(
(loc_proto, prop_proto)
)
/* prototype */
private val prop_proto: List[(String, AbsProperty)] = List(
("@class", AbsConstValue(PropValue(AbsString.alpha("CallbackObject")))),
("@proto", AbsConstValue(PropValue(ObjectValue(Value(ObjProtoLoc), F, F, F)))),
("@extensible", AbsConstValue(PropValue(T))),
("getName", AbsBuiltinFunc("tizen.Reader.getName",0)),
("openSession", AbsBuiltinFunc("tizen.Reader.openSession",2)),
("closeSessions", AbsBuiltinFunc("tizen.Reader.closeSessions",0))
)
override def getSemanticMap(): Map[String, SemanticFun] = {
Map(
("tizen.Reader.getName" -> (
(sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
val est = Set[WebAPIException](SecurityError, UnknownError)
val (h_e, ctx_e) = TizenHelper.TizenRaiseException(h, ctx, est)
((Helper.ReturnStore(h, Value(StrTop)), ctx), (he + h_e, ctxe + ctx_e))
}
)),
("tizen.Reader.openSession" -> (
(sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
val lset_env = h(SinglePureLocalLoc)("@env")._2._2
val set_addr = lset_env.foldLeft[Set[Address]](Set())((a, l) => a + locToAddr(l))
if (set_addr.size > 1) throw new InternalError("API heap allocation: Size of env address is " + set_addr.size)
val addr_env = (cp._1._1, set_addr.head)
val addr1 = cfg.getAPIAddress(addr_env, 0)
val addr2 = cfg.getAPIAddress(addr_env, 1)
val l_r1 = addrToLoc(addr1, Recent)
val l_r2 = addrToLoc(addr2, Recent)
val (h_1, ctx_1) = Helper.Oldify(h, ctx, addr1)
val (h_2, ctx_2) = Helper.Oldify(h_1, ctx_1, addr2)
val v_1 = getArgValue(h_2, ctx_2, args, "0")
val n_arglen = Operator.ToUInt32(getArgValue(h_2, ctx_2, args, "length"))
val es_1 =
if (v_1._2.exists((l) => Helper.IsCallable(h_2, l) <= F))
Set[WebAPIException](TypeMismatchError)
else TizenHelper.TizenExceptionBot
val o_arr = Helper.NewArrayObject(AbsNumber.alpha(1)).
update("0", PropValue(ObjectValue(Value(TIZENseService.loc_session), T, T, T)))
val h_3 = h_2.update(l_r1, o_arr)
val h_4 = TizenHelper.addCallbackHandler(h_3, AbsString.alpha("SessionSuccessCB"), Value(v_1._2), Value(l_r1))
val (h_5, es_2) = AbsNumber.getUIntSingle(n_arglen) match {
case Some(n) if n == 1 =>
(h_4, TizenHelper.TizenExceptionBot)
case Some(n) if n >= 2 =>
val v_2 = getArgValue(h_4, ctx_2, args, "1")
val es_2 =
if (v_2._2.exists((l) => Helper.IsCallable(h_4, l) <= F))
Set[WebAPIException](TypeMismatchError)
else TizenHelper.TizenExceptionBot
val o_arr2 = Helper.NewArrayObject(AbsNumber.alpha(1)).
update("0", PropValue(ObjectValue(Value(LocSet(TIZENtizen.loc_IOerr) ++ LocSet(TIZENtizen.loc_unknownerr)), T, T, T)))
val h_5 = h_4.update(l_r2, o_arr2)
val h_6 = TizenHelper.addCallbackHandler(h_5, AbsString.alpha("errorCB"), Value(v_2._2), Value(l_r2))
(h_6, es_2)
case _ => (HeapBot, TizenHelper.TizenExceptionBot)
}
val est = Set[WebAPIException](SecurityError)
val (h_e, ctx_e) = TizenHelper.TizenRaiseException(h, ctx, es_1 ++ es_2 ++ est)
((h_5, ctx_2), (he + h_e, ctxe + ctx_e))
}
)),
("tizen.Reader.closeSessions" -> (
(sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
val est = Set[WebAPIException](SecurityError, UnknownError)
val (h_e, ctx_e) = TizenHelper.TizenRaiseException(h, ctx, est)
((h, ctx), (he + h_e, ctxe + ctx_e))
}
))
)
}
override def getPreSemanticMap(): Map[String, SemanticFun] = {Map()}
override def getDefMap(): Map[String, AccessFun] = {Map()}
override def getUseMap(): Map[String, AccessFun] = {Map()}
}
object TIZENSession extends Tizen {
private val name = "Session"
/* predefined locations */
val loc_proto = newSystemRecentLoc(name + "Proto")
override def getInitList(): List[(Loc, List[(String, AbsProperty)])] = List(
(loc_proto, prop_proto)
)
/* prototype */
private val prop_proto: List[(String, AbsProperty)] = List(
("@class", AbsConstValue(PropValue(AbsString.alpha("CallbackObject")))),
("@proto", AbsConstValue(PropValue(ObjectValue(Value(ObjProtoLoc), F, F, F)))),
("@extensible", AbsConstValue(PropValue(T))),
("openBasicChannel", AbsBuiltinFunc("tizen.Session.openBasicChannel",3)),
("openLogicalChannel", AbsBuiltinFunc("tizen.Session.openLogicalChannel",3)),
("getATR", AbsBuiltinFunc("tizen.Session.getATR",0)),
("close", AbsBuiltinFunc("tizen.Session.close",0)),
("closeChannels", AbsBuiltinFunc("tizen.Session.closeChannels",0))
)
override def getSemanticMap(): Map[String, SemanticFun] = {
Map(
("tizen.Session.openBasicChannel" -> (
(sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
val lset_env = h(SinglePureLocalLoc)("@env")._2._2
val set_addr = lset_env.foldLeft[Set[Address]](Set())((a, l) => a + locToAddr(l))
if (set_addr.size > 1) throw new InternalError("API heap allocation: Size of env address is " + set_addr.size)
val addr_env = (cp._1._1, set_addr.head)
val addr1 = cfg.getAPIAddress(addr_env, 0)
val addr2 = cfg.getAPIAddress(addr_env, 1)
val l_r1 = addrToLoc(addr1, Recent)
val l_r2 = addrToLoc(addr2, Recent)
val (h_1, ctx_1) = Helper.Oldify(h, ctx, addr1)
val (h_2, ctx_2) = Helper.Oldify(h_1, ctx_1, addr2)
val v_1 = getArgValue(h_2, ctx_2, args, "0")
val v_2 = getArgValue(h_2, ctx_2, args, "1")
val n_arglen = Operator.ToUInt32(getArgValue(h_2, ctx_2, args, "length"))
val es_1 =
if (v_1._2.exists((l) => Helper.IsArray(h_2, l) <= F))
Set[WebAPIException](TypeMismatchError)
else TizenHelper.TizenExceptionBot
val es_2 = v_1._2.foldLeft(TizenHelper.TizenExceptionBot)((_es, ll) => {
val n_length = Operator.ToUInt32(Helper.Proto(h_2, ll, AbsString.alpha("length")))
val ess = n_length.getAbsCase match {
case AbsBot =>
TizenHelper.TizenExceptionBot
case _ => AbsNumber.getUIntSingle(n_length) match {
case Some(n) => {
val es__ = (0 until n.toInt).foldLeft(TizenHelper.TizenExceptionBot)((_e, i) => {
val vi = Helper.Proto(h_2, ll, AbsString.alpha(i.toString))
val esi =
if (vi._1._4 </ NumTop) Set[WebAPIException](TypeMismatchError)
else TizenHelper.TizenExceptionBot
_e ++ esi
})
es__
}
case _ => {
val vi = Helper.Proto(h_2, ll, AbsString.alpha(Str_default_number))
val esi =
if (vi._1._4 </ NumTop) Set[WebAPIException](TypeMismatchError)
else TizenHelper.TizenExceptionBot
esi
}
}
}
_es ++ ess
})
val es_3 =
if (v_2._2.exists((l) => Helper.IsCallable(h_2, l) <= F))
Set[WebAPIException](TypeMismatchError)
else TizenHelper.TizenExceptionBot
val o_arr = Helper.NewArrayObject(AbsNumber.alpha(1)).
update("0", PropValue(ObjectValue(Value(TIZENseService.loc_channel), T, T, T)))
val h_3 = h_2.update(l_r1, o_arr)
val h_4 = TizenHelper.addCallbackHandler(h_3, AbsString.alpha("ChannelSuccessCB"), Value(v_2._2), Value(l_r1))
val (h_5, es_4) = AbsNumber.getUIntSingle(n_arglen) match {
case Some(n) if n == 2 => (h_4, TizenHelper.TizenExceptionBot)
case Some(n) if n >= 3 =>
val v_3 = getArgValue(h_4, ctx_2, args, "2")
val es1 =
if (v_3._2.exists((l) => Helper.IsCallable(h_4, l) <= F))
Set[WebAPIException](TypeMismatchError)
else TizenHelper.TizenExceptionBot
val o_arr1 = Helper.NewArrayObject(AbsNumber.alpha(1)).
update("0", PropValue(ObjectValue(Value(LocSet(TIZENtizen.loc_unknownerr)), T, T, T)))
val h_5 = h_4.update(l_r2, o_arr1)
val h_6 = TizenHelper.addCallbackHandler(h_5, AbsString.alpha("errorCB"), Value(v_3._2), Value(l_r2))
(h_6, es1)
case _ =>
(HeapBot, TizenHelper.TizenExceptionBot)
}
val est = Set[WebAPIException](SecurityError, InvalidValuesError)
val (h_e, ctx_e) = TizenHelper.TizenRaiseException(h, ctx, es_1 ++ es_2 ++ es_3 ++ es_4 ++ est)
((h_5, ctx_2), (he + h_e, ctxe + ctx_e))
}
)),
("tizen.Session.openLogicalChannel" -> (
(sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
val lset_env = h(SinglePureLocalLoc)("@env")._2._2
val set_addr = lset_env.foldLeft[Set[Address]](Set())((a, l) => a + locToAddr(l))
if (set_addr.size > 1) throw new InternalError("API heap allocation: Size of env address is " + set_addr.size)
val addr_env = (cp._1._1, set_addr.head)
val addr1 = cfg.getAPIAddress(addr_env, 0)
val addr2 = cfg.getAPIAddress(addr_env, 1)
val l_r1 = addrToLoc(addr1, Recent)
val l_r2 = addrToLoc(addr2, Recent)
val (h_1, ctx_1) = Helper.Oldify(h, ctx, addr1)
val (h_2, ctx_2) = Helper.Oldify(h_1, ctx_1, addr2)
val v_1 = getArgValue(h_2, ctx_2, args, "0")
val v_2 = getArgValue(h_2, ctx_2, args, "1")
val n_arglen = Operator.ToUInt32(getArgValue(h_2, ctx_2, args, "length"))
val es_1 =
if (v_1._2.exists((l) => Helper.IsArray(h_2, l) <= F))
Set[WebAPIException](TypeMismatchError)
else TizenHelper.TizenExceptionBot
val es_2 = v_1._2.foldLeft(TizenHelper.TizenExceptionBot)((_es, ll) => {
val n_length = Operator.ToUInt32(Helper.Proto(h_2, ll, AbsString.alpha("length")))
val ess = n_length.getAbsCase match {
case AbsBot =>
TizenHelper.TizenExceptionBot
case _ => AbsNumber.getUIntSingle(n_length) match {
case Some(n) => {
val es__ = (0 until n.toInt).foldLeft(TizenHelper.TizenExceptionBot)((_e, i) => {
val vi = Helper.Proto(h_2, ll, AbsString.alpha(i.toString))
val esi =
if (vi._1._4 </ NumTop) Set[WebAPIException](TypeMismatchError)
else TizenHelper.TizenExceptionBot
_e ++ esi
})
es__
}
case _ => {
val vi = Helper.Proto(h_2, ll, AbsString.alpha(Str_default_number))
val esi =
if (vi._1._4 </ NumTop) Set[WebAPIException](TypeMismatchError)
else TizenHelper.TizenExceptionBot
esi
}
}
}
_es ++ ess
})
val es_3 =
if (v_2._2.exists((l) => Helper.IsCallable(h_2, l) <= F))
Set[WebAPIException](TypeMismatchError)
else TizenHelper.TizenExceptionBot
val o_arr = Helper.NewArrayObject(AbsNumber.alpha(1)).
update("0", PropValue(ObjectValue(Value(TIZENseService.loc_channel), T, T, T)))
val h_3 = h_2.update(l_r1, o_arr)
val h_4 = TizenHelper.addCallbackHandler(h_3, AbsString.alpha("ChannelSuccessCB"), Value(v_2._2), Value(l_r1))
val (h_5, es_4) = AbsNumber.getUIntSingle(n_arglen) match {
case Some(n) if n == 2 => (h_4, TizenHelper.TizenExceptionBot)
case Some(n) if n >= 3 =>
val v_3 = getArgValue(h_4, ctx_2, args, "2")
val es1 =
if (v_3._2.exists((l) => Helper.IsCallable(h_4, l) <= F))
Set[WebAPIException](TypeMismatchError)
else TizenHelper.TizenExceptionBot
val o_arr1 = Helper.NewArrayObject(AbsNumber.alpha(1)).
update("0", PropValue(ObjectValue(Value(LocSet(TIZENtizen.loc_unknownerr)), T, T, T)))
val h_5 = h_4.update(l_r2, o_arr1)
val h_6 = TizenHelper.addCallbackHandler(h_5, AbsString.alpha("errorCB"), Value(v_3._2), Value(l_r2))
(h_6, es1)
case _ =>
(HeapBot, TizenHelper.TizenExceptionBot)
}
val est = Set[WebAPIException](SecurityError, InvalidValuesError)
val (h_e, ctx_e) = TizenHelper.TizenRaiseException(h, ctx, es_1 ++ es_2 ++ es_3 ++ es_4 ++ est)
((h_5, ctx_2), (he + h_e, ctxe + ctx_e))
}
)),
("tizen.Session.getATR" -> (
(sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
val lset_env = h(SinglePureLocalLoc)("@env")._2._2
val set_addr = lset_env.foldLeft[Set[Address]](Set())((a, l) => a + locToAddr(l))
if (set_addr.size > 1) throw new InternalError("API heap allocation: Size of env address is " + set_addr.size)
val addr_env = (cp._1._1, set_addr.head)
val addr1 = cfg.getAPIAddress(addr_env, 0)
val l_r1 = addrToLoc(addr1, Recent)
val (h_1, ctx_1) = Helper.Oldify(h, ctx, addr1)
val o_arr = Helper.NewArrayObject(UInt).
update(Str_default_number, PropValue(ObjectValue(Value(NumTop), T, T, T)))
val h_2 = h_1.update(l_r1, o_arr)
val est = Set[WebAPIException](SecurityError, UnknownError)
val (h_e, ctx_e) = TizenHelper.TizenRaiseException(h, ctx, est)
((Helper.ReturnStore(h_2, Value(l_r1)), ctx_1), (he + h_e, ctxe + ctx_e))
}
)),
("tizen.Session.close" -> (
(sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
val est = Set[WebAPIException](SecurityError, UnknownError)
val (h_e, ctx_e) = TizenHelper.TizenRaiseException(h, ctx, est)
((h, ctx), (he + h_e, ctxe + ctx_e))
}
)),
("tizen.Session.closeChannels" -> (
(sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
val est = Set[WebAPIException](SecurityError, UnknownError)
val (h_e, ctx_e) = TizenHelper.TizenRaiseException(h, ctx, est)
((h, ctx), (he + h_e, ctxe + ctx_e))
}
))
)
}
override def getPreSemanticMap(): Map[String, SemanticFun] = {Map()}
override def getDefMap(): Map[String, AccessFun] = {Map()}
override def getUseMap(): Map[String, AccessFun] = {Map()}
}
object TIZENChannel extends Tizen {
private val name = "Channel"
/* predefined locations */
val loc_proto = newSystemRecentLoc(name + "Proto")
override def getInitList(): List[(Loc, List[(String, AbsProperty)])] = List(
(loc_proto, prop_proto)
)
/* prototype */
private val prop_proto: List[(String, AbsProperty)] = List(
("@class", AbsConstValue(PropValue(AbsString.alpha("CallbackObject")))),
("@proto", AbsConstValue(PropValue(ObjectValue(Value(ObjProtoLoc), F, F, F)))),
("@extensible", AbsConstValue(PropValue(T))),
("close", AbsBuiltinFunc("tizen.Channel.close",0)),
("transmit", AbsBuiltinFunc("tizen.Channel.transmit",3))
)
override def getSemanticMap(): Map[String, SemanticFun] = {
Map(
("tizen.Channel.close" -> (
(sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
val est = Set[WebAPIException](SecurityError, UnknownError)
val (h_e, ctx_e) = TizenHelper.TizenRaiseException(h, ctx, est)
((h, ctx), (he + h_e, ctxe + ctx_e))
}
)),
("tizen.Channel.transmit" -> (
(sem: Semantics, h: Heap, ctx: Context, he: Heap, ctxe: Context, cp: ControlPoint, cfg: CFG, fun: String, args: CFGExpr) => {
val lset_env = h(SinglePureLocalLoc)("@env")._2._2
val set_addr = lset_env.foldLeft[Set[Address]](Set())((a, l) => a + locToAddr(l))
if (set_addr.size > 1) throw new InternalError("API heap allocation: Size of env address is " + set_addr.size)
val addr_env = (cp._1._1, set_addr.head)
val addr1 = cfg.getAPIAddress(addr_env, 0)
val addr2 = cfg.getAPIAddress(addr_env, 1)
val addr3 = cfg.getAPIAddress(addr_env, 2)
val l_r1 = addrToLoc(addr1, Recent)
val l_r2 = addrToLoc(addr2, Recent)
val l_r3 = addrToLoc(addr3, Recent)
val (h_1, ctx_1) = Helper.Oldify(h, ctx, addr1)
val (h_2, ctx_2) = Helper.Oldify(h_1, ctx_1, addr2)
val (h_3, ctx_3) = Helper.Oldify(h_2, ctx_2, addr3)
val v_1 = getArgValue(h_3, ctx_3, args, "0")
val v_2 = getArgValue(h_3, ctx_3, args, "1")
val n_arglen = Operator.ToUInt32(getArgValue(h_3, ctx_3, args, "length"))
val es_1 =
if (v_1._2.exists((l) => Helper.IsArray(h_3, l) <= F))
Set[WebAPIException](TypeMismatchError)
else TizenHelper.TizenExceptionBot
val es_2 = v_1._2.foldLeft(TizenHelper.TizenExceptionBot)((_es, ll) => {
val n_length = Operator.ToUInt32(Helper.Proto(h_3, ll, AbsString.alpha("length")))
val ess = n_length.getAbsCase match {
case AbsBot =>
TizenHelper.TizenExceptionBot
case _ => AbsNumber.getUIntSingle(n_length) match {
case Some(n) => {
val es__ = (0 until n.toInt).foldLeft(TizenHelper.TizenExceptionBot)((_e, i) => {
val vi = Helper.Proto(h_3, ll, AbsString.alpha(i.toString))
val esi =
if (vi._1._4 </ NumTop) Set[WebAPIException](TypeMismatchError)
else TizenHelper.TizenExceptionBot
_e ++ esi
})
es__
}
case _ => {
val vi = Helper.Proto(h_3, ll, AbsString.alpha(Str_default_number))
val esi =
if (vi._1._4 </ NumTop) Set[WebAPIException](TypeMismatchError)
else TizenHelper.TizenExceptionBot
esi
}
}
}
_es ++ ess
})
val es_3 =
if (v_2._2.exists((l) => Helper.IsCallable(h_3, l) <= F))
Set[WebAPIException](TypeMismatchError)
else TizenHelper.TizenExceptionBot
val o_arr = Helper.NewArrayObject(UInt).
update(Str_default_number, PropValue(ObjectValue(Value(UInt), T, T, T)))
val h_4 = h_3.update(l_r1, o_arr)
val o_arr1 = Helper.NewArrayObject(AbsNumber.alpha(1)).
update("0", PropValue(ObjectValue(Value(l_r1), T, T, T)))
val h_5 = h_4.update(l_r2, o_arr1)
val h_6 = TizenHelper.addCallbackHandler(h_5, AbsString.alpha("TransmitSuccessCB"), Value(v_2._2), Value(l_r2))
val (h_7, es_4) = AbsNumber.getUIntSingle(n_arglen) match {
case Some(n) if n == 2 => (h_6, TizenHelper.TizenExceptionBot)
case Some(n) if n >= 3 =>
val v_3 = getArgValue(h_6, ctx_3, args, "2")
val es1 =
if (v_3._2.exists((l) => Helper.IsCallable(h_6, l) <= F))
Set[WebAPIException](TypeMismatchError)
else TizenHelper.TizenExceptionBot
val o_arr2 = Helper.NewArrayObject(AbsNumber.alpha(1)).
update("0", PropValue(ObjectValue(Value(LocSet(TIZENtizen.loc_securityerr) ++ LocSet(TIZENtizen.loc_IOerr) ++ LocSet(TIZENtizen.loc_invalidValueserr) ++ LocSet(TIZENtizen.loc_unknownerr)), T, T, T)))
val h_7 = h_6.update(l_r3, o_arr2)
val h_8 = TizenHelper.addCallbackHandler(h_7, AbsString.alpha("errorCB"), Value(v_3._2), Value(l_r3))
(h_8, es1)
case _ =>
(HeapBot, TizenHelper.TizenExceptionBot)
}
val est = Set[WebAPIException](SecurityError)
val (h_e, ctx_e) = TizenHelper.TizenRaiseException(h, ctx, es_1 ++ es_2 ++ es_3 ++ es_4 ++ est)
((h_7, ctx_3), (he + h_e, ctxe + ctx_e))
}
))
)
}
override def getPreSemanticMap(): Map[String, SemanticFun] = {Map()}
override def getDefMap(): Map[String, AccessFun] = {Map()}
override def getUseMap(): Map[String, AccessFun] = {Map()}
}
| darkrsw/safe | src/main/scala/kr/ac/kaist/jsaf/analysis/typing/models/Tizen/TIZENseService.scala | Scala | bsd-3-clause | 32,598 |
package diameter.Dictionary
/**
 * Common supertype for dictionaries whose entries are exposed as an Iterable of DictionaryObject.
 * Created by edzmbuh on 21/03/2016.
 */
abstract class GenericDictionary extends Iterable[DictionaryObject]
//trait AvpDictionary extends DictionaryBase with Iterable[AvpHeader]
//trait VendorDictionary extends DictionaryBase with Iterable[DictionaryVendor]
//trait CommandDictionary extends DictionaryBase with Iterable[CommandDictionary]
//trait ApplicationDictionary extends DictionaryBase with Iterable[Application]
| dbuhryk/DiameterCoder | Diameter/src/main/scala/diameter/Dictionary/GenericDictionary.scala | Scala | mit | 460 |
package org.keycloak.gatling
import java.util.concurrent.atomic.AtomicInteger
import java.util.concurrent.{Executors, ThreadFactory}
import io.gatling.core.akka.GatlingActorSystem
import io.gatling.core.validation.Success
/**
* @author Radim Vansa <[email protected]>
*/
object Blocking {
GatlingActorSystem.instance.registerOnTermination(() => shutdown())
private val threadPool = Executors.newCachedThreadPool(new ThreadFactory {
val counter = new AtomicInteger();
override def newThread(r: Runnable): Thread =
new Thread(r, "blocking-thread-" + counter.incrementAndGet())
})
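  // Usage sketch (hypothetical caller): hand a blocking call off the Gatling actor thread,
  // e.g. `Blocking(() => someBlockingKeycloakCall())`, where `someBlockingKeycloakCall`
  // stands in for any blocking operation; the work runs on the cached thread pool above.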
def apply(f: () => Unit) = {
threadPool.execute(new Runnable() {
override def run = {
f()
}
})
Success(())
}
def shutdown() = {
threadPool.shutdownNow()
}
}
| keycloak/keycloak | testsuite/performance/tests/src/main/scala/org/keycloak/gatling/Blocking.scala | Scala | apache-2.0 | 816 |
package typeclasses
trait Show {
def show: String
}
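// Usage sketch (hypothetical REPL session): with the implicit conversion below in scope,
//   import typeclasses.Show._
//   val rendered: String = 42.show   // "42"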
object Show {
implicit class ShowInt(x: Int) extends Show {
lazy val show: String = x.toString
}
}
| earldouglas/xsbt-web-plugin | src/sbt-test/container/multi-module-single-webapp/typeclasses/src/main/scala/show.scala | Scala | bsd-3-clause | 165 |
package bad.robot.temperature
import io.circe._
object SensorReading {
implicit def jsonEncoder: Encoder[SensorReading] = new Encoder[SensorReading] {
def apply(sensor: SensorReading): Json = Json.obj(
("name", Json.fromString(sensor.name)),
("temperature", Temperature.encoder(sensor.temperature))
)
}
implicit def jsonDecoder: Decoder[SensorReading] = {
Decoder(cursor => for {
name <- cursor.get[String]("name")
temperature <- cursor.get[Temperature]("temperature")
} yield SensorReading(name, temperature))
}
implicit class ListSensorTemperatureOps(temperatures: List[SensorReading]) {
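    /** Averages a list of readings: an empty list yields SensorReading("Unknown", 0.0),
      * a single reading is returned unchanged, and several readings are combined under
      * the name "Average". */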
def average: SensorReading = temperatures match {
case Nil => SensorReading("Unknown", Temperature(0.0))
case sensor :: Nil => SensorReading(sensor.name, sensor.temperature)
case _ => SensorReading("Average", temperatures.map(_.temperature).reduce(_ + _) / temperatures.length)
}
}
}
case class SensorReading(name: String, temperature: Temperature)
| tobyweston/temperature-machine | src/main/scala/bad/robot/temperature/SensorReading.scala | Scala | apache-2.0 | 1,051 |
package org.bitcoins.wallet.models
import org.bitcoins.core.api.wallet.db.{
AddressRecord,
ScriptPubKeyDb,
SegWitAddressDb
}
import org.bitcoins.core.hd.SegWitHDPath
import org.bitcoins.core.protocol.Bech32Address
import org.bitcoins.core.protocol.script.{
EmptyScriptWitness,
P2WPKHWitnessSPKV0
}
import org.bitcoins.crypto.ECPublicKey
import org.bitcoins.testkit.fixtures.WalletDAOFixture
import org.bitcoins.testkit.wallet.WalletTestUtil
import java.sql.SQLException
class AddressDAOTest extends WalletDAOFixture {
behavior of "AddressDAO"
it should "preserve public key scripts" in { daos =>
val addressDAO = daos.addressDAO
val addr1 = WalletTestUtil.getAddressDb(WalletTestUtil.firstAccountDb,
addressIndex = 0)
val addr2 = WalletTestUtil.getAddressDb(WalletTestUtil.firstAccountDb,
addressIndex = 1)
assert(addr1.scriptPubKey != addr2.scriptPubKey)
for {
created1 <- addressDAO.create(addr1)
created2 <- addressDAO.create(addr2)
found <- addressDAO.findAllAddresses()
} yield {
assert(addr1 == created1)
assert(addr2 == created2)
assert(
Vector(addr1, addr2).sortBy(_.address.toString) == found.sortBy(
_.address.toString))
}
}
it should "fail to insert and read an address into the database without a corresponding public key script" in {
daos =>
val addressDAO = daos.addressDAO
val readF = {
val addressDb =
WalletTestUtil.getAddressDb(WalletTestUtil.firstAccountDb)
addressDAO.create(AddressRecord.fromAddressDb(addressDb, -1))
}
recoverToSucceededIf[SQLException](readF)
}
it should "insert and read an address into the database with a corresponding public key script" in {
daos =>
val addressDAO = daos.addressDAO
for {
createdAddress <- {
val addressDb =
WalletTestUtil.getAddressDb(WalletTestUtil.firstAccountDb)
addressDAO.create(addressDb)
}
readAddress <- {
addressDAO.findAddress(createdAddress.address)
}
} yield assert(readAddress.contains(createdAddress))
}
it should "find by script pub key" in { daos =>
val addressDAO = daos.addressDAO
val addr1 = WalletTestUtil.getAddressDb(WalletTestUtil.firstAccountDb)
val addr2 = WalletTestUtil.getAddressDb(WalletTestUtil.firstAccountDb,
addressIndex = 1)
val addr3 = WalletTestUtil.getAddressDb(WalletTestUtil.firstAccountDb,
addressIndex = 2)
val spks = Vector(addr1.scriptPubKey, addr2.scriptPubKey)
for {
created1 <- addressDAO.create(addr1)
created2 <- addressDAO.create(addr2)
created3 <- addressDAO.create(addr3)
found <- addressDAO.findByScriptPubKeys(spks)
} yield {
assert(found.contains(created1))
assert(found.contains(created2))
assert(!found.contains(created3))
}
}
it must "insert an address into the database whose script is already being watched" in {
daos =>
val spkDAO = daos.scriptPubKeyDAO
val addressDAO = daos.addressDAO
val addrStr = "bc1qfjex5a4m5w0atqrpwad3zj4vkfkuhun46tge9c"
val address = Bech32Address.fromString(addrStr)
val spk = address.scriptPubKey.asInstanceOf[P2WPKHWitnessSPKV0]
//insert the script first
val spkDb = ScriptPubKeyDb(address.scriptPubKey)
val createdSpkF = spkDAO.create(spkDb)
//now try to insert the address in the database
val segwitHdPath: SegWitHDPath =
SegWitHDPath.fromString("m/84'/0'/0'/0/0")
val pubKey: ECPublicKey = ECPublicKey.freshPublicKey
val addressDb = SegWitAddressDb.apply(segwitHdPath,
pubKey,
spk.pubKeyHash,
address,
EmptyScriptWitness,
spk)
for {
createdSpk <- createdSpkF
_ <- addressDAO.create(addressDb)
//make sure we can find it now
foundOpt <- addressDAO.read(address)
} yield {
assert(foundOpt.isDefined)
assert(foundOpt.get.scriptPubKeyId == createdSpk.id.get)
assert(foundOpt.get.address == address)
}
}
it must "upsert an address into the database whose script is already being watched" in {
daos =>
val spkDAO = daos.scriptPubKeyDAO
val addressDAO = daos.addressDAO
val addrStr = "bc1qfjex5a4m5w0atqrpwad3zj4vkfkuhun46tge9c"
val address = Bech32Address.fromString(addrStr)
val spk = address.scriptPubKey.asInstanceOf[P2WPKHWitnessSPKV0]
//insert the script first
val spkDb = ScriptPubKeyDb(address.scriptPubKey)
val createdSpkF = spkDAO.create(spkDb)
//now try to insert the address in the database
val segwitHdPath: SegWitHDPath =
SegWitHDPath.fromString("m/84'/0'/0'/0/0")
val pubKey: ECPublicKey = ECPublicKey.freshPublicKey
val addressDb = SegWitAddressDb.apply(segwitHdPath,
pubKey,
spk.pubKeyHash,
address,
EmptyScriptWitness,
spk)
for {
createdSpk <- createdSpkF
_ <- addressDAO.upsert(addressDb)
//make sure we can find it now
foundOpt <- addressDAO.read(address)
} yield {
assert(foundOpt.isDefined)
assert(foundOpt.get.scriptPubKeyId == createdSpk.id.get)
assert(foundOpt.get.address == address)
}
}
}
| bitcoin-s/bitcoin-s | wallet-test/src/test/scala/org/bitcoins/wallet/models/AddressDAOTest.scala | Scala | mit | 5,853 |
/**
* Name: JsonRender.scala
* Creation: 25.11.2015
* Author: Mikael Mayer ([email protected])
* Comments: Json specifications
*/
import leon.lang._
import leon.annotation._
import leon.collection._
import leon.collection.ListOps._
import leon.lang.synthesis._
object JsonRender {
abstract class JSON
abstract class JCompositeValue extends JSON
abstract class JFinalValue extends JSON
case class JDict(values: Map[String, JSON]) extends JCompositeValue
case class JArray(values: List[JSON]) extends JCompositeValue
case class JString(value: String) extends JFinalValue
case class JBoolean(value: Boolean) extends JFinalValue
case class JInt(value: Int) extends JFinalValue
def JStringToString(j: JString) = "\\"" + StrOps.escape(j.value) + "\\""
def test = json_render(JDict(Map("title" -> JString("\\"test\\""), "flags" -> JArray(List(JInt(1), JBoolean(true))))))
/** Synthesize this function by example to have a JSON serializer. */
def json_render(j: JSON): String = ???[String]
} | epfl-lara/leon | testcases/web/synthesis/27_String_Json.scala | Scala | gpl-3.0 | 1,033 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.oap.adapter
import org.apache.spark.internal.config.ConfigBuilder
import org.apache.spark.sql.internal.SQLConf
object SqlConfAdapter {
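  /** Thin adapter that forwards to SQLConf.buildConf, giving OAP code one place to create ConfigBuilder entries. */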
def buildConf(key: String): ConfigBuilder = SQLConf.buildConf(key)
}
| Intel-bigdata/OAP | oap-cache/oap/src/main/scala/org/apache/spark/sql/oap/adapter/SqlConfAdapter.scala | Scala | apache-2.0 | 1,039 |
package chandu0101.scalajs.rn
import chandu0101.scalajs.rn.apis._
import japgolly.scalajs.react._
import scala.scalajs.js
import scala.scalajs.js.Object
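// Scala.js facade over the React Native JavaScript module: UI components are exposed as
// opaque js.Object constructors, platform APIs as typed facade objects, and
// createClass/createElement/createFactory mirror React's top-level functions.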
trait ReactNative extends js.Object {
//components
val Text: js.Object = js.native
val View: js.Object = js.native
val TextInput: js.Object = js.native
val TouchableWithoutFeedback: js.Object = js.native
val TouchableHighlight: js.Object = js.native
val TouchableOpacity: js.Object = js.native
val ActivityIndicatorIOS: js.Object = js.native
val DatePickerIOS: js.Object = js.native
val Image: js.Object = js.native
val ScrollView: js.Object = js.native
val ListView: js.Object = js.native
val MapView: js.Object = js.native
val Navigator: js.Object = js.native
val NavigatorIOS: js.Object = js.native
val PickerIOS: js.Object = js.native
val SliderIOS: js.Object = js.native
val SwitchIOS: js.Object = js.native
val TabBarItemIOS: js.Object = js.native
val WebView: js.Object = js.native
val TabBarIOS: js.Object = js.native
val SegmentedControlIOS: js.Object = js.native
// apis
val AlertIOS: AlertIOS = js.native
val AppRegistry: AppRegistry = js.native
val StyleSheet: StyleSheet = js.native
val AppStateIOS: AppStateIOS = js.native
val AsyncStorage: AsyncStorageJS = js.native
val CameraRoll: CameraRoll = js.native
val InteractionManager: InteractionManager = js.native
val LinkingIOS: LinkingIOS = js.native
val NetInfo: NetInfo = js.native
val LayoutAnimation: js.Dynamic = js.native
val PixelRatio: PixelRatio = js.native
val PushNotificationIOS: PushNotificationIOS = js.native
val PanResponder: PanResponder = js.native
val StatusBarIOS: js.Dynamic = js.native
val VibrationIOS: VibrationIOS = js.native
val Dimensions: js.Dynamic = js.native
def createClass[P, S, B, N <: TopNode](spec: ReactComponentSpec[P, S, B, N]): ReactComponentType[P, S, B, N] = js.native
def createClass(spec: js.Object): js.Dynamic = js.native
def createFactory[P, S, B, N <: TopNode](t: ReactComponentType[P, S, B, N]): ReactComponentCU[P, S, B, N] = js.native
def createFactory(c: js.Object): js.Dynamic = js.native
def createElement[P, S, B, N <: TopNode](t: ReactComponentType[P, S, B, N]): ReactComponentCU[P, S, B, N] = js.native
def createElement(tag: String, props: Object, children: ReactNode*): ReactDOMElement = js.native
def createElement(tag: js.Object, props: Object, children: ReactNode*): ReactDOMElement = js.native
val addons: js.Dynamic = js.native
  def findNodeHandle(ref: js.Any): js.Object = js.native
}
| beni55/scalajs-react-native | core/src/main/scala/chandu0101/scalajs/rn/ReactNative.scala | Scala | apache-2.0 | 2,581 |
package masterleague4s
package codec
import io.circe.parser.decode
import org.specs2._
import masterleague4s.data._
import Serialized._
import FDecoders._
class TournamentResultDecoderSpec extends Specification {
def is = s2"""
The TournamentResult decoder should
parse page 1 $parse35
"""
def page1string = """{
"count": 42,
"next": "https://api.masterleague.net/tournaments/?format=json&page=2",
"previous": null,
"results": [
{
"id": 35,
"name": "HGC Europe - Open Division",
"start_date": "2017-01-16",
"end_date": "2017-10-15",
"region": 1,
"url": "https://masterleague.net/tournament/35/",
"stages": [
{
"id": 208,
"name": "Cup 9"
},
{
"id": 203,
"name": "Cup 8"
},
{
"id": 192,
"name": "Phase 1 Playoffs"
},
{
"id": 187,
"name": "Cup 7"
},
{
"id": 181,
"name": "Cup 6"
},
{
"id": 177,
"name": "Cup 5"
},
{
"id": 174,
"name": "Cup 4"
},
{
"id": 171,
"name": "Cup 3"
},
{
"id": 169,
"name": "Cup 2"
},
{
"id": 165,
"name": "Cup 1"
}
]
},
{
"id": 36,
"name": "HGC North America - Open Division",
"start_date": "2017-01-24",
"end_date": "2017-10-15",
"region": 2,
"url": "https://masterleague.net/tournament/36/",
"stages": [
{
"id": 210,
"name": "Cup 9"
},
{
"id": 204,
"name": "Cup 8"
},
{
"id": 193,
"name": "Phase 1 Playoffs"
},
{
"id": 191,
"name": "Cup 7"
},
{
"id": 182,
"name": "Cup 6"
},
{
"id": 179,
"name": "Cup 5"
},
{
"id": 176,
"name": "Cup 4"
},
{
"id": 173,
"name": "Cup 3"
},
{
"id": 170,
"name": "Cup 2"
},
{
"id": 166,
"name": "Cup 1"
}
]
},
{
"id": 43,
"name": "HGC Europe - Phase 2",
"start_date": "2017-06-23",
"end_date": "2017-10-08",
"region": 1,
"url": "https://masterleague.net/tournament/43/",
"stages": [
{
"id": 205,
"name": "Group stage 1"
}
]
},
{
"id": 44,
"name": "HGC North America - Phase 2",
"start_date": "2017-06-23",
"end_date": "2017-10-08",
"region": 2,
"url": "https://masterleague.net/tournament/44/",
"stages": [
{
"id": 206,
"name": "Group stage 1"
}
]
},
{
"id": 45,
"name": "HGC Korea - Phase 2",
"start_date": "2017-06-23",
"end_date": "2017-10-08",
"region": 3,
"url": "https://masterleague.net/tournament/45/",
"stages": [
{
"id": 207,
"name": "Group stage 1"
}
]
},
{
"id": 46,
"name": "HGC China - Phase 2",
"start_date": "2017-06-26",
"end_date": "2017-10-08",
"region": 4,
"url": "https://masterleague.net/tournament/46/",
"stages": [
{
"id": 209,
"name": "Group stage 1"
}
]
},
{
"id": 42,
"name": "Mid-Season Brawl",
"start_date": "2017-06-10",
"end_date": "2017-06-19",
"region": null,
"url": "https://masterleague.net/tournament/42/",
"stages": [
{
"id": 202,
"name": "Playoffs"
},
{
"id": 201,
"name": "Group B"
},
{
"id": 200,
"name": "Group A"
}
]
},
{
"id": 32,
"name": "HGC North America - Phase 1",
"start_date": "2016-11-21",
"end_date": "2017-05-28",
"region": 2,
"url": "https://masterleague.net/tournament/32/",
"stages": [
{
"id": 198,
"name": "HGC Crucible"
},
{
"id": 196,
"name": "Playoffs"
},
{
"id": 190,
"name": "Standings"
},
{
"id": 185,
"name": "Group stage 2"
},
{
"id": 168,
"name": "Group stage 1"
},
{
"id": 147,
"name": "Qualifier #3"
},
{
"id": 146,
"name": "Qualifier #2"
},
{
"id": 145,
"name": "Qualifier #1"
}
]
},
{
"id": 33,
"name": "HGC Europe - Phase 1",
"start_date": "2016-11-19",
"end_date": "2017-05-28",
"region": 1,
"url": "https://masterleague.net/tournament/33/",
"stages": [
{
"id": 197,
"name": "HGC Crucible"
},
{
"id": 194,
"name": "Playoffs"
},
{
"id": 189,
"name": "Standings"
},
{
"id": 184,
"name": "Group stage 2"
},
{
"id": 167,
"name": "Group stage 1"
},
{
"id": 144,
"name": "Qualifier #3"
},
{
"id": 143,
"name": "Qualifier #2"
},
{
"id": 142,
"name": "Qualifier #1"
}
]
},
{
"id": 37,
"name": "HGC Korea - Phase 1",
"start_date": "2017-02-03",
"end_date": "2017-05-28",
"region": 3,
"url": "https://masterleague.net/tournament/37/",
"stages": [
{
"id": 199,
"name": "HGC Crucible"
},
{
"id": 195,
"name": "Playoffs"
},
{
"id": 188,
"name": "Standings"
},
{
"id": 186,
"name": "Group stage 2"
},
{
"id": 172,
"name": "Group stage 1"
}
]
},
{
"id": 40,
"name": "HGC China - Phase 1",
"start_date": "2017-04-03",
"end_date": "2017-05-28",
"region": 4,
"url": "https://masterleague.net/tournament/40/",
"stages": [
{
"id": 180,
"name": "Group stage"
}
]
},
{
"id": 41,
"name": "King of the Storm",
"start_date": "2017-03-15",
"end_date": "2017-04-09",
"region": 1,
"url": "https://masterleague.net/tournament/41/",
"stages": [
{
"id": 183,
"name": "Playoff"
}
]
},
{
"id": 39,
"name": "Eastern Clash",
"start_date": "2017-03-17",
"end_date": "2017-03-19",
"region": null,
"url": "https://masterleague.net/tournament/39/",
"stages": [
{
"id": 178,
"name": "Playoff"
}
]
},
{
"id": 38,
"name": "Western Clash",
"start_date": "2017-03-03",
"end_date": "2017-03-05",
"region": null,
"url": "https://masterleague.net/tournament/38/",
"stages": [
{
"id": 175,
"name": "Playoff"
}
]
},
{
"id": 34,
"name": "Gold Series 2016 - Grand Finals",
"start_date": "2016-12-16",
"end_date": "2017-01-08",
"region": 4,
"url": "https://masterleague.net/tournament/34/",
"stages": [
{
"id": 163,
"name": "Playoffs"
},
{
"id": 164,
"name": "Tiebreak"
},
{
"id": 161,
"name": "Group stage"
}
]
},
{
"id": 19,
"name": "ZOTAC Cup",
"start_date": "2015-03-07",
"end_date": "2016-12-17",
"region": 1,
"url": "https://masterleague.net/tournament/19/",
"stages": [
{
"id": 159,
"name": "Monthly #21"
},
{
"id": 158,
"name": "Weekly #70"
},
{
"id": 157,
"name": "Weekly #69"
},
{
"id": 154,
"name": "Monthly #20"
},
{
"id": 150,
"name": "Weekly #68"
},
{
"id": 152,
"name": "Monthly #19"
},
{
"id": 149,
"name": "Weekly #67"
},
{
"id": 148,
"name": "Weekly #66"
},
{
"id": 141,
"name": "Weekly #65"
},
{
"id": 140,
"name": "Weekly #64"
},
{
"id": 139,
"name": "Weekly #63"
},
{
"id": 138,
"name": "Weekly #62"
},
{
"id": 135,
"name": "Monthly #18"
},
{
"id": 127,
"name": "Weekly #61"
},
{
"id": 124,
"name": "Weekly #60"
},
{
"id": 121,
"name": "Weekly #59"
},
{
"id": 117,
"name": "Monthly #17"
},
{
"id": 115,
"name": "Weekly #58"
},
{
"id": 105,
"name": "Weekly #57"
},
{
"id": 104,
"name": "Weekly #56"
},
{
"id": 100,
"name": "Monthly #16"
},
{
"id": 93,
"name": "Weekly #55"
},
{
"id": 91,
"name": "Weekly #54"
},
{
"id": 89,
"name": "Weekly #53"
},
{
"id": 88,
"name": "Weekly #52"
},
{
"id": 78,
"name": "Monthly #15"
},
{
"id": 92,
"name": "Weekly #51"
},
{
"id": 77,
"name": "Monthly #14"
}
]
},
{
"id": 31,
"name": "Gold Club World Championship",
"start_date": "2016-11-11",
"end_date": "2016-12-04",
"region": null,
"url": "https://masterleague.net/tournament/31/",
"stages": [
{
"id": 156,
"name": "Playoffs"
},
{
"id": 155,
"name": "Group Stage"
},
{
"id": 153,
"name": "Chinese Wildcard"
},
{
"id": 151,
"name": "Global Wildcard"
}
]
},
{
"id": 30,
"name": "Fall Global Championship",
"start_date": "2016-10-26",
"end_date": "2016-11-05",
"region": null,
"url": "https://masterleague.net/tournament/30/",
"stages": [
{
"id": 134,
"name": "Playoffs"
},
{
"id": 133,
"name": "Group Stage #2 - Group B"
},
{
"id": 132,
"name": "Group Stage #2 - Group A"
},
{
"id": 131,
"name": "Group Stage #1 - Group B"
},
{
"id": 130,
"name": "Group Stage #1 - Group A"
}
]
},
{
"id": 28,
"name": "North America Nexus Games",
"start_date": "2016-09-23",
"end_date": "2016-10-16",
"region": 2,
"url": "https://masterleague.net/tournament/28/",
"stages": [
{
"id": 137,
"name": "Playoffs"
},
{
"id": 123,
"name": "Qualifier"
}
]
},
{
"id": 27,
"name": "Europe Nexus Games",
"start_date": "2016-09-09",
"end_date": "2016-10-09",
"region": 1,
"url": "https://masterleague.net/tournament/27/",
"stages": [
{
"id": 136,
"name": "Playoffs"
},
{
"id": 122,
"name": "Qualifier (Top 16)"
}
]
}
]
}"""
def parse35 = {
val parseResult = decode[UriApiResult[IdTournament]](page1string)
parseResult.isRight must beTrue
}
}
| martijnhoekstra/masterleague4s | src/test/scala/TournamentResultDecoder.scala | Scala | gpl-3.0 | 7,733 |
package pl.touk.nussknacker.engine.definition
import pl.touk.nussknacker.engine.ModelData
object SignalDispatcher {
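  /** Looks up the signal transformer registered under `signalType`; when found, invokes its
    * factory with the given parameters and process id inside the model's class loader and
    * returns Some(()), otherwise None. */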
def dispatchSignal(modelData: ModelData)(signalType: String, processId: String, parameters: Map[String, AnyRef]): Option[Unit] = {
modelData.processWithObjectsDefinition.signalsWithTransformers.get(signalType).map { case (signalFactory, _) =>
modelData.withThisAsContextClassLoader {
signalFactory.invokeMethod(parameters, None, List(processId))
}
()
}
}
} | TouK/nussknacker | interpreter/src/main/scala/pl/touk/nussknacker/engine/definition/SignalDispatcher.scala | Scala | apache-2.0 | 513 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.table.catalog
import org.apache.flink.api.scala.ExecutionEnvironment
import org.apache.flink.streaming.api.scala.StreamExecutionEnvironment
import org.apache.flink.table.api.scala.{BatchTableEnvironment, StreamTableEnvironment}
import org.apache.flink.table.api.{TableEnvironment, ValidationException}
import org.apache.flink.table.factories.utils.TestCollectionTableFactory
import org.apache.flink.types.Row
import org.junit.Assert.{assertEquals, fail}
import org.junit.runner.RunWith
import org.junit.runners.Parameterized
import org.junit.{Before, Ignore, Test}
import java.util
import org.apache.flink.test.util.AbstractTestBase
import scala.collection.JavaConversions._
/** Test cases for catalog table. */
@RunWith(classOf[Parameterized])
class CatalogTableITCase(isStreaming: Boolean) extends AbstractTestBase {
private val batchExec: ExecutionEnvironment = ExecutionEnvironment.getExecutionEnvironment
private var batchEnv: BatchTableEnvironment = _
private val streamExec: StreamExecutionEnvironment = StreamExecutionEnvironment
.getExecutionEnvironment
private var streamEnv: StreamTableEnvironment = _
private val SOURCE_DATA = List(
toRow(1, "a"),
toRow(2, "b"),
toRow(3, "c")
)
private val DIM_DATA = List(
toRow(1, "aDim"),
toRow(2, "bDim"),
toRow(3, "cDim")
)
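  // Order rows by the concatenation of their field values so expected and actual
  // collections can be compared deterministically after sorting.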
  implicit def rowOrdering: Ordering[Row] = Ordering.by((r: Row) => {
val builder = new StringBuilder
0 until r.getArity foreach(idx => builder.append(r.getField(idx)))
builder.toString()
})
@Before
def before(): Unit = {
batchExec.setParallelism(4)
streamExec.setParallelism(4)
batchEnv = BatchTableEnvironment.create(batchExec)
streamEnv = StreamTableEnvironment.create(streamExec)
TestCollectionTableFactory.reset()
TestCollectionTableFactory.isStreaming = isStreaming
}
  def toRow(args: Any*): Row = {
val row = new Row(args.length)
0 until args.length foreach {
i => row.setField(i, args(i))
}
row
}
def tableEnv: TableEnvironment = {
if (isStreaming) {
streamEnv
} else {
batchEnv
}
}
  def execJob(name: String): Unit = {
    // Streaming and batch currently submit the job the same way, via the TableEnvironment.
    tableEnv.execute(name)
  }
@Test
def testInsertInto(): Unit = {
val sourceData = List(
toRow(1, "1000", 2),
toRow(2, "1", 3),
toRow(3, "2000", 4),
toRow(1, "2", 2),
toRow(2, "3000", 3)
)
TestCollectionTableFactory.initData(sourceData)
val sourceDDL =
"""
|create table t1(
| a int,
| b varchar,
| c int
|) with (
| 'connector' = 'COLLECTION'
|)
""".stripMargin
val sinkDDL =
"""
|create table t2(
| a int,
| b varchar,
| c int
|) with (
| 'connector' = 'COLLECTION'
|)
""".stripMargin
val query =
"""
|insert into t2
|select t1.a, t1.b, (t1.a + 1) as c from t1
""".stripMargin
tableEnv.sqlUpdate(sourceDDL)
tableEnv.sqlUpdate(sinkDDL)
tableEnv.sqlUpdate(query)
execJob("testJob")
assertEquals(sourceData.sorted, TestCollectionTableFactory.RESULT.sorted)
}
@Ignore // need to implement
@Test
def testInsertTargetTableWithComputedColumn(): Unit = {
TestCollectionTableFactory.initData(SOURCE_DATA)
val sourceDDL =
"""
|create table t1(
| a int,
| b varchar,
| c int
|) with (
| 'connector' = 'COLLECTION'
|)
""".stripMargin
val sinkDDL =
"""
|create table t2(
| a int,
| b varchar,
| c as a + 1
|) with (
| 'connector' = 'COLLECTION'
|)
""".stripMargin
val query =
"""
|insert into t2(a, b)
|select t1.a, t1.b from t1
""".stripMargin
tableEnv.sqlUpdate(sourceDDL)
tableEnv.sqlUpdate(sinkDDL)
tableEnv.sqlUpdate(query)
execJob("testJob")
assertEquals(SOURCE_DATA.sorted, TestCollectionTableFactory.RESULT.sorted)
}
@Test
def testInsertWithJoinedSource(): Unit = {
val sourceData = List(
toRow(1, 1000, 2),
toRow(2, 1, 3),
toRow(3, 2000, 4),
toRow(1, 2, 2),
toRow(2, 3000, 3)
)
val expected = List(
toRow(1, 1000, 2, 1),
toRow(1, 2, 2, 1),
toRow(2, 1, 1, 2),
toRow(2, 3000, 1, 2)
)
TestCollectionTableFactory.initData(sourceData)
val sourceDDL =
"""
|create table t1(
| a int,
| b int,
| c int
|) with (
| 'connector' = 'COLLECTION'
|)
""".stripMargin
val sinkDDL =
"""
|create table t2(
| a int,
| b int,
| c int,
| d int
|) with (
| 'connector' = 'COLLECTION'
|)
""".stripMargin
val query =
"""
|insert into t2
|select a.a, a.b, b.a, b.b
| from t1 a
| join t1 b
| on a.a = b.b
""".stripMargin
tableEnv.sqlUpdate(sourceDDL)
tableEnv.sqlUpdate(sinkDDL)
tableEnv.sqlUpdate(query)
execJob("testJob")
assertEquals(expected.sorted, TestCollectionTableFactory.RESULT.sorted)
}
@Test
def testInsertWithAggregateSource(): Unit = {
if (isStreaming) {
return
}
val sourceData = List(
toRow(1, 1000, 2),
toRow(2, 1000, 3),
toRow(3, 2000, 4),
toRow(4, 2000, 5),
toRow(5, 3000, 6)
)
val expected = List(
toRow(3, 1000),
toRow(5, 3000),
toRow(7, 2000)
)
TestCollectionTableFactory.initData(sourceData)
val sourceDDL =
"""
|create table t1(
| a int,
| b int,
| c int
|) with (
| 'connector' = 'COLLECTION'
|)
""".stripMargin
val sinkDDL =
"""
|create table t2(
| a int,
| b int
|) with (
| 'connector' = 'COLLECTION'
|)
""".stripMargin
val query =
"""
|insert into t2
|select sum(a), t1.b from t1 group by t1.b
""".stripMargin
tableEnv.sqlUpdate(sourceDDL)
tableEnv.sqlUpdate(sinkDDL)
tableEnv.sqlUpdate(query)
execJob("testJob")
assertEquals(expected.sorted, TestCollectionTableFactory.RESULT.sorted)
}
@Test @Ignore // need to implement
def testStreamSourceTableWithProctime(): Unit = {
val sourceData = List(
toRow(1, 1000),
toRow(2, 2000),
toRow(3, 3000)
)
TestCollectionTableFactory.initData(sourceData, emitInterval = 1000L)
val sourceDDL =
"""
|create table t1(
| a int,
| b int,
| c as proctime,
| primary key(a)
|) with (
| 'connector' = 'COLLECTION'
|)
""".stripMargin
val sinkDDL =
"""
|create table t2(
| a int,
| b int
|) with (
| 'connector' = 'COLLECTION'
|)
""".stripMargin
val query =
"""
|insert into t2
|select sum(a), sum(b) from t1 group by TUMBLE(c, INTERVAL '1' SECOND)
""".stripMargin
tableEnv.sqlUpdate(sourceDDL)
tableEnv.sqlUpdate(sinkDDL)
tableEnv.sqlUpdate(query)
execJob("testJob")
assertEquals(TestCollectionTableFactory.RESULT.sorted, sourceData.sorted)
}
@Test @Ignore("FLINK-14320") // need to implement
def testStreamSourceTableWithRowtime(): Unit = {
val sourceData = List(
toRow(1, 1000),
toRow(2, 2000),
toRow(3, 3000)
)
TestCollectionTableFactory.initData(sourceData, emitInterval = 1000L)
val sourceDDL =
"""
|create table t1(
| a timestamp(3),
| b bigint,
| WATERMARK FOR a AS a - interval '1' SECOND
|) with (
| 'connector' = 'COLLECTION'
|)
""".stripMargin
val sinkDDL =
"""
|create table t2(
| a timestamp(3),
| b bigint
|) with (
| 'connector' = 'COLLECTION'
|)
""".stripMargin
val query =
"""
|insert into t2
|select a, sum(b) from t1 group by TUMBLE(a, INTERVAL '1' SECOND)
""".stripMargin
tableEnv.sqlUpdate(sourceDDL)
tableEnv.sqlUpdate(sinkDDL)
tableEnv.sqlUpdate(query)
execJob("testJob")
assertEquals(TestCollectionTableFactory.RESULT.sorted, sourceData.sorted)
}
@Test @Ignore // need to implement
def testBatchSourceTableWithProctime(): Unit = {
val sourceData = List(
toRow(1, 1000),
toRow(2, 2000),
toRow(3, 3000)
)
TestCollectionTableFactory.initData(sourceData, emitInterval = 1000L)
val sourceDDL =
"""
|create table t1(
| a int,
| b int,
| c as proctime,
| primary key(a)
|) with (
| 'connector' = 'COLLECTION'
|)
""".stripMargin
val sinkDDL =
"""
|create table t2(
| a int,
| b int
|) with (
| 'connector' = 'COLLECTION'
|)
""".stripMargin
val query =
"""
|insert into t2
|select sum(a), sum(b) from t1 group by TUMBLE(c, INTERVAL '1' SECOND)
""".stripMargin
tableEnv.sqlUpdate(sourceDDL)
tableEnv.sqlUpdate(sinkDDL)
tableEnv.sqlUpdate(query)
execJob("testJob")
assertEquals(TestCollectionTableFactory.RESULT.sorted, sourceData.sorted)
}
@Test @Ignore("FLINK-14320") // need to implement
def testBatchTableWithRowtime(): Unit = {
val sourceData = List(
toRow(1, 1000),
toRow(2, 2000),
toRow(3, 3000)
)
TestCollectionTableFactory.initData(sourceData, emitInterval = 1000L)
val sourceDDL =
"""
|create table t1(
| a timestamp(3),
| b bigint,
| WATERMARK FOR a AS a - interval '1' SECOND
|) with (
| 'connector' = 'COLLECTION'
|)
""".stripMargin
val sinkDDL =
"""
|create table t2(
| a timestamp(3),
| b bigint
|) with (
| 'connector' = 'COLLECTION'
|)
""".stripMargin
val query =
"""
|insert into t2
|select a, sum(b) from t1 group by TUMBLE(a, INTERVAL '1' SECOND)
""".stripMargin
tableEnv.sqlUpdate(sourceDDL)
tableEnv.sqlUpdate(sinkDDL)
tableEnv.sqlUpdate(query)
execJob("testJob")
assertEquals(TestCollectionTableFactory.RESULT.sorted, sourceData.sorted)
}
@Test
def testDropTableWithFullPath(): Unit = {
val ddl1 =
"""
|create table t1(
| a bigint,
| b bigint,
| c varchar
|) with (
| 'connector' = 'COLLECTION'
|)
""".stripMargin
val ddl2 =
"""
|create table t2(
| a bigint,
| b bigint
|) with (
| 'connector' = 'COLLECTION'
|)
""".stripMargin
tableEnv.sqlUpdate(ddl1)
tableEnv.sqlUpdate(ddl2)
assert(tableEnv.listTables().sameElements(Array[String]("t1", "t2")))
tableEnv.sqlUpdate("DROP TABLE default_catalog.default_database.t2")
assert(tableEnv.listTables().sameElements(Array("t1")))
}
@Test
def testDropTableWithPartialPath(): Unit = {
val ddl1 =
"""
|create table t1(
| a bigint,
| b bigint,
| c varchar
|) with (
| 'connector' = 'COLLECTION'
|)
""".stripMargin
val ddl2 =
"""
|create table t2(
| a bigint,
| b bigint
|) with (
| 'connector' = 'COLLECTION'
|)
""".stripMargin
tableEnv.sqlUpdate(ddl1)
tableEnv.sqlUpdate(ddl2)
assert(tableEnv.listTables().sameElements(Array[String]("t1", "t2")))
tableEnv.sqlUpdate("DROP TABLE default_database.t2")
tableEnv.sqlUpdate("DROP TABLE t1")
assert(tableEnv.listTables().isEmpty)
}
@Test(expected = classOf[ValidationException])
def testDropTableWithInvalidPath(): Unit = {
val ddl1 =
"""
|create table t1(
| a bigint,
| b bigint,
| c varchar
|) with (
| 'connector' = 'COLLECTION'
|)
""".stripMargin
tableEnv.sqlUpdate(ddl1)
assert(tableEnv.listTables().sameElements(Array[String]("t1")))
tableEnv.sqlUpdate("DROP TABLE catalog1.database1.t1")
}
@Test
def testDropTableWithInvalidPathIfExists(): Unit = {
val ddl1 =
"""
|create table t1(
| a bigint,
| b bigint,
| c varchar
|) with (
| 'connector' = 'COLLECTION'
|)
""".stripMargin
tableEnv.sqlUpdate(ddl1)
assert(tableEnv.listTables().sameElements(Array[String]("t1")))
tableEnv.sqlUpdate("DROP TABLE IF EXISTS catalog1.database1.t1")
assert(tableEnv.listTables().sameElements(Array[String]("t1")))
}
@Test
def testAlterTable(): Unit = {
val ddl1 =
"""
|create table t1(
| a bigint,
| b bigint,
| c varchar
|) with (
| 'connector' = 'COLLECTION',
| 'k1' = 'v1'
|)
""".stripMargin
tableEnv.sqlUpdate(ddl1)
tableEnv.sqlUpdate("alter table t1 rename to t2")
assert(tableEnv.listTables().sameElements(Array[String]("t2")))
tableEnv.sqlUpdate("alter table t2 set ('k1' = 'a', 'k2' = 'b')")
val expectedProperties = new util.HashMap[String, String]()
expectedProperties.put("connector", "COLLECTION")
expectedProperties.put("k1", "a")
expectedProperties.put("k2", "b")
val properties = tableEnv.getCatalog(tableEnv.getCurrentCatalog).get()
.getTable(new ObjectPath(tableEnv.getCurrentDatabase, "t2"))
.getProperties
assertEquals(expectedProperties, properties)
}
@Test
def testUseCatalog(): Unit = {
tableEnv.registerCatalog("cat1", new GenericInMemoryCatalog("cat1"))
tableEnv.registerCatalog("cat2", new GenericInMemoryCatalog("cat2"))
tableEnv.sqlUpdate("use catalog cat1")
assertEquals("cat1", tableEnv.getCurrentCatalog)
tableEnv.sqlUpdate("use catalog cat2")
assertEquals("cat2", tableEnv.getCurrentCatalog)
}
@Test
def testUseDatabase(): Unit = {
val catalog = new GenericInMemoryCatalog("cat1")
tableEnv.registerCatalog("cat1", catalog)
val catalogDB1 = new CatalogDatabaseImpl(new util.HashMap[String, String](), "db1")
val catalogDB2 = new CatalogDatabaseImpl(new util.HashMap[String, String](), "db2")
catalog.createDatabase("db1", catalogDB1, true)
catalog.createDatabase("db2", catalogDB2, true)
tableEnv.sqlUpdate("use cat1.db1")
assertEquals("db1", tableEnv.getCurrentDatabase)
tableEnv.sqlUpdate("use db2")
assertEquals("db2", tableEnv.getCurrentDatabase)
}
@Test
def testCreateDatabase: Unit = {
tableEnv.registerCatalog("cat1", new GenericInMemoryCatalog("default"))
tableEnv.registerCatalog("cat2", new GenericInMemoryCatalog("default"))
tableEnv.sqlUpdate("use catalog cat1")
tableEnv.sqlUpdate("create database db1 ")
tableEnv.sqlUpdate("create database if not exists db1 ")
try {
tableEnv.sqlUpdate("create database db1 ")
fail("ValidationException expected")
} catch {
case _: ValidationException => //ignore
}
tableEnv.sqlUpdate("create database cat2.db1 comment 'test_comment'" +
" with ('k1' = 'v1', 'k2' = 'v2')")
val database = tableEnv.getCatalog("cat2").get().getDatabase("db1")
assertEquals("test_comment", database.getComment)
assertEquals(2, database.getProperties.size())
val expectedProperty = new util.HashMap[String, String]()
expectedProperty.put("k1", "v1")
expectedProperty.put("k2", "v2")
assertEquals(expectedProperty, database.getProperties)
}
@Test
def testDropDatabase: Unit = {
tableEnv.registerCatalog("cat1", new GenericInMemoryCatalog("default"))
tableEnv.sqlUpdate("use catalog cat1")
tableEnv.sqlUpdate("create database db1")
tableEnv.sqlUpdate("drop database db1")
tableEnv.sqlUpdate("drop database if exists db1")
try {
tableEnv.sqlUpdate("drop database db1")
fail("ValidationException expected")
} catch {
case _: ValidationException => //ignore
}
tableEnv.sqlUpdate("create database db1")
tableEnv.sqlUpdate("use db1")
val ddl1 =
"""
|create table t1(
| a bigint,
| b bigint,
| c varchar
|) with (
| 'connector' = 'COLLECTION'
|)
""".stripMargin
tableEnv.sqlUpdate(ddl1)
val ddl2 =
"""
|create table t2(
| a bigint,
| b bigint,
| c varchar
|) with (
| 'connector' = 'COLLECTION'
|)
""".stripMargin
tableEnv.sqlUpdate(ddl2)
try {
tableEnv.sqlUpdate("drop database db1")
fail("ValidationException expected")
} catch {
case _: ValidationException => //ignore
}
tableEnv.sqlUpdate("drop database db1 cascade")
}
@Test
def testAlterDatabase: Unit = {
tableEnv.registerCatalog("cat1", new GenericInMemoryCatalog("default"))
tableEnv.sqlUpdate("use catalog cat1")
tableEnv.sqlUpdate("create database db1 comment 'db1_comment' with ('k1' = 'v1')")
tableEnv.sqlUpdate("alter database db1 set ('k1' = 'a', 'k2' = 'b')")
val database = tableEnv.getCatalog("cat1").get().getDatabase("db1")
assertEquals("db1_comment", database.getComment)
assertEquals(2, database.getProperties.size())
val expectedProperty = new util.HashMap[String, String]()
expectedProperty.put("k1", "a")
expectedProperty.put("k2", "b")
assertEquals(expectedProperty, database.getProperties)
}
@Test(expected = classOf[ValidationException])
def testCreateViewTwice(): Unit = {
val sourceData = List(
toRow(1, 1000),
toRow(2, 2000),
toRow(3, 3000)
)
TestCollectionTableFactory.initData(sourceData)
val sourceDDL =
"""
|CREATE TABLE T1(
| a int,
| b int
|) with (
| 'connector' = 'COLLECTION'
|)
""".stripMargin
val sinkDDL =
"""
|CREATE TABLE T2(
| a int,
| b int
|) with (
| 'connector' = 'COLLECTION'
|)
""".stripMargin
val viewDDL =
"""
|CREATE VIEW T3(c, d) AS SELECT a, b FROM T1
""".stripMargin
tableEnv.sqlUpdate(sourceDDL)
tableEnv.sqlUpdate(sinkDDL)
tableEnv.sqlUpdate(viewDDL)
tableEnv.sqlUpdate(viewDDL)
}
@Test(expected = classOf[ValidationException])
def testCreateTemporaryViewTwice(): Unit = {
val sourceData = List(
toRow(1, 1000),
toRow(2, 2000),
toRow(3, 3000)
)
TestCollectionTableFactory.initData(sourceData)
val sourceDDL =
"""
|CREATE TABLE T1(
| a int,
| b int
|) with (
| 'connector' = 'COLLECTION'
|)
""".stripMargin
val sinkDDL =
"""
|CREATE TABLE T2(
| a int,
| b int
|) with (
| 'connector' = 'COLLECTION'
|)
""".stripMargin
val viewDDL =
"""
|CREATE TEMPORARY VIEW T3(c, d) AS SELECT a, b FROM T1
""".stripMargin
tableEnv.sqlUpdate(sourceDDL)
tableEnv.sqlUpdate(sinkDDL)
tableEnv.sqlUpdate(viewDDL)
tableEnv.sqlUpdate(viewDDL)
}
@Test
def testCreateViewIfNotExistsTwice(): Unit = {
val sourceData = List(
toRow(1, 1000),
toRow(2, 2000),
toRow(3, 3000)
)
TestCollectionTableFactory.initData(sourceData)
val sourceDDL =
"""
|CREATE TABLE T1(
| a int,
| b int
|) with (
| 'connector' = 'COLLECTION'
|)
""".stripMargin
val sinkDDL =
"""
|CREATE TABLE T2(
| a int,
| b int
|) with (
| 'connector' = 'COLLECTION'
|)
""".stripMargin
val viewDDL =
"""
|CREATE VIEW IF NOT EXISTS T3(c, d) AS SELECT a, b FROM T1
""".stripMargin
val query = "SELECT c, d FROM T3"
tableEnv.sqlUpdate(sourceDDL)
tableEnv.sqlUpdate(sinkDDL)
tableEnv.sqlUpdate(viewDDL)
tableEnv.sqlUpdate(viewDDL)
val result = tableEnv.sqlQuery(query)
result.insertInto("T2")
execJob("testJob")
assertEquals(sourceData.sorted, TestCollectionTableFactory.RESULT.sorted)
}
@Test
def testCreateTemporaryViewIfNotExistsTwice(): Unit = {
val sourceData = List(
toRow(1, 1000),
toRow(2, 2000),
toRow(3, 3000)
)
TestCollectionTableFactory.initData(sourceData)
val sourceDDL =
"""
|CREATE TABLE T1(
| a int,
| b int
|) with (
| 'connector' = 'COLLECTION'
|)
""".stripMargin
val sinkDDL =
"""
|CREATE TABLE T2(
| a int,
| b int
|) with (
| 'connector' = 'COLLECTION'
|)
""".stripMargin
val viewDDL =
"""
|CREATE TEMPORARY VIEW IF NOT EXISTS T3(c, d) AS SELECT a, b FROM T1
""".stripMargin
val query = "SELECT c, d FROM T3"
tableEnv.sqlUpdate(sourceDDL)
tableEnv.sqlUpdate(sinkDDL)
tableEnv.sqlUpdate(viewDDL)
tableEnv.sqlUpdate(viewDDL)
val result = tableEnv.sqlQuery(query)
result.insertInto("T2")
execJob("testJob")
assertEquals(sourceData.sorted, TestCollectionTableFactory.RESULT.sorted)
}
@Test
def testDropViewWithFullPath(): Unit = {
val sourceDDL =
"""
|CREATE TABLE T1(
| a int,
| b int
|) with (
| 'connector' = 'COLLECTION'
|)
""".stripMargin
val view1DDL =
"""
|CREATE VIEW T2(c, d) AS SELECT a, b FROM T1
""".stripMargin
val view2DDL =
"""
|CREATE VIEW T3(x, y) AS SELECT a, b FROM T1
""".stripMargin
tableEnv.sqlUpdate(sourceDDL)
tableEnv.sqlUpdate(view1DDL)
tableEnv.sqlUpdate(view2DDL)
assert(tableEnv.listTables().sameElements(Array[String]("T1", "T2", "T3")))
tableEnv.sqlUpdate("DROP VIEW default_catalog.default_database.T2")
assert(tableEnv.listTables().sameElements(Array[String]("T1", "T3")))
tableEnv.sqlUpdate("DROP VIEW default_catalog.default_database.T3")
assert(tableEnv.listTables().sameElements(Array[String]("T1")))
}
@Test
def testDropViewWithPartialPath(): Unit = {
val sourceDDL =
"""
|CREATE TABLE T1(
| a int,
| b int
|) with (
| 'connector' = 'COLLECTION'
|)
""".stripMargin
val view1DDL =
"""
|CREATE VIEW T2(c, d) AS SELECT a, b FROM T1
""".stripMargin
val view2DDL =
"""
|CREATE VIEW T3(x, y) AS SELECT a, b FROM T1
""".stripMargin
tableEnv.sqlUpdate(sourceDDL)
tableEnv.sqlUpdate(view1DDL)
tableEnv.sqlUpdate(view2DDL)
assert(tableEnv.listTables().sameElements(Array[String]("T1", "T2", "T3")))
tableEnv.sqlUpdate("DROP VIEW T2")
assert(tableEnv.listTables().sameElements(Array[String]("T1", "T3")))
tableEnv.sqlUpdate("DROP VIEW default_database.T3")
assert(tableEnv.listTables().sameElements(Array[String]("T1")))
}
@Test(expected = classOf[ValidationException])
def testDropViewWithInvalidPath(): Unit = {
val sourceDDL =
"""
|CREATE TABLE T1(
| a int,
| b int
|) with (
| 'connector' = 'COLLECTION'
|)
""".stripMargin
val viewDDL =
"""
|CREATE VIEW T2(c, d) AS SELECT a, b FROM T1
""".stripMargin
tableEnv.sqlUpdate(sourceDDL)
tableEnv.sqlUpdate(viewDDL)
assert(tableEnv.listTables().sameElements(Array[String]("T1", "T2")))
tableEnv.sqlUpdate("DROP VIEW default_catalog1.default_database1.T2")
}
@Test
def testDropViewWithInvalidPathIfExists(): Unit = {
val sourceDDL =
"""
|CREATE TABLE T1(
| a int,
| b int
|) with (
| 'connector' = 'COLLECTION'
|)
""".stripMargin
val viewDDL =
"""
|CREATE VIEW T2(c, d) AS SELECT a, b FROM T1
""".stripMargin
tableEnv.sqlUpdate(sourceDDL)
tableEnv.sqlUpdate(viewDDL)
assert(tableEnv.listTables().sameElements(Array[String]("T1", "T2")))
tableEnv.sqlUpdate("DROP VIEW IF EXISTS default_catalog1.default_database1.T2")
assert(tableEnv.listTables().sameElements(Array[String]("T1", "T2")))
}
@Test(expected = classOf[ValidationException])
def testDropViewTwice(): Unit = {
val sourceDDL =
"""
|CREATE TABLE T1(
| a int,
| b int
|) with (
| 'connector' = 'COLLECTION'
|)
""".stripMargin
val viewDDL =
"""
|CREATE VIEW T2(c, d) AS SELECT a, b FROM T1
""".stripMargin
tableEnv.sqlUpdate(sourceDDL)
tableEnv.sqlUpdate(viewDDL)
assert(tableEnv.listTables().sameElements(Array[String]("T1", "T2")))
tableEnv.sqlUpdate("DROP VIEW default_catalog.default_database.T2")
assert(tableEnv.listTables().sameElements(Array[String]("T1")))
tableEnv.sqlUpdate("DROP VIEW default_catalog.default_database.T2")
}
@Test
def testDropViewIfExistsTwice(): Unit = {
val sourceDDL =
"""
|CREATE TABLE T1(
| a int,
| b int
|) with (
| 'connector' = 'COLLECTION'
|)
""".stripMargin
val viewDDL =
"""
|CREATE VIEW T2(c, d) AS SELECT a, b FROM T1
""".stripMargin
tableEnv.sqlUpdate(sourceDDL)
tableEnv.sqlUpdate(viewDDL)
assert(tableEnv.listTables().sameElements(Array[String]("T1", "T2")))
tableEnv.sqlUpdate("DROP VIEW IF EXISTS default_catalog.default_database.T2")
assert(tableEnv.listTables().sameElements(Array[String]("T1")))
tableEnv.sqlUpdate("DROP VIEW IF EXISTS default_catalog.default_database.T2")
assert(tableEnv.listTables().sameElements(Array[String]("T1")))
}
@Test(expected = classOf[ValidationException])
def testDropTemporaryViewTwice(): Unit = {
val sourceDDL =
"""
|CREATE TABLE T1(
| a int,
| b int
|) with (
| 'connector' = 'COLLECTION'
|)
""".stripMargin
val viewDDL =
"""
|CREATE TEMPORARY VIEW T2(c, d) AS SELECT a, b FROM T1
""".stripMargin
tableEnv.sqlUpdate(sourceDDL)
tableEnv.sqlUpdate(viewDDL)
assert(tableEnv.listTemporaryViews().sameElements(Array[String]("T2")))
tableEnv.sqlUpdate("DROP TEMPORARY VIEW default_catalog.default_database.T2")
assert(tableEnv.listTemporaryViews().sameElements(Array[String]()))
tableEnv.sqlUpdate("DROP TEMPORARY VIEW default_catalog.default_database.T2")
}
@Test
def testDropTemporaryViewIfExistsTwice(): Unit = {
val sourceDDL =
"""
|CREATE TABLE T1(
| a int,
| b int
|) with (
| 'connector' = 'COLLECTION'
|)
""".stripMargin
val viewDDL =
"""
|CREATE TEMPORARY VIEW T2(c, d) AS SELECT a, b FROM T1
""".stripMargin
tableEnv.sqlUpdate(sourceDDL)
tableEnv.sqlUpdate(viewDDL)
assert(tableEnv.listTemporaryViews().sameElements(Array[String]("T2")))
tableEnv.sqlUpdate("DROP TEMPORARY VIEW IF EXISTS default_catalog.default_database.T2")
assert(tableEnv.listTemporaryViews().sameElements(Array[String]()))
tableEnv.sqlUpdate("DROP TEMPORARY VIEW IF EXISTS default_catalog.default_database.T2")
assert(tableEnv.listTemporaryViews().sameElements(Array[String]()))
}
}
object CatalogTableITCase {
@Parameterized.Parameters(name = "{0}")
def parameters(): java.util.Collection[Boolean] = {
util.Arrays.asList(true, false)
}
}
| bowenli86/flink | flink-table/flink-table-planner/src/test/scala/org/apache/flink/table/catalog/CatalogTableITCase.scala | Scala | apache-2.0 | 28,734 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql
import org.apache.spark.sql.test.SharedSQLContext
class MiscFunctionsSuite extends QueryTest with SharedSQLContext {
import testImplicits._
test("reflect and java_method") {
val df = Seq((1, "one")).toDF("a", "b")
val className = ReflectClass.getClass.getName.stripSuffix("$")
checkAnswer(
df.selectExpr(
s"reflect('$className', 'method1', a, b)",
s"java_method('$className', 'method1', a, b)"),
Row("m1one", "m1one"))
}
}
object ReflectClass {
def method1(v1: Int, v2: String): String = "m" + v1 + v2
}
| mike0sv/spark | sql/core/src/test/scala/org/apache/spark/sql/MiscFunctionsSuite.scala | Scala | apache-2.0 | 1,387 |
package at.logic.gapt.examples
import org.specs2.mutable.Specification
class SimpleTest extends Specification {
"fol1" in { fol1.ctx.check( fol1.proof ); ok }
}
| gebner/gapt | tests/src/test/scala/at/logic/gapt/examples/SimpleTest.scala | Scala | gpl-3.0 | 165 |
package wp.pigeonapp1
object PigeonModel {
sealed trait Status
case object Landing extends Status
case object Starting extends Status
case class Flying(action: Action) extends Status
case class Standing(action: Action) extends Status
sealed trait Action
  case object Pecking extends Action // to peck
case object Shitting extends Action
case class Pigeon(name: String, age: Int, status: Status)
type PigeonId = Int
var pigeons = Map[PigeonId, Pigeon](
1 -> Pigeon("Lucy", 1, Landing),
2 -> Pigeon("Bob", 2, Flying(Shitting)),
3 -> Pigeon("Carmen", 2, Starting),
4 -> Pigeon("Graham", 2, Standing(Pecking)),
5 -> Pigeon("All", 2, Landing)
)
def nextId(): PigeonId = pigeons.keySet.max + 1
def addPigeon(pigeon: Pigeon): PigeonId = {
val id = nextId()
pigeons = pigeons + (id -> pigeon)
id
}
def removePigeon(): Unit = {
pigeons = pigeons.tail
}
}
| jawp/wicked-playground | modules/frontend/src/main/scala/wp/pigeonapp1/PigeonModel.scala | Scala | mit | 924 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.encoders
import java.math.BigInteger
import java.sql.{Date, Timestamp}
import java.util.Arrays
import scala.collection.mutable.ArrayBuffer
import scala.reflect.runtime.universe.TypeTag
import org.apache.spark.sql.{Encoder, Encoders}
import org.apache.spark.sql.catalyst.{FooClassWithEnum, FooEnum, OptionalData, PrimitiveData}
import org.apache.spark.sql.catalyst.analysis.AnalysisTest
import org.apache.spark.sql.catalyst.dsl.plans._
import org.apache.spark.sql.catalyst.expressions.AttributeReference
import org.apache.spark.sql.catalyst.plans.CodegenInterpretedPlanTest
import org.apache.spark.sql.catalyst.plans.logical.LocalRelation
import org.apache.spark.sql.catalyst.util.ArrayData
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.types._
import org.apache.spark.unsafe.types.{CalendarInterval, UTF8String}
import org.apache.spark.util.ClosureCleaner
case class RepeatedStruct(s: Seq[PrimitiveData])
case class NestedArray(a: Array[Array[Int]]) {
override def hashCode(): Int =
java.util.Arrays.deepHashCode(a.asInstanceOf[Array[AnyRef]])
override def equals(other: Any): Boolean = other match {
case NestedArray(otherArray) =>
java.util.Arrays.deepEquals(
a.asInstanceOf[Array[AnyRef]],
otherArray.asInstanceOf[Array[AnyRef]])
case _ => false
}
}
case class BoxedData(
intField: java.lang.Integer,
longField: java.lang.Long,
doubleField: java.lang.Double,
floatField: java.lang.Float,
shortField: java.lang.Short,
byteField: java.lang.Byte,
booleanField: java.lang.Boolean)
case class RepeatedData(
arrayField: Seq[Int],
arrayFieldContainsNull: Seq[java.lang.Integer],
mapField: scala.collection.Map[Int, Long],
mapFieldNull: scala.collection.Map[Int, java.lang.Long],
structField: PrimitiveData)
/** For testing Kryo serialization based encoder. */
class KryoSerializable(val value: Int) {
override def hashCode(): Int = value
override def equals(other: Any): Boolean = other match {
case that: KryoSerializable => this.value == that.value
case _ => false
}
}
/** For testing Java serialization based encoder. */
class JavaSerializable(val value: Int) extends Serializable {
override def hashCode(): Int = value
override def equals(other: Any): Boolean = other match {
case that: JavaSerializable => this.value == that.value
case _ => false
}
}
/** For testing UDT for a case class */
@SQLUserDefinedType(udt = classOf[UDTForCaseClass])
case class UDTCaseClass(uri: java.net.URI)
class UDTForCaseClass extends UserDefinedType[UDTCaseClass] {
override def sqlType: DataType = StringType
override def serialize(obj: UDTCaseClass): UTF8String = {
UTF8String.fromString(obj.uri.toString)
}
override def userClass: Class[UDTCaseClass] = classOf[UDTCaseClass]
override def deserialize(datum: Any): UDTCaseClass = datum match {
case uri: UTF8String => UDTCaseClass(new java.net.URI(uri.toString))
}
}
case class Bar(i: Any)
case class Foo(i: Bar) extends AnyVal
case class PrimitiveValueClass(wrapped: Int) extends AnyVal
case class ReferenceValueClass(wrapped: ReferenceValueClass.Container) extends AnyVal
object ReferenceValueClass {
case class Container(data: Int)
}
case class IntAndString(i: Int, s: String)
case class StringWrapper(s: String) extends AnyVal
case class ValueContainer(
a: Int,
b: StringWrapper) // a string column
case class IntWrapper(i: Int) extends AnyVal
case class ComplexValueClassContainer(
a: Int,
b: ValueContainer,
c: IntWrapper)
case class SeqOfValueClass(s: Seq[StringWrapper])
case class MapOfValueClassKey(m: Map[IntWrapper, String])
case class MapOfValueClassValue(m: Map[String, StringWrapper])
case class OptionOfValueClassValue(o: Option[StringWrapper])
case class CaseClassWithGeneric[T](generic: T, value: IntWrapper)
class ExpressionEncoderSuite extends CodegenInterpretedPlanTest with AnalysisTest {
OuterScopes.addOuterScope(this)
implicit def encoder[T : TypeTag]: ExpressionEncoder[T] = verifyNotLeakingReflectionObjects {
ExpressionEncoder()
}
// test flat encoders
encodeDecodeTest(false, "primitive boolean")
encodeDecodeTest(-3.toByte, "primitive byte")
encodeDecodeTest(-3.toShort, "primitive short")
encodeDecodeTest(-3, "primitive int")
encodeDecodeTest(-3L, "primitive long")
encodeDecodeTest(-3.7f, "primitive float")
encodeDecodeTest(-3.7, "primitive double")
encodeDecodeTest(java.lang.Boolean.FALSE, "boxed boolean")
encodeDecodeTest(java.lang.Byte.valueOf(-3: Byte), "boxed byte")
encodeDecodeTest(java.lang.Short.valueOf(-3: Short), "boxed short")
encodeDecodeTest(java.lang.Integer.valueOf(-3), "boxed int")
encodeDecodeTest(java.lang.Long.valueOf(-3L), "boxed long")
encodeDecodeTest(java.lang.Float.valueOf(-3.7f), "boxed float")
encodeDecodeTest(java.lang.Double.valueOf(-3.7), "boxed double")
encodeDecodeTest(BigDecimal("32131413.211321313"), "scala decimal")
encodeDecodeTest(new java.math.BigDecimal("231341.23123"), "java decimal")
encodeDecodeTest(BigInt("23134123123"), "scala biginteger")
encodeDecodeTest(new BigInteger("23134123123"), "java BigInteger")
encodeDecodeTest(Decimal("32131413.211321313"), "catalyst decimal")
encodeDecodeTest("hello", "string")
encodeDecodeTest(Date.valueOf("2012-12-23"), "date")
encodeDecodeTest(Timestamp.valueOf("2016-01-29 10:00:00"), "timestamp")
encodeDecodeTest(Array(Timestamp.valueOf("2016-01-29 10:00:00")), "array of timestamp")
encodeDecodeTest(Array[Byte](13, 21, -23), "binary")
encodeDecodeTest(Seq(31, -123, 4), "seq of int")
encodeDecodeTest(Seq("abc", "xyz"), "seq of string")
encodeDecodeTest(Seq("abc", null, "xyz"), "seq of string with null")
encodeDecodeTest(Seq.empty[Int], "empty seq of int")
encodeDecodeTest(Seq.empty[String], "empty seq of string")
encodeDecodeTest(Seq(Seq(31, -123), null, Seq(4, 67)), "seq of seq of int")
encodeDecodeTest(Seq(Seq("abc", "xyz"), Seq[String](null), null, Seq("1", null, "2")),
"seq of seq of string")
encodeDecodeTest(Array(31, -123, 4), "array of int")
encodeDecodeTest(Array("abc", "xyz"), "array of string")
encodeDecodeTest(Array("a", null, "x"), "array of string with null")
encodeDecodeTest(Array.empty[Int], "empty array of int")
encodeDecodeTest(Array.empty[String], "empty array of string")
encodeDecodeTest(Array(Array(31, -123), null, Array(4, 67)), "array of array of int")
encodeDecodeTest(Array(Array("abc", "xyz"), Array[String](null), null, Array("1", null, "2")),
"array of array of string")
encodeDecodeTest(Map(1 -> "a", 2 -> "b"), "map")
encodeDecodeTest(Map(1 -> "a", 2 -> null), "map with null")
encodeDecodeTest(Map(1 -> Map("a" -> 1), 2 -> Map("b" -> 2)), "map of map")
encodeDecodeTest(Map(1 -> IntAndString(1, "a")), "map with case class as value")
encodeDecodeTest(Map(IntAndString(1, "a") -> 1), "map with case class as key")
encodeDecodeTest(Map(IntAndString(1, "a") -> IntAndString(2, "b")),
"map with case class as key and value")
encodeDecodeTest(Tuple1[Seq[Int]](null), "null seq in tuple")
encodeDecodeTest(Tuple1[Map[String, String]](null), "null map in tuple")
encodeDecodeTest(List(1, 2), "list of int")
encodeDecodeTest(List("a", null), "list with String and null")
encodeDecodeTest(
UDTCaseClass(new java.net.URI("http://spark.apache.org/")), "udt with case class")
// Kryo encoders
encodeDecodeTest("hello", "kryo string")(encoderFor(Encoders.kryo[String]))
encodeDecodeTest(new KryoSerializable(15), "kryo object")(
encoderFor(Encoders.kryo[KryoSerializable]))
// Java encoders
encodeDecodeTest("hello", "java string")(encoderFor(Encoders.javaSerialization[String]))
encodeDecodeTest(new JavaSerializable(15), "java object")(
encoderFor(Encoders.javaSerialization[JavaSerializable]))
// test product encoders
private def productTest[T <: Product : ExpressionEncoder](
input: T, useFallback: Boolean = false): Unit = {
encodeDecodeTest(input, input.getClass.getSimpleName, useFallback)
}
case class InnerClass(i: Int)
productTest(InnerClass(1))
encodeDecodeTest(Array(InnerClass(1)), "array of inner class")
encodeDecodeTest(Array(Option(InnerClass(1))), "array of optional inner class")
// holder class to trigger Class.getSimpleName issue
object MalformedClassObject extends Serializable {
case class MalformedNameExample(x: Int)
}
{
OuterScopes.addOuterScope(MalformedClassObject)
encodeDecodeTest(
MalformedClassObject.MalformedNameExample(42),
"nested Scala class should work",
useFallback = true)
}
object OuterLevelWithVeryVeryVeryLongClassName1 {
object OuterLevelWithVeryVeryVeryLongClassName2 {
object OuterLevelWithVeryVeryVeryLongClassName3 {
object OuterLevelWithVeryVeryVeryLongClassName4 {
object OuterLevelWithVeryVeryVeryLongClassName5 {
object OuterLevelWithVeryVeryVeryLongClassName6 {
object OuterLevelWithVeryVeryVeryLongClassName7 {
object OuterLevelWithVeryVeryVeryLongClassName8 {
object OuterLevelWithVeryVeryVeryLongClassName9 {
object OuterLevelWithVeryVeryVeryLongClassName10 {
object OuterLevelWithVeryVeryVeryLongClassName11 {
object OuterLevelWithVeryVeryVeryLongClassName12 {
object OuterLevelWithVeryVeryVeryLongClassName13 {
object OuterLevelWithVeryVeryVeryLongClassName14 {
object OuterLevelWithVeryVeryVeryLongClassName15 {
object OuterLevelWithVeryVeryVeryLongClassName16 {
object OuterLevelWithVeryVeryVeryLongClassName17 {
object OuterLevelWithVeryVeryVeryLongClassName18 {
object OuterLevelWithVeryVeryVeryLongClassName19 {
object OuterLevelWithVeryVeryVeryLongClassName20 {
case class MalformedNameExample(x: Int)
}}}}}}}}}}}}}}}}}}}}
{
OuterScopes.addOuterScope(
OuterLevelWithVeryVeryVeryLongClassName1
.OuterLevelWithVeryVeryVeryLongClassName2
.OuterLevelWithVeryVeryVeryLongClassName3
.OuterLevelWithVeryVeryVeryLongClassName4
.OuterLevelWithVeryVeryVeryLongClassName5
.OuterLevelWithVeryVeryVeryLongClassName6
.OuterLevelWithVeryVeryVeryLongClassName7
.OuterLevelWithVeryVeryVeryLongClassName8
.OuterLevelWithVeryVeryVeryLongClassName9
.OuterLevelWithVeryVeryVeryLongClassName10
.OuterLevelWithVeryVeryVeryLongClassName11
.OuterLevelWithVeryVeryVeryLongClassName12
.OuterLevelWithVeryVeryVeryLongClassName13
.OuterLevelWithVeryVeryVeryLongClassName14
.OuterLevelWithVeryVeryVeryLongClassName15
.OuterLevelWithVeryVeryVeryLongClassName16
.OuterLevelWithVeryVeryVeryLongClassName17
.OuterLevelWithVeryVeryVeryLongClassName18
.OuterLevelWithVeryVeryVeryLongClassName19
.OuterLevelWithVeryVeryVeryLongClassName20)
encodeDecodeTest(
OuterLevelWithVeryVeryVeryLongClassName1
.OuterLevelWithVeryVeryVeryLongClassName2
.OuterLevelWithVeryVeryVeryLongClassName3
.OuterLevelWithVeryVeryVeryLongClassName4
.OuterLevelWithVeryVeryVeryLongClassName5
.OuterLevelWithVeryVeryVeryLongClassName6
.OuterLevelWithVeryVeryVeryLongClassName7
.OuterLevelWithVeryVeryVeryLongClassName8
.OuterLevelWithVeryVeryVeryLongClassName9
.OuterLevelWithVeryVeryVeryLongClassName10
.OuterLevelWithVeryVeryVeryLongClassName11
.OuterLevelWithVeryVeryVeryLongClassName12
.OuterLevelWithVeryVeryVeryLongClassName13
.OuterLevelWithVeryVeryVeryLongClassName14
.OuterLevelWithVeryVeryVeryLongClassName15
.OuterLevelWithVeryVeryVeryLongClassName16
.OuterLevelWithVeryVeryVeryLongClassName17
.OuterLevelWithVeryVeryVeryLongClassName18
.OuterLevelWithVeryVeryVeryLongClassName19
.OuterLevelWithVeryVeryVeryLongClassName20
.MalformedNameExample(42),
"deeply nested Scala class should work",
useFallback = true)
}
productTest(PrimitiveData(1, 1, 1, 1, 1, 1, true))
productTest(
OptionalData(Some(2), Some(2), Some(2), Some(2), Some(2), Some(2), Some(true),
Some(PrimitiveData(1, 1, 1, 1, 1, 1, true)), Some(new CalendarInterval(1, 2, 3))))
productTest(OptionalData(None, None, None, None, None, None, None, None, None))
encodeDecodeTest(Seq(Some(1), None), "Option in array")
encodeDecodeTest(Map(1 -> Some(10L), 2 -> Some(20L), 3 -> None), "Option in map",
useFallback = true)
productTest(BoxedData(1, 1L, 1.0, 1.0f, 1.toShort, 1.toByte, true))
productTest(BoxedData(null, null, null, null, null, null, null))
productTest(RepeatedStruct(PrimitiveData(1, 1, 1, 1, 1, 1, true) :: Nil))
productTest((1, "test", PrimitiveData(1, 1, 1, 1, 1, 1, true)))
productTest(
RepeatedData(
Seq(1, 2),
Seq(Integer.valueOf(1), null, Integer.valueOf(2)),
Map(1 -> 2L),
Map(1 -> null),
PrimitiveData(1, 1, 1, 1, 1, 1, true)))
productTest(NestedArray(Array(Array(1, -2, 3), null, Array(4, 5, -6))), useFallback = true)
productTest(("Seq[(String, String)]",
Seq(("a", "b"))))
productTest(("Seq[(Int, Int)]",
Seq((1, 2))))
productTest(("Seq[(Long, Long)]",
Seq((1L, 2L))))
productTest(("Seq[(Float, Float)]",
Seq((1.toFloat, 2.toFloat))))
productTest(("Seq[(Double, Double)]",
Seq((1.toDouble, 2.toDouble))))
productTest(("Seq[(Short, Short)]",
Seq((1.toShort, 2.toShort))))
productTest(("Seq[(Byte, Byte)]",
Seq((1.toByte, 2.toByte))))
productTest(("Seq[(Boolean, Boolean)]",
Seq((true, false))))
productTest(("ArrayBuffer[(String, String)]",
ArrayBuffer(("a", "b"))))
productTest(("ArrayBuffer[(Int, Int)]",
ArrayBuffer((1, 2))))
productTest(("ArrayBuffer[(Long, Long)]",
ArrayBuffer((1L, 2L))))
productTest(("ArrayBuffer[(Float, Float)]",
ArrayBuffer((1.toFloat, 2.toFloat))))
productTest(("ArrayBuffer[(Double, Double)]",
ArrayBuffer((1.toDouble, 2.toDouble))))
productTest(("ArrayBuffer[(Short, Short)]",
ArrayBuffer((1.toShort, 2.toShort))))
productTest(("ArrayBuffer[(Byte, Byte)]",
ArrayBuffer((1.toByte, 2.toByte))))
productTest(("ArrayBuffer[(Boolean, Boolean)]",
ArrayBuffer((true, false))))
productTest(("Seq[Seq[(Int, Int)]]",
Seq(Seq((1, 2)))))
// test for ExpressionEncoder.tuple
encodeDecodeTest(
1 -> 10L,
"tuple with 2 flat encoders")(
ExpressionEncoder.tuple(ExpressionEncoder[Int], ExpressionEncoder[Long]))
encodeDecodeTest(
(PrimitiveData(1, 1, 1, 1, 1, 1, true), (3, 30L)),
"tuple with 2 product encoders")(
ExpressionEncoder.tuple(ExpressionEncoder[PrimitiveData], ExpressionEncoder[(Int, Long)]))
encodeDecodeTest(
(PrimitiveData(1, 1, 1, 1, 1, 1, true), 3),
"tuple with flat encoder and product encoder")(
ExpressionEncoder.tuple(ExpressionEncoder[PrimitiveData], ExpressionEncoder[Int]))
encodeDecodeTest(
(3, PrimitiveData(1, 1, 1, 1, 1, 1, true)),
"tuple with product encoder and flat encoder")(
ExpressionEncoder.tuple(ExpressionEncoder[Int], ExpressionEncoder[PrimitiveData]))
encodeDecodeTest(
(1, (10, 100L)),
"nested tuple encoder") {
val intEnc = ExpressionEncoder[Int]
val longEnc = ExpressionEncoder[Long]
ExpressionEncoder.tuple(intEnc, ExpressionEncoder.tuple(intEnc, longEnc))
}
// test for value classes
encodeDecodeTest(
PrimitiveValueClass(42), "primitive value class")
encodeDecodeTest(
ReferenceValueClass(ReferenceValueClass.Container(1)), "reference value class")
encodeDecodeTest(StringWrapper("a"), "string value class")
encodeDecodeTest(ValueContainer(1, StringWrapper("b")), "nested value class")
encodeDecodeTest(ValueContainer(1, StringWrapper(null)), "nested value class with null")
encodeDecodeTest(ComplexValueClassContainer(1, ValueContainer(2, StringWrapper("b")),
IntWrapper(3)), "complex value class")
encodeDecodeTest(
Array(IntWrapper(1), IntWrapper(2), IntWrapper(3)),
"array of value class")
encodeDecodeTest(Array.empty[IntWrapper], "empty array of value class")
encodeDecodeTest(
Seq(IntWrapper(1), IntWrapper(2), IntWrapper(3)),
"seq of value class")
encodeDecodeTest(Seq.empty[IntWrapper], "empty seq of value class")
encodeDecodeTest(
Map(IntWrapper(1) -> StringWrapper("a"), IntWrapper(2) -> StringWrapper("b")),
"map with value class")
// test for nested value class collections
encodeDecodeTest(
MapOfValueClassKey(Map(IntWrapper(1)-> "a")),
"case class with map of value class key")
encodeDecodeTest(
MapOfValueClassValue(Map("a"-> StringWrapper("b"))),
"case class with map of value class value")
encodeDecodeTest(
SeqOfValueClass(Seq(StringWrapper("a"))),
"case class with seq of class value")
encodeDecodeTest(
OptionOfValueClassValue(Some(StringWrapper("a"))),
"case class with option of class value")
encodeDecodeTest((StringWrapper("a_1"), StringWrapper("a_2")),
"tuple2 of class value")
encodeDecodeTest((StringWrapper("a_1"), StringWrapper("a_2"), StringWrapper("a_3")),
"tuple3 of class value")
encodeDecodeTest(((StringWrapper("a_1"), StringWrapper("a_2")), StringWrapper("b_2")),
"nested tuple._1 of class value")
encodeDecodeTest((StringWrapper("a_1"), (StringWrapper("b_1"), StringWrapper("b_2"))),
"nested tuple._2 of class value")
encodeDecodeTest(CaseClassWithGeneric(IntWrapper(1), IntWrapper(2)),
"case class with value class in generic parameter")
encodeDecodeTest(Option(31), "option of int")
encodeDecodeTest(Option.empty[Int], "empty option of int")
encodeDecodeTest(Option("abc"), "option of string")
encodeDecodeTest(Option.empty[String], "empty option of string")
productTest(("UDT", new ExamplePoint(0.1, 0.2)))
test("AnyVal class with Any fields") {
val exception = intercept[UnsupportedOperationException](implicitly[ExpressionEncoder[Foo]])
val errorMsg = exception.getMessage
assert(errorMsg.contains("root class: \\"org.apache.spark.sql.catalyst.encoders.Foo\\""))
assert(errorMsg.contains("No Encoder found for Any"))
}
test("nullable of encoder schema") {
def checkNullable[T: ExpressionEncoder](nullable: Boolean*): Unit = {
assert(implicitly[ExpressionEncoder[T]].schema.map(_.nullable) === nullable.toSeq)
}
// test for flat encoders
checkNullable[Int](false)
checkNullable[Option[Int]](true)
checkNullable[java.lang.Integer](true)
checkNullable[String](true)
// test for product encoders
checkNullable[(String, Int)](true, false)
checkNullable[(Int, java.lang.Long)](false, true)
// test for nested product encoders
{
val schema = ExpressionEncoder[(Int, (String, Int))].schema
assert(schema(0).nullable === false)
assert(schema(1).nullable)
assert(schema(1).dataType.asInstanceOf[StructType](0).nullable)
assert(schema(1).dataType.asInstanceOf[StructType](1).nullable === false)
}
// test for tupled encoders
{
val schema = ExpressionEncoder.tuple(
ExpressionEncoder[Int],
ExpressionEncoder[(String, Int)]).schema
assert(schema(0).nullable === false)
assert(schema(1).nullable)
assert(schema(1).dataType.asInstanceOf[StructType](0).nullable)
assert(schema(1).dataType.asInstanceOf[StructType](1).nullable === false)
}
}
test("nullable of encoder serializer") {
def checkNullable[T: Encoder](nullable: Boolean): Unit = {
assert(encoderFor[T].objSerializer.nullable === nullable)
}
// test for flat encoders
checkNullable[Int](false)
checkNullable[Option[Int]](true)
checkNullable[java.lang.Integer](true)
checkNullable[String](true)
}
test("null check for map key: String") {
val toRow = ExpressionEncoder[Map[String, Int]]().createSerializer()
val e = intercept[RuntimeException](toRow(Map(("a", 1), (null, 2))))
assert(e.getMessage.contains("Cannot use null as map key"))
}
test("null check for map key: Integer") {
val toRow = ExpressionEncoder[Map[Integer, String]]().createSerializer()
val e = intercept[RuntimeException](toRow(Map((1, "a"), (null, "b"))))
assert(e.getMessage.contains("Cannot use null as map key"))
}
test("throw exception for tuples with more than 22 elements") {
val encoders = (0 to 22).map(_ => Encoders.scalaInt.asInstanceOf[ExpressionEncoder[_]])
val e = intercept[UnsupportedOperationException] {
ExpressionEncoder.tuple(encoders)
}
assert(e.getMessage.contains("tuple with more than 22 elements are not supported"))
}
encodeDecodeTest((1, FooEnum.E1), "Tuple with Int and scala Enum")
encodeDecodeTest((null, FooEnum.E1, FooEnum.E2), "Tuple with Null and scala Enum")
encodeDecodeTest(Seq(FooEnum.E1, null), "Seq with scala Enum")
encodeDecodeTest(Map("key" -> FooEnum.E1), "Map with String key and scala Enum",
useFallback = true)
encodeDecodeTest(Map(FooEnum.E1 -> "value"), "Map with scala Enum key and String value",
useFallback = true)
encodeDecodeTest(FooClassWithEnum(1, FooEnum.E1), "case class with Int and scala Enum")
encodeDecodeTest(FooEnum.E1, "scala Enum")
// Scala / Java big decimals ----------------------------------------------------------
encodeDecodeTest(BigDecimal(("9" * 20) + "." + "9" * 18),
"scala decimal within precision/scale limit")
encodeDecodeTest(new java.math.BigDecimal(("9" * 20) + "." + "9" * 18),
"java decimal within precision/scale limit")
encodeDecodeTest(-BigDecimal(("9" * 20) + "." + "9" * 18),
"negative scala decimal within precision/scale limit")
encodeDecodeTest(new java.math.BigDecimal(("9" * 20) + "." + "9" * 18).negate,
"negative java decimal within precision/scale limit")
testOverflowingBigNumeric(BigDecimal("1" * 21), "scala big decimal")
testOverflowingBigNumeric(new java.math.BigDecimal("1" * 21), "java big decimal")
testOverflowingBigNumeric(-BigDecimal("1" * 21), "negative scala big decimal")
testOverflowingBigNumeric(new java.math.BigDecimal("1" * 21).negate, "negative java big decimal")
testOverflowingBigNumeric(BigDecimal(("1" * 21) + ".123"),
"scala big decimal with fractional part")
testOverflowingBigNumeric(new java.math.BigDecimal(("1" * 21) + ".123"),
"java big decimal with fractional part")
testOverflowingBigNumeric(BigDecimal(("1" * 21) + "." + "9999" * 100),
"scala big decimal with long fractional part")
testOverflowingBigNumeric(new java.math.BigDecimal(("1" * 21) + "." + "9999" * 100),
"java big decimal with long fractional part")
// Scala / Java big integers ----------------------------------------------------------
encodeDecodeTest(BigInt("9" * 38), "scala big integer within precision limit")
encodeDecodeTest(new BigInteger("9" * 38), "java big integer within precision limit")
encodeDecodeTest(-BigInt("9" * 38),
"negative scala big integer within precision limit")
encodeDecodeTest(new BigInteger("9" * 38).negate(),
"negative java big integer within precision limit")
testOverflowingBigNumeric(BigInt("1" * 39), "scala big int")
testOverflowingBigNumeric(new BigInteger("1" * 39), "java big integer")
testOverflowingBigNumeric(-BigInt("1" * 39), "negative scala big int")
testOverflowingBigNumeric(new BigInteger("1" * 39).negate, "negative java big integer")
testOverflowingBigNumeric(BigInt("9" * 100), "scala very large big int")
testOverflowingBigNumeric(new BigInteger("9" * 100), "java very big int")
private def testOverflowingBigNumeric[T: TypeTag](bigNumeric: T, testName: String): Unit = {
Seq(true, false).foreach { ansiEnabled =>
testAndVerifyNotLeakingReflectionObjects(
s"overflowing $testName, ansiEnabled=$ansiEnabled") {
withSQLConf(
SQLConf.ANSI_ENABLED.key -> ansiEnabled.toString
) {
// Need to construct Encoder here rather than implicitly resolving it
// so that SQLConf changes are respected.
val encoder = ExpressionEncoder[T]()
val toRow = encoder.createSerializer()
if (!ansiEnabled) {
val fromRow = encoder.resolveAndBind().createDeserializer()
val convertedBack = fromRow(toRow(bigNumeric))
assert(convertedBack === null)
} else {
val e = intercept[RuntimeException] {
toRow(bigNumeric)
}
assert(e.getMessage.contains("Error while encoding"))
assert(e.getCause.getClass === classOf[ArithmeticException])
}
}
}
}
}
private def encodeDecodeTest[T : ExpressionEncoder](
input: T,
testName: String,
useFallback: Boolean = false): Unit = {
testAndVerifyNotLeakingReflectionObjects(s"encode/decode for $testName: $input", useFallback) {
val encoder = implicitly[ExpressionEncoder[T]]
// Make sure encoder is serializable.
ClosureCleaner.clean((s: String) => encoder.getClass.getName)
val row = encoder.createSerializer().apply(input)
val schema = encoder.schema.toAttributes
val boundEncoder = encoder.resolveAndBind()
val convertedBack = try boundEncoder.createDeserializer().apply(row) catch {
case e: Exception =>
fail(
s"""Exception thrown while decoding
|Converted: $row
|Schema: ${schema.mkString(",")}
|${encoder.schema.treeString}
|
|Encoder:
|$boundEncoder
|
""".stripMargin, e)
}
// Test the correct resolution of serialization / deserialization.
val attr = AttributeReference("obj", encoder.deserializer.dataType)()
val plan = LocalRelation(attr).serialize[T].deserialize[T]
assertAnalysisSuccess(plan)
val isCorrect = (input, convertedBack) match {
case (b1: Array[Byte], b2: Array[Byte]) => Arrays.equals(b1, b2)
case (b1: Array[Int], b2: Array[Int]) => Arrays.equals(b1, b2)
case (b1: Array[_], b2: Array[_]) =>
Arrays.deepEquals(b1.asInstanceOf[Array[AnyRef]], b2.asInstanceOf[Array[AnyRef]])
case (left: Comparable[_], right: Comparable[_]) =>
left.asInstanceOf[Comparable[Any]].compareTo(right) == 0
case _ => input == convertedBack
}
if (!isCorrect) {
val types = convertedBack match {
case c: Product =>
c.productIterator.filter(_ != null).map(_.getClass.getName).mkString(",")
case other => other.getClass.getName
}
val encodedData = try {
row.toSeq(encoder.schema).zip(schema).map {
case (a: ArrayData, AttributeReference(_, ArrayType(et, _), _, _)) =>
a.toArray[Any](et).toSeq
case (other, _) =>
other
}.mkString("[", ",", "]")
} catch {
case e: Throwable => s"Failed to toSeq: $e"
}
fail(
s"""Encoded/Decoded data does not match input data
|
|in: $input
|out: $convertedBack
|types: $types
|
|Encoded Data: $encodedData
|Schema: ${schema.mkString(",")}
|${encoder.schema.treeString}
|
|fromRow Expressions:
|${boundEncoder.deserializer.treeString}
""".stripMargin)
}
}
}
/**
* Verify the size of scala.reflect.runtime.JavaUniverse.undoLog before and after `func` to
* ensure we don't leak Scala reflection garbage.
*
* @see org.apache.spark.sql.catalyst.ScalaReflection.cleanUpReflectionObjects
*/
private def verifyNotLeakingReflectionObjects[T](func: => T): T = {
def undoLogSize: Int = {
scala.reflect.runtime.universe
.asInstanceOf[scala.reflect.runtime.JavaUniverse].undoLog.log.size
}
val previousUndoLogSize = undoLogSize
val r = func
assert(previousUndoLogSize == undoLogSize)
r
}
private def testAndVerifyNotLeakingReflectionObjects(
testName: String, useFallback: Boolean = false)(testFun: => Any): Unit = {
if (useFallback) {
testFallback(testName) {
verifyNotLeakingReflectionObjects(testFun)
}
} else {
test(testName) {
verifyNotLeakingReflectionObjects(testFun)
}
}
}
}
| chuckchen/spark | sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/encoders/ExpressionEncoderSuite.scala | Scala | apache-2.0 | 29,775 |
package reader.connectors
import reader.data.FeedStore
import reader.data.XmlParser
import reader.network.Fetcher
import reader.network.UrlChecker
import rescala._
trait EventMediator {
def mediate(fetcher: Fetcher,
parser: XmlParser,
store: FeedStore,
checker: UrlChecker): Unit
}
/**
 * The CentralizedEvents object connects the fetcher, the parser and the feed store,
 * as they have no knowledge about each other.
 * Specifically, CentralizedEvents uses the classes as follows:
 * 1. the fetcher fetches an XML feed
 * 2. the parser is connected to the rssFetched event of the fetcher and parses the data
 * 3. the store is connected to events which are triggered after the parser has parsed a channel or an item
* 4. if the checker has a valid (checked) url then it is added to the fetcher
*/
object CentralizedEvents extends EventMediator {
def mediate(fetcher: Fetcher, parser: XmlParser, store: FeedStore, checker: UrlChecker): Unit = {
fetcher.rssFetched += { case (xml, url) => parser.parseRSS(xml, url) } //#HDL
}
}
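// Minimal usage sketch (illustrative only, not part of the original sources): the concrete
// constructors of Fetcher, XmlParser, FeedStore and UrlChecker are assumed here, so the
// example is kept as comments and only shows the wiring order described above.
//
//   val fetcher = new Fetcher()
//   val parser  = new XmlParser()
//   val store   = new FeedStore()
//   val checker = new UrlChecker()
//   CentralizedEvents.mediate(fetcher, parser, store, checker)
//   SimpleReporter.mediate(fetcher, parser, store, checker)   // optional: log events to the console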
object SimpleReporter extends EventMediator {
def mediate(fetcher: Fetcher, parser: XmlParser, store: FeedStore, checker: UrlChecker): Unit = {
store.channels.changed += { x => println("Channels in store changed. Size: " + x.size) } //#HDL //#IF
fetcher.rssFetched += { _ => println("New content fetched") } //#HDL
parser.channelParsed += { _ => println("A channel was parsed") } //#HDL
parser.itemParsed += { _ => println("An item was parsed") } //#HDL
fetcher.state.changed += println _ //#IF //#HDL
}
}
| volkc/REScala | Examples/RSSReader/ReactiveScalaReader.Reactive/src/main/scala/reader/connectors/EventMediators.scala | Scala | apache-2.0 | 1,622 |
import scala.reflect.runtime.universe._
import scala.reflect.macros.blackbox.Context
object Macros {
def impl[T: c.WeakTypeTag](c: Context)(foo: c.Expr[T]): c.Expr[Unit] = {
import c.universe._
reify { println(c.Expr[String](Literal(Constant(implicitly[c.WeakTypeTag[T]].toString))).splice) }
}
def foo[T](foo: T) = macro impl[T]
} | yusuke2255/dotty | tests/disabled/macro/run/macro-undetparams-macroitself/Impls_Macros_1.scala | Scala | bsd-3-clause | 347 |
// Databricks notebook source exported at Fri, 3 Jul 2015 23:14:51 UTC
1 - 1 + 3
// COMMAND ----------
lala | brkyvz/git-rest-api | notebooks/name with space.scala | Scala | apache-2.0 | 109 |
/*
* Copyright (C) 2017 HAT Data Exchange Ltd
* SPDX-License-Identifier: AGPL-3.0
*
* This file is part of the Hub of All Things project (HAT).
*
* HAT is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License
* as published by the Free Software Foundation, version 3 of
* the License.
*
* HAT is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See
* the GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General
* Public License along with this program. If not, see
* <http://www.gnu.org/licenses/>.
*
* Written by Andrius Aucinas <[email protected]>
* 4 / 2017
*/
package org.hatdex.hat.api.service.monitoring
import org.hatdex.hat.api.models._
import org.hatdex.hat.api.service.richData.JsonDataTransformer
import org.joda.time.DateTime
import play.api.Logger
import play.api.libs.json.Reads._
import play.api.libs.json.{ JsArray, JsValue, Json, _ }
case class EndpointQueryException(
message: String = "",
cause: Throwable = None.orNull)
extends Exception(message, cause)
object EndpointSubscriberService {
private val logger = Logger(this.getClass)
def matchesBundle(
data: EndpointData,
bundle: EndpointDataBundle
): Boolean = {
val endpointQueries = bundle.flatEndpointQueries
.filter(_.endpoint == data.endpoint)
endpointQueries collectFirst {
case q if q.filters.isEmpty => true
case q if q.filters.exists(dataMatchesFilters(data, _)) => true
} getOrElse {
false
}
}
private implicit val dateReads: Reads[DateTime] =
JodaReads.jodaDateReads("yyyy-MM-dd'T'HH:mm:ssZ")
private def dataMatchesFilters(
data: EndpointData,
filters: Seq[EndpointQueryFilter]
): Boolean = {
logger.debug("Checking if data matches provided filters")
filters.exists { f =>
data.data
.transform(JsonDataTransformer.parseJsPath(f.field).json.pick)
.fold(
invalid = { _ =>
false
},
valid = { fieldData =>
val data = f.transformation map {
case _: FieldTransformation.Identity =>
fieldData
case trans: FieldTransformation.DateTimeExtract =>
Json.toJson(
dateTimeExtractPart(
fieldData.as[DateTime](dateReads),
trans.part
)
)
case trans: FieldTransformation.TimestampExtract =>
Json.toJson(
dateTimeExtractPart(
new DateTime(fieldData.as[Long] * 1000L),
trans.part
)
)
case trans =>
throw EndpointQueryException(
s"Invalid field transformation `${trans.getClass.getName}` for ongoing tracking"
)
} getOrElse {
fieldData
}
f.operator match {
case op: FilterOperator.In => jsContains(op.value, data)
case op: FilterOperator.Contains => jsContains(data, op.value)
case op: FilterOperator.Between =>
jsLessThanOrEqual(op.lower, data) && jsLessThanOrEqual(
data,
op.upper
)
case op =>
throw EndpointQueryException(
s"Invalid match operator `${op.getClass.getName}` for ongoing tracking"
)
}
}
)
}
}
private def dateTimeExtractPart(
d: DateTime,
part: String
): Int = {
part match {
case "milliseconds" => d.getMillisOfSecond
case "second" => d.getSecondOfMinute
case "minute" => d.getMinuteOfDay
case "hour" => d.getHourOfDay
case "day" => d.getDayOfMonth
case "week" => d.getWeekOfWeekyear
case "month" => d.getMonthOfYear
case "year" => d.getYear
case "decade" => d.getYear / 10
case "century" => d.getCenturyOfEra
case "dow" => d.getDayOfWeek
case "doy" => d.getDayOfYear
case "epoch" => (d.getMillis / 1000).toInt
}
}
private def jsContains(
contains: JsValue,
contained: JsValue
): Boolean = {
(contains, contained) match {
case (a: JsObject, b: JsObject) => b.fieldSet.subsetOf(a.fieldSet)
case (a: JsArray, b: JsArray) => a.value.containsSlice(b.value)
case (a: JsArray, b: JsValue) => a.value.contains(b)
case (a: JsValue, b: JsValue) => a == b
case _ => false
}
}
private def jsLessThanOrEqual(
a: JsValue,
b: JsValue
): Boolean = {
(a, b) match {
case (aa: JsNumber, bb: JsNumber) => aa.value <= bb.value
case (aa: JsString, bb: JsString) => aa.value <= bb.value
case _ => false
}
}
}
| Hub-of-all-Things/HAT2.0 | hat/app/org/hatdex/hat/api/service/monitoring/EndpointSubscriberService.scala | Scala | agpl-3.0 | 5,218 |
package drt.client.components
import diode.data.Pot
import drt.client.actions.Actions._
import drt.client.components.FixedPoints._
import drt.client.logger.{Logger, LoggerFactory}
import drt.client.modules.GoogleEventTracker
import drt.client.services.JSDateConversions._
import drt.client.services._
import drt.shared.Terminals.Terminal
import drt.shared._
import io.kinoplan.scalajs.react.material.ui.core.MuiButton
import japgolly.scalajs.react.component.Scala.{Component, Unmounted}
import japgolly.scalajs.react.vdom.all.onClick
import japgolly.scalajs.react.vdom.html_<^._
import japgolly.scalajs.react.vdom.{TagOf, all, html_<^}
import japgolly.scalajs.react.{CtorType, _}
import org.scalajs.dom.html.{Anchor, Div, Table}
import org.scalajs.dom.raw.HTMLElement
import uk.gov.homeoffice.drt.auth.LoggedInUser
import uk.gov.homeoffice.drt.auth.Roles.StaffEdit
import java.util.UUID
import scala.collection.immutable.NumericRange
import scala.util.Success
object TerminalStaffing {
val log: Logger = LoggerFactory.getLogger(getClass.getName)
val oneMinute = 60000L
case class Props(
terminal: Terminal,
potShifts: Pot[ShiftAssignments],
potFixedPoints: Pot[FixedPointAssignments],
potStaffMovements: Pot[StaffMovements],
airportConfig: AirportConfig,
loggedInUser: LoggedInUser,
viewMode: ViewMode
)
implicit val propsReuse: Reusability[Props] = Reusability.by(p => {
(p.potShifts.getOrElse(ShiftAssignments.empty), p.potFixedPoints.getOrElse(FixedPointAssignments.empty), p.potStaffMovements.getOrElse(StaffMovements.empty)).hashCode()
})
class Backend() {
def render(props: Props): VdomTagOf[Div] = <.div(
props.potShifts.render { shifts =>
props.potFixedPoints.render { fixedPoints =>
props.potStaffMovements.render { movements =>
val movementsForTheDay = movements.forDay(props.viewMode.time)
<.div(
<.div(^.className := "container",
<.div(^.className := "col-md-3", FixedPointsEditor(FixedPointsProps(FixedPointAssignments(fixedPoints.forTerminal(props.terminal)), props.airportConfig, props.terminal, props.loggedInUser))),
<.div(^.className := "col-md-4", movementsEditor(movementsForTheDay, props.terminal))
),
<.div(^.className := "container",
<.div(^.className := "col-md-10", staffOverTheDay(movementsForTheDay, shifts, props.terminal)))
)
}
}
}
)
def filterByTerminal(fixedPoints: String, terminalName: String): String = fixedPoints
.split("\\n")
.filter { line =>
val cells = line.split(",").map(cell => cell.trim())
cells(1) == terminalName
}
.mkString("\\n")
def staffOverTheDay(movements: Seq[StaffMovement],
shifts: ShiftAssignments,
terminalName: Terminal): VdomTagOf[Div] = {
val terminalShifts = ShiftAssignments(shifts.forTerminal(terminalName))
val staffWithShiftsAndMovementsAt = StaffMovements.terminalStaffAt(terminalShifts)(movements) _
<.div(
<.h2("Staff over the day"),
staffingTableHourPerColumn(terminalName, daysWorthOf15Minutes(SDate.midnightThisMorning()), staffWithShiftsAndMovementsAt)
)
}
def movementsEditor(movements: Seq[StaffMovement], terminalName: Terminal): VdomTagOf[Div] = {
val terminalMovements = movements.filter(_.terminal == terminalName)
<.div(^.className := "staff-movements-list", <.h2("Movements"), movementsListTagMod(terminalMovements, terminalName))
}
case class FixedPointsProps(fixedPoints: FixedPointAssignments,
airportConfig: AirportConfig,
terminal: Terminal,
loggedInUser: LoggedInUser)
case class FixedPointsState(text: String, originalValue: FixedPointAssignments, terminal: Terminal) {
def isUpdated: Boolean = currentValue != originalValue
def currentValue: FixedPointAssignments = {
val withTerminalName = addTerminalNameAndDate(text, terminal)
FixedPointAssignments(StaffAssignmentParser(withTerminalName).parsedAssignments.toList.collect { case Success(sa) => sa })
}
}
object FixedPointsEditor {
val component: Component[FixedPointsProps, FixedPointsState, Unit, CtorType.Props] = ScalaComponent.builder[FixedPointsProps]("FixedPointsEditor")
.initialStateFromProps { props =>
FixedPointsState(
StaffAssignmentHelper.fixedPointsFormat(props.fixedPoints),
props.fixedPoints,
props.terminal)
}
.renderPS((scope, props, state) => {
val defaultExamples = Seq("Roving Officer, 00:00, 23:59, 1")
val examples = if (props.airportConfig.fixedPointExamples.nonEmpty)
props.airportConfig.fixedPointExamples
else
defaultExamples
log.info(s"Rendering fixed points form")
<.div(
<.h2("Miscellaneous Staff"),
if (props.loggedInUser.roles.contains(StaffEdit)) {
<.div(
<.p("One entry per line with values separated by commas, e.g.:"),
<.pre(<.div(examples.map(line => <.div(line)).toTagMod)),
<.textarea(^.defaultValue := state.text, ^.className := "staffing-editor",
^.onChange ==> ((e: ReactEventFromInput) => {
val newRawFixedPoints = e.target.value
scope.modState(_.copy(text = newRawFixedPoints))
})),
MuiButton(variant = MuiButton.Variant.contained, color = MuiButton.Color.primary)(all.disabled := !state.isUpdated, onClick --> {
GoogleEventTracker.sendEvent(props.terminal.toString, "Save Fixed Points", "")
scope.modState(
_.copy(originalValue = state.currentValue),
Callback(SPACircuit.dispatch(SaveFixedPoints(state.currentValue, props.terminal))))
}, "Save changes")
)
}
else <.pre(state.text, ^.className := "staffing-editor")
)
}).build
def apply(props: FixedPointsProps): Unmounted[FixedPointsProps, FixedPointsState, Unit] = component(props)
}
def daysWorthOf15Minutes(startOfDay: SDateLike): NumericRange[Long] = {
val timeMinPlusOneDay = startOfDay.addDays(1)
startOfDay.millisSinceEpoch until timeMinPlusOneDay.millisSinceEpoch by (oneMinute * 15)
}
def staffingTableHourPerColumn(terminalName: Terminal,
daysWorthOf15Minutes: NumericRange[Long],
staffWithShiftsAndMovements: (Terminal, SDateLike) => Int): VdomTagOf[Table] =
<.table(
^.className := "table table-striped table-xcondensed table-sm",
<.tbody(
daysWorthOf15Minutes.grouped(16).flatMap {
hoursWorthOf15Minutes =>
Seq(
<.tr(^.key := s"hr-${hoursWorthOf15Minutes.headOption.getOrElse("empty")}", {
hoursWorthOf15Minutes.map((t: Long) => {
val d = SDate(t)
val display = f"${d.getHours()}%02d:${d.getMinutes()}%02d"
<.th(^.key := t, display)
}).toTagMod
}),
<.tr(^.key := s"vr-${hoursWorthOf15Minutes.headOption.getOrElse("empty")}",
hoursWorthOf15Minutes.map(t => {
<.td(^.key := t, s"${staffWithShiftsAndMovements(terminalName, SDate(t))}")
}).toTagMod
))
}.toTagMod
)
)
}
def movementsListTagMod(terminalMovements: Seq[StaffMovement], terminalName: Terminal): TagOf[HTMLElement] = {
if (terminalMovements.nonEmpty)
<.ul(^.className := "list-unstyled", movementsLiElements(terminalMovements, terminalName).toTagMod)
else
<.p("No movements recorded")
}
def movementsLiElements(terminalMovements: Seq[StaffMovement], terminalName: Terminal): Seq[TagMod] = sortedMovements(terminalMovements).map {
case (_, movementPair) =>
labelAndLink(terminalName, movementPair) match {
case Some((label, link)) => <.li(link, " ", <.span(^.`class` := "movement-display", label))
case None => TagMod()
}
}
def sortedMovements(terminalMovements: Seq[StaffMovement]): Seq[(UUID, Seq[StaffMovement])] = terminalMovements
.groupBy(_.uUID)
.toSeq
.sortBy {
case (_, head :: _) => head.time.millisSinceEpoch
case (_, _) => 0L
}
def labelAndLink(terminalName: Terminal,
movementPair: Seq[StaffMovement]): Option[(String, html_<^.VdomTagOf[Anchor])] =
movementPair
.toList
.sortBy(_.time.millisSinceEpoch) match {
case first :: second :: Nil => Option(Tuple2(MovementDisplay.displayPair(first, second), removeLink(terminalName, first)))
case mm :: Nil => Option(Tuple2(MovementDisplay.displaySingle(mm), removeLink(terminalName, mm)))
case x =>
log.info(s"didn't get a pair: $x")
None
}
def removeLink(terminal: Terminal, movement: StaffMovement): VdomTagOf[Anchor] =
<.a(Icon.remove, ^.key := movement.uUID.toString, ^.onClick ==> ((_: ReactEventFromInput) =>
Callback {
GoogleEventTracker.sendEvent(terminal.toString, "Remove Staff Movement", movement.copy(createdBy = None).toString)
SPACircuit.dispatch(RemoveStaffMovements(movement.uUID))
}))
def apply(props: Props): VdomElement = component(props)
private val component = ScalaComponent.builder[Props]("TerminalStaffing")
.renderBackend[Backend]
.configure(Reusability.shouldComponentUpdate)
.build
object MovementDisplay {
def toCsv(movement: StaffMovement): String = {
s"${movement.terminal}, ${movement.reason}, ${displayDate(movement.time)}, ${displayTime(movement.time)}, ${movement.delta} staff"
}
def displayPair(start: StaffMovement, end: StaffMovement): String = {
val startDate = displayDate(start.time)
val endDate = displayDate(end.time)
val startDateForDisplay = if (startDate != displayDate(SDate.now().millisSinceEpoch)) startDate else ""
val endDateForDisplay = if (startDate != endDate) endDate else ""
val reasonForDisplay = start.reason.replace(" start", "") match {
case "" => " (no reason given) "
case r => r
}
val createdBy = start.createdBy.getOrElse("unknown")
s"${start.delta} @ $startDateForDisplay ${displayTime(start.time)} -> $endDateForDisplay ${displayTime(end.time)} $reasonForDisplay by $createdBy"
}
def displaySingle(movement: StaffMovement): String = {
val startDate = displayDate(movement.time)
val startDateForDisplay = if (startDate != displayDate(SDate.now().millisSinceEpoch)) startDate else ""
val reasonForDisplay = movement.reason.replace(" start", "") match {
case "" => " - "
case r => r
}
val createdBy = movement.createdBy.getOrElse("unknown")
s"${movement.delta} @ $startDateForDisplay ${displayTime(movement.time)} -> ongoing $reasonForDisplay by $createdBy"
}
def displayTime(time: MilliDate): String = SDate(time).toHoursAndMinutes
def displayDate(time: MilliDate): String = SDate(time).ddMMyyString
}
}
object FixedPoints {
def removeTerminalNameAndDate(rawFixedPoints: String): String = {
val lines = rawFixedPoints.split("\\n").toList.map(line => {
val withTerminal = line.split(",").toList.map(_.trim)
val withOutTerminal = withTerminal match {
case fpName :: _ :: _ :: tail => fpName :: tail
case _ => Nil
}
withOutTerminal.mkString(", ")
})
lines.mkString("\\n")
}
def addTerminalNameAndDate(rawFixedPoints: String, terminalName: Terminal): String = {
val today: SDateLike = SDate.midnightThisMorning()
val todayString = today.ddMMyyString
val lines = rawFixedPoints.split("\\n").toList.map(line => {
val withoutTerminal = line.split(",").toList.map(_.trim)
val withTerminal = withoutTerminal match {
case fpName :: tail => fpName :: terminalName.toString :: todayString :: tail
case _ => Nil
}
withTerminal.mkString(", ")
})
lines.mkString("\\n")
}
}
| UKHomeOffice/drt-scalajs-spa-exploration | client/src/main/scala/drt/client/components/TerminalStaffing.scala | Scala | apache-2.0 | 12,547 |
/*
* Copyright 2021 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package uk.gov.hmrc.ct.computations
import org.scalatest.{Matchers, WordSpec}
import org.scalatestplus.mockito.MockitoSugar
import uk.gov.hmrc.ct.BoxValidationFixture
import uk.gov.hmrc.ct.computations.retriever.ComputationsBoxRetriever
class CP19Spec extends WordSpec with MockitoSugar with Matchers with BoxValidationFixture[ComputationsBoxRetriever] {
val boxRetriever = mock[ComputationsBoxRetriever]
testBoxIsZeroOrPositive("CP19", CP19.apply)
}
| hmrc/ct-calculations | src/test/scala/uk/gov/hmrc/ct/computations/CP19Spec.scala | Scala | apache-2.0 | 1,065 |
trait Monoid[M] {
def combine(m1: M, m2: M): M
def empty: M
} | hmemcpy/milewski-ctfp-pdf | src/content/1.3/code/scala/snippet01.scala | Scala | gpl-3.0 | 65 |
/*
* Copyright (C) 2016-2019 Lightbend Inc. <https://www.lightbend.com>
*/
package com.lightbend.lagom.internal.testkit
import java.util
import java.util.Collections
import akka.annotation.InternalApi
import com.google.inject.Binder
import com.google.inject.Module
import com.google.inject.TypeLiteral
import com.lightbend.lagom.javadsl.testkit.ServiceTest
import play.api.routing.Router
/**
 * Provides an empty binding for [[util.List[Router]]]. Some of our tests make use of [[ServiceTest]]
 * without using [[com.lightbend.lagom.javadsl.server.ServiceGuiceSupport]]. For those tests, it is
 * necessary to add an extra binding for the list of Routers.
*/
@InternalApi
object EmptyAdditionalRoutersModule extends Module {
override def configure(binder: Binder): Unit =
binder.bind(new TypeLiteral[util.List[Router]]() {}).toInstance(Collections.emptyList[Router])
def instance(): EmptyAdditionalRoutersModule.type = EmptyAdditionalRoutersModule
}
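// Hypothetical usage sketch (the surrounding test module below is assumed, not taken from the
// original sources): a test that wires its own Guice injector can register this module so that
// the java.util.List[Router] binding resolves to an empty list.
//
//   import com.google.inject.{Guice, Key}
//   val injector = Guice.createInjector(myTestModule, EmptyAdditionalRoutersModule.instance())
//   val routers  = injector.getInstance(Key.get(new TypeLiteral[util.List[Router]]() {}))
//   // routers.isEmpty == true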
| rcavalcanti/lagom | testkit/javadsl/src/main/scala/com/lightbend/lagom/internal/testkit/EmptyAdditionalRoutersModule.scala | Scala | apache-2.0 | 960 |
package component.security.crypto
class CryptoService(private val encryptor: Encryptor, private val decryptor: Decryptor, private val crypto: Crypto) {
def sign(message: String): String = crypto.sign(message)
def generateRandomUUID = crypto.generateRandomUUID
def encryptAndSignData(data: String): String = {
encryptor.encryptAndSignData(data)
}
def encryptAndSignData(data: Array[Byte]): String = {
encryptor.encryptAndSignData(data)
}
def encryptAndSignTimeSensitiveData(data: String): String = {
encryptor.encryptAndSignTimeSensitiveData(data)
}
def encryptAndSignTimeSensitiveData(data: Array[Byte]): String = {
encryptor.encryptAndSignTimeSensitiveData(data)
}
def verifySignatureAndDecryptData(data: String): Option[String] = {
decryptor.verifySignatureAndDecryptData(data)
}
def verifySignatureAndDecryptTimeSensitiveData(data: String): Option[String] = {
decryptor.verifySignatureAndDecryptTimeSensitiveData(data)
}
}
| onurzdg/spray-app | src/main/scala/component/security/crypto/CryptoService.scala | Scala | apache-2.0 | 987 |
package views.html.team
import controllers.routes
import play.api.data.Form
import lila.api.Context
import lila.app.templating.Environment._
import lila.app.ui.ScalatagsTemplate._
import lila.common.String.html.richText
object request {
import trans.team._
def requestForm(t: lila.team.Team, form: Form[_])(implicit ctx: Context) = {
val title = s"${joinTeam.txt()} ${t.name}"
views.html.base.layout(
title = title,
moreCss = cssTag("team")
) {
main(cls := "page-menu page-small")(
bits.menu("requests".some),
div(cls := "page-menu__content box box-pad")(
h1(title),
p(style := "margin:2em 0")(richText(t.description)),
postForm(cls := "form3", action := routes.Team.requestCreate(t.id))(
!t.open ?? frag(
form3.group(form("message"), trans.message())(form3.textarea(_)()),
p(willBeReviewed())
),
t.password.nonEmpty ?? form3.passwordModified(form("password"), entryCode())(
autocomplete := "new-password"
),
form3.globalError(form),
form3.actions(
a(href := routes.Team.show(t.slug))(trans.cancel()),
form3.submit(joinTeam())
)
)
)
)
}
}
def all(requests: List[lila.team.RequestWithUser])(implicit ctx: Context) = {
val title = xJoinRequests.pluralSameTxt(requests.size)
bits.layout(title = title) {
main(cls := "page-menu")(
bits.menu("requests".some),
div(cls := "page-menu__content box box-pad")(
h1(title),
list(requests, none)
)
)
}
}
private[team] def list(requests: List[lila.team.RequestWithUser], t: Option[lila.team.Team])(implicit
ctx: Context
) =
table(cls := "slist requests @if(t.isEmpty){all}else{for-team} datatable")(
tbody(
requests.map { request =>
tr(
if (t.isEmpty) td(userLink(request.user), " ", teamLink(request.team))
else td(userLink(request.user)),
td(richText(request.message)),
td(momentFromNow(request.date)),
td(cls := "process")(
postForm(cls := "process-request", action := routes.Team.requestProcess(request.id))(
input(
tpe := "hidden",
name := "url",
value := t.fold(routes.Team.requests)(te => routes.Team.show(te.id))
),
button(name := "process", cls := "button button-empty button-red", value := "decline")(
trans.decline()
),
button(name := "process", cls := "button button-green", value := "accept")(trans.accept())
)
)
)
}
)
)
}
| luanlv/lila | app/views/team/request.scala | Scala | mit | 2,835 |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package wvlet.airframe.lifecycle
import java.util.concurrent.atomic.AtomicInteger
import wvlet.airframe.AirframeException.{MULTIPLE_SHUTDOWN_FAILURES, SHUTDOWN_FAILURE}
import wvlet.airframe.{bind, newDesign, newSilentDesign}
import wvlet.airspec.AirSpec
import wvlet.log.{LogLevel, LogSupport, Logger}
import wvlet.airframe._
class Counter extends LogSupport {
val initialized = new AtomicInteger(0)
val injected = new AtomicInteger(0)
val shutdowned = new AtomicInteger(0)
val started = new AtomicInteger(0)
}
trait CounterUser {
val counter1 = bind[CounterService]
val counter2 = bind[CounterService]
}
trait CounterService extends LogSupport {
val counterService = bind[Counter]
.onInit { c =>
debug(s"init: ${c.initialized.get()}")
c.initialized.incrementAndGet()
}
.onInject { c =>
debug(s"injected: ${c.injected.get()}")
c.injected.incrementAndGet()
}
.onStart { c =>
debug(s"start: ${c.started.get()}")
c.started.incrementAndGet()
}
.onShutdown { c =>
debug(s"shutdown: ${c.shutdowned.get()}")
c.shutdowned.incrementAndGet()
}
def initCount = counterService.initialized.get()
def injectCount = counterService.injected.get()
def startCount = counterService.started.get()
def shutdownCount = counterService.shutdowned.get()
}
trait User1 extends CounterService
trait User2 extends CounterService
trait LifeCycleOrder {
val t = new AtomicInteger(0)
var init = 0
var start = 0
var preShutdown = 0
var shutdown = 0
val v = bind[Int] { 0 }
.onInit { x => init = t.incrementAndGet() }
.onStart { x => start = t.incrementAndGet() }
.beforeShutdown { x => preShutdown = t.incrementAndGet() }
.onShutdown { x => shutdown = t.incrementAndGet() }
}
/**
*/
class LifeCycleManagerTest extends AirSpec {
test("call init hook") {
val c = newSilentDesign.bind[CounterService].toSingleton.newSession.build[CounterService]
c.initCount shouldBe 1
}
test("call lifecycle hooks properly for singleton") {
val session = newSilentDesign.bind[CounterService].toSingleton.newSession
val multiCounter = session.build[CounterUser]
multiCounter.counter1 shouldBeTheSameInstanceAs (multiCounter.counter2)
multiCounter.counter1.initCount shouldBe 1
multiCounter.counter1.injectCount shouldBe 1
multiCounter.counter1.startCount shouldBe 0
multiCounter.counter1.shutdownCount shouldBe 0
session.start
multiCounter.counter1.initCount shouldBe 1
multiCounter.counter1.injectCount shouldBe 1
multiCounter.counter1.startCount shouldBe 1
multiCounter.counter1.shutdownCount shouldBe 0
session.shutdown
multiCounter.counter1.initCount shouldBe 1
multiCounter.counter1.injectCount shouldBe 1
multiCounter.counter1.startCount shouldBe 1
multiCounter.counter1.shutdownCount shouldBe 1
}
test("start and shutdown only once for singleton referenced multiple times") {
val session = newSilentDesign.bind[Counter].toSingleton.newSession
val u1 = session.build[User1]
val u2 = session.build[User2]
    // Should have the same service instance
u1.counterService shouldBeTheSameInstanceAs (u2.counterService)
session.start
session.shutdown
// Counter should be initialized only once
u1.initCount shouldBe 1
u2.initCount shouldBe 1
u1.injectCount shouldBe 2
u2.injectCount shouldBe 2
// Counter also should be started only once
u1.startCount shouldBe 1
u2.startCount shouldBe 1
// Shutdown should be called only once
u1.shutdownCount shouldBe 1
u2.shutdownCount shouldBe 1
}
test("run start hook when the session is already started") {
val session = newSilentDesign.newSession
var cs: CounterService = null
session.start {
cs = session.build[CounterService]
cs.initCount shouldBe 1
cs.startCount shouldBe 1
cs.shutdownCount shouldBe 0
}
cs.initCount shouldBe 1
cs.startCount shouldBe 1
cs.shutdownCount shouldBe 1
}
test("run start hook only once for singleton after session is started") {
val session = newSilentDesign.bind[Counter].toSingleton.newSession
var cs: CounterService = null
var cs2: CounterService = null
session.start {
cs = session.build[CounterService]
cs.initCount shouldBe 1
cs.injectCount shouldBe 1
cs.startCount shouldBe 1
cs.shutdownCount shouldBe 0
cs2 = session.build[CounterService]
cs2.initCount shouldBe 1
cs2.injectCount shouldBe 1 // CounterService is already instantiated
cs2.startCount shouldBe 1
cs2.shutdownCount shouldBe 0
}
cs.initCount shouldBe 1
cs.injectCount shouldBe 1
cs.startCount shouldBe 1
cs.shutdownCount shouldBe 1
cs.counterService shouldBeTheSameInstanceAs (cs2.counterService)
}
test("execute beforeShutdown hook") {
val session = newSilentDesign.newSession
val l = session.build[LifeCycleOrder]
session.start {}
l.init shouldBe 1
l.start shouldBe 2
l.preShutdown shouldBe 3
l.shutdown shouldBe 4
}
test("show life cycle log") {
newDesign.withSession { session =>
// Just show debug logs
}
val l = Logger("wvlet.airframe")
val current = l.getLogLevel
try {
l.setLogLevel(LogLevel.DEBUG)
newSilentDesign.withSession { session =>
// Show debug level session life cycle log
}
} finally {
l.setLogLevel(current)
}
}
class CloseExceptionTest extends AutoCloseable {
override def close(): Unit = {
throw new IllegalStateException("failure test")
}
}
test("handle exceptions in shutdown hooks") {
val e = intercept[SHUTDOWN_FAILURE] {
newSilentDesign.build[CloseExceptionTest] { x => }
}
e.getMessage.contains("failure test") shouldBe true
}
class MultipleShutdownExceptionTest(t: CloseExceptionTest) extends AutoCloseable {
override def close(): Unit = {
throw new IllegalStateException("failure 2")
}
}
test("handle multiple exceptions") {
val e = intercept[MULTIPLE_SHUTDOWN_FAILURES] {
newSilentDesign
.bind[CloseExceptionTest].toSingleton // Inner class needs to be defined where the outer context can be found
.build[MultipleShutdownExceptionTest] { x => }
}
debug(e)
e.causes.find(_.getMessage.contains("failure test")) shouldBe defined
e.causes.find(_.getMessage.contains("failure 2")) shouldBe defined
}
}
| wvlet/airframe | airframe-di/src/test/scala-2/wvlet/airframe/legacy/lifecycle/LifeCycleManagerTest.scala | Scala | apache-2.0 | 7,127 |
package org.scaladebugger.language.interpreters
import org.parboiled2.ParseError
import org.scaladebugger.language.models
import scala.util.{Failure, Success, Try}
/**
* Contains built-in functions for the debugger interpreter.
*/
trait DebuggerInterpreterBuiltins { this: DebuggerInterpreter =>
/** Mark parameter as required. */
@inline private def R(name: String, doc: String = "") =
(name, "Required" + (if (doc.nonEmpty) " ~ " + doc else ""))
/** Mark parameter as optional. */
@inline private def O(name: String, doc: String = "") =
(name, "Optional" + (if (doc.nonEmpty) " ~ " + doc else ""))
// Interpreting
private lazy val evalCodeIdentifier = {
val (name, doc) = R("code", "text to be parsed and evaluated")
models.Identifier(name, Some(doc))
}
this.bindFunctionExpression("eval", models.NativeFunction(
Seq(evalCodeIdentifier),
(m, s) => toExpression(interpret(toBaseValue(m.getOrElse(
evalCodeIdentifier,
models.Undefined.toScalaValue
).asInstanceOf[models.Expression], s).toScalaValue.toString, s).get).get,
Some("""Evaluate the text as a local code snippet.""")
))
// Debugging
private lazy val parseCodeIdentifier = {
val (name, doc) = R("code", "text to be parsed")
models.Identifier(name, Some(doc))
}
this.bindFunctionExpression("parse", models.NativeFunction(
Seq(parseCodeIdentifier),
(m, s) => {
val code = toBaseValue(m.getOrElse(
parseCodeIdentifier,
models.Undefined
)).toScalaValue.toString
val results = parse(code)
val resultString = parseResultsToString(results, code)
toExpression(resultString).get
},
Some("""Parses the text, returning the AST as text.""")
))
// Side effects
this.bindFunctionWithParamDocs(
"print", Seq(R("text")), DefaultFunctions.Print(out),
"""Prints to standard out. Invoke using print("some text")."""
)
this.bindFunctionWithParamDocs(
"printErr", Seq(R("text")), DefaultFunctions.Print(err),
"""Prints to standard error. Invoke using printErr("some text")."""
)
// Mathematical operators
this.bindFunctionWithParamDocs(
"plusPlus", Seq(R("l"), R("r")), DefaultFunctions.PlusPlus,
"""Equivalent to l ++ r."""
)
this.bindFunctionWithParamDocs(
"plus", Seq(R("l"), R("r")), DefaultFunctions.Plus,
"""Equivalent to l + r."""
)
this.bindFunctionWithParamDocs(
"minus", Seq(R("l"), R("r")), DefaultFunctions.Minus,
"""Equivalent to l - r."""
)
this.bindFunctionWithParamDocs(
"multiply", Seq(R("l"), R("r")), DefaultFunctions.Multiply,
"""Equivalent to l * r."""
)
this.bindFunctionWithParamDocs(
"divide", Seq(R("l"), R("r")), DefaultFunctions.Divide,
"""Equivalent to l / r."""
)
this.bindFunctionWithParamDocs(
"modulus", Seq(R("l"), R("r")), DefaultFunctions.Modulus,
"""Equivalent to l % r."""
)
// Logical operators
this.bindFunctionWithParamDocs(
"lessThan", Seq(R("l"), R("r")), DefaultFunctions.LessThan,
"""Equivalent to l < r."""
)
this.bindFunctionWithParamDocs(
"lessThanEqual", Seq(R("l"), R("r")), DefaultFunctions.LessThanEqual,
"""Equivalent to l <= r."""
)
this.bindFunctionWithParamDocs(
"greaterThan", Seq(R("l"), R("r")), DefaultFunctions.GreaterThan,
"""Equivalent to l > r."""
)
this.bindFunctionWithParamDocs(
"greaterThanEqual", Seq(R("l"), R("r")), DefaultFunctions.GreaterThanEqual,
"""Equivalent to l >= r."""
)
this.bindFunctionWithParamDocs(
"equal", Seq(R("l"), R("r")), DefaultFunctions.Equal,
"""Equivalent to l == r."""
)
this.bindFunctionWithParamDocs(
"notEqual", Seq(R("l"), R("r")), DefaultFunctions.NotEqual,
"""Equivalent to l != r."""
)
private def parseResultsToString(results: Try[Seq[AnyRef]], input: String): String = results match {
case Success(r) => r.mkString(",")
case Failure(ex: ParseError) => ex.format(input)
case Failure(ex) => ex.toString
}
}
| ensime/scala-debugger | scala-debugger-language/src/main/scala/org/scaladebugger/language/interpreters/DebuggerInterpreterBuiltins.scala | Scala | apache-2.0 | 4,017 |
/*
* Artificial Intelligence for Humans
* Volume 2: Nature Inspired Algorithms
* Java Version
* http://www.aifh.org
* http://www.jeffheaton.com
*
* Code repository:
* https://github.com/jeffheaton/aifh
*
* Copyright 2014 by Jeff Heaton
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* For more information on Heaton Research copyrights, licenses
* and trademarks visit:
* http://www.heatonresearch.com/copyright
*/
package com.heatonresearch.aifh
/**
* Global constants for AIFH.
*/
object AIFH {
/**
* The default precision.
*/
val DEFAULT_PRECISION: Double = 0.0000001
}
| PeterLauris/aifh | vol2/vol2-scala-examples/src/main/scala/com/heatonresearch/aifh/AIFH.scala | Scala | apache-2.0 | 1,114 |
package test
import ildl._
object GCDTest {
object IntPairAsLong extends RigidTransformationDescription {
type High = (Int, Int)
type Repr = Long
def toRepr(pair: (Int, Int)): Long @high = ???
def toHigh(l: Long @high): (Int, Int) = ???
}
object LongAsFloat extends RigidTransformationDescription {
type High = Long
type Repr = Float
def toRepr(pair: Long): Float @high = ???
def toHigh(l: Float @high): Long = ???
}
adrt(IntPairAsLong) {
// the next scope will not activate, since we
// don't currently support cascading transforms:
adrt(LongAsFloat) {
val n1 = (1, 0)
val n3 = n1
}
}
}
| miniboxing/ildl-plugin | tests/correctness/resources/tests/scopes-conflicting-cascade.scala | Scala | bsd-3-clause | 664 |
package com.softwaremill.scalaval
object Validation {
type Errors = Map[String, Seq[String]]
def rule(field: String, haltOnFail: Boolean = false)(checkFn: => (Boolean, String)) = new RuleDefinition(field, haltOnFail, checkFn)
def validate(rules: RuleDefinition*): Result = validate(rules.toSeq)
def validate(rules: Iterable[RuleDefinition]): Result = {
import com.softwaremill.scalaval.util.InclusiveIterator._
val evaluatedRules = rules.toIterator.takeWhileInclusive(rule => !rule.haltOnFail || rule.result).toSeq
val errors = evaluatedRules.filter(!_.result).groupBy(_.field).mapValues(_.map(_.errorMsg))
Result(errors)
}
case class Result(errors: Errors) {
def whenOk[T](block: => T): Either[Errors, T] = if(errors.isEmpty) Right(block) else Left(errors)
}
class RuleDefinition(val field: String, val haltOnFail: Boolean, checkFn: => (Boolean, String)) {
private lazy val evaluatedRule = checkFn
lazy val result = evaluatedRule._1
lazy val errorMsg = evaluatedRule._2
}
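
  // A minimal usage sketch added for illustration; it is not part of the original
  // library code, and the field names, messages and `register` helper are made up.
  // It shows how `rule`, `validate` and `Result.whenOk` compose: failing rules are
  // grouped by field, and `whenOk` only evaluates its block when no rule failed.
  private object UsageExample {
    def register(email: String, age: Int): Either[Errors, String] = {
      val emailRule = rule("email", haltOnFail = true) { (email.contains("@"), "email is invalid") }
      val ageRule   = rule("age") { (age >= 18, "must be at least 18") }
      validate(emailRule, ageRule).whenOk {
        s"registered $email" // evaluated only when every rule passed
      }
    }
  }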
} | mostr/scalaval | src/main/scala/com/softwaremill/scalaval/Validation.scala | Scala | apache-2.0 | 1,034 |
package org.sgine.ui
import style.Stylized
import org.powerscala.event.ActionEvent
import org.sgine.input.event.{KeyDownEvent, MousePressEvent}
import org.sgine.input.Key
/**
* @author Matt Hicks <[email protected]>
*/
class Button extends AbstractContainer with Stylized with Focusable {
def this(text: String) = {
this()
this.text := text
}
protected[ui] val background = new Scale9() {
override def name = "background"
mouseEnabled := false
size.algorithm := null
includeInLayout := false
}
addChild(background)
protected[ui] val label = new Label() {
override def name = "label"
mouseEnabled := false
location.z := 0.01
}
addChild(label)
val text = label.text
onChange(size.width, size.height) {
background.size(size.width(), size.height())
}
listeners.synchronous {
case evt: MousePressEvent => {
requestFocus()
fire(ActionEvent("click"))
}
case evt: KeyDownEvent if (evt.key == Key.Enter || evt.key == Key.Space) => fire(ActionEvent("click"))
}
} | Axiometry/sgine | ui/src/main/scala/org/sgine/ui/Button.scala | Scala | bsd-3-clause | 1,050 |
class B extends A { override def f = 2 }
trait T extends A
class C1 extends B with T {
def t1 = super[T].f
def t2 = super[B].f
def t3 = super.f
}
trait U1 extends A
trait U2 extends A
class C2 extends U1 with U2 { def t = super.f }
object Test extends App {
val c1 = new C1
assert(c1.t1 == 1)
assert(c1.t2 == 2)
assert(c1.t3 == 2)
val c2 = new C2
assert(c2.f == 1)
assert(c2.t == 1)
}
| scala/scala | test/files/run/sd143/Test.scala | Scala | apache-2.0 | 410 |
package bylt.core
import org.scalacheck.Gen
/**
* Created by attila on 4/14/2016.
*/
object ArbitraryType {
// Types
def lambdaTypeGen (depth : Int) : Gen [LambdaType] =
for {
arg <- typeExprGen (depth)
ret <- typeExprGen (depth)
} yield LambdaType (arg, ret)
def tupleTypeGen (depth : Int) : Gen [TupleType] =
for (elems <- Gen.nonEmptyListOf (typeExprGen (depth)))
yield TupleType (elems.toVector)
def recordTypeGen (depth : Int) : Gen [RecordType] = {
val fieldGen =
for {
name <- ArbitraryName.nameGen
tpe <- typeExprGen (depth)
} yield (name, tpe)
for (fields <- Gen.nonEmptyListOf (fieldGen))
yield RecordType (fields.toVector)
}
def sumTypeGen (depth : Int) : Gen [SumType] = {
val memberGen =
for {
name <- ArbitraryName.nameGen
tpe <- typeExprGen (depth)
} yield (name, tpe)
for (members <- Gen.nonEmptyListOf (memberGen))
yield SumType (members.toVector)
}
def typeDeclGen (depth : Int) : Gen [TypeDecl] =
if (depth > 1) {
Gen.frequency (
1 -> Gen.const (TopType ()),
1 -> Gen.const (BottomType ()),
3 -> Gen.const (UnitType ()),
5 -> lambdaTypeGen (depth - 1),
2 -> tupleTypeGen (depth - 1),
9 -> recordTypeGen (depth - 1),
7 -> sumTypeGen (depth - 1)
)
} else {
Gen.frequency (
1 -> Gen.const (TopType ()),
1 -> Gen.const (BottomType ()),
3 -> Gen.const (UnitType ())
)
}
lazy val typeRefGen : Gen [TypeRef] =
for (qname <- ArbitraryName.qnameGen) yield TypeRef (qname)
lazy val typeVarGen : Gen [TypeVar] =
for (name <- ArbitraryName.nameGen) yield TypeVar (name)
def typeApplyGen (depth : Int) : Gen [TypeApply] =
for {
qname <- ArbitraryName.qnameGen
args <- Gen.nonEmptyListOf (typeExprGen (depth))
} yield TypeApply (qname, args.toVector)
def typeLambdaGen (depth : Int) : Gen [TypeLambda] =
for {
args <- Gen.nonEmptyListOf (ArbitraryName.nameGen)
body <- typeExprGen (depth)
} yield TypeLambda (args.toVector, body)
def typeExprGen (depth : Int) : Gen [TypeExpr] =
if (depth > 1) {
Gen.frequency (
1 -> typeRefGen,
2 -> typeVarGen,
3 -> typeApplyGen (depth - 1),
1 -> typeLambdaGen (depth - 1),
7 -> typeDeclGen (depth)
)
} else {
Gen.frequency (
1 -> typeRefGen,
2 -> typeVarGen,
7 -> typeDeclGen (depth)
)
}
def typeExprGen : Gen [TypeExpr] = Gen.sized { size =>
typeExprGen (size / 10)
}
lazy val arb = org.scalacheck.Arbitrary (typeExprGen)
}
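
// Illustrative ScalaCheck property added as a usage sketch; it is not part of the
// original repository and the `TypeExprProps` name is made up. It shows how the
// size-bounded generator above can drive a property over generated TypeExpr trees.
object TypeExprProps extends org.scalacheck.Properties("TypeExpr") {
  import org.scalacheck.Prop.forAll

  // Every generated type expression should be a non-null tree.
  property("typeExprGen produces values") = forAll(ArbitraryType.typeExprGen) { t =>
    t != null
  }
}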
| ozmi/bylt | src/test/scala/bylt/core/ArbitraryType.scala | Scala | mit | 3,117 |
package drainprocessor.processor.kinesis
import java.net.InetAddress
import java.util.UUID
import java.util.concurrent.TimeUnit
import akka.actor.{ActorContext, ActorRef}
import akka.routing.ConsistentHashingRouter.ConsistentHashableEnvelope
import com.amazonaws.auth.AWSCredentials
import com.amazonaws.services.kinesis.clientlibrary.exceptions.{InvalidStateException, ShutdownException, ThrottlingException}
import com.amazonaws.services.kinesis.clientlibrary.interfaces.{IRecordProcessor, IRecordProcessorCheckpointer, IRecordProcessorFactory}
import com.amazonaws.services.kinesis.clientlibrary.lib.worker.{InitialPositionInStream, KinesisClientLibConfiguration, Worker}
import com.amazonaws.services.kinesis.clientlibrary.types.ShutdownReason
import com.amazonaws.services.kinesis.metrics.impl.NullMetricsFactory
import com.amazonaws.services.kinesis.model.Record
import com.github.vonnagy.service.container.log.LoggingAdapter
import com.github.vonnagy.service.container.metrics.{Meter, Counter}
import scala.collection.JavaConversions._
import scala.util.control.Breaks
/**
* Created by ivannagy on 4/12/15.
*/
class StreamReader(name: String, drainer: ActorRef)(implicit context: ActorContext) extends LoggingAdapter {
import context.system
val mybreaks = new Breaks
import mybreaks.{break, breakable}
val endpoint = context.system.settings.config.getString("log.processors.kinesis.endpoint")
val accessKey = context.system.settings.config.getString("log.processors.kinesis.access-key")
val accessSecret = context.system.settings.config.getString("log.processors.kinesis.access-secret")
val initPos = context.system.settings.config.getString(s"log.processors.kinesis.streams.$name.initial-position")
val maxRecords = context.system.settings.config.getInt(s"log.processors.kinesis.streams.$name.max-records")
val retries = context.system.settings.config.getInt("log.processors.kinesis.streams.checkpoint-retries")
val checkpointInterval = context.system.settings.config.getDuration("log.processors.kinesis.streams.checkpoint-interval", TimeUnit.MILLISECONDS)
val backoffTime = context.system.settings.config.getDuration("log.processors.kinesis.streams.backoff-time", TimeUnit.MILLISECONDS)
var worker: Option[Worker] = None
val credentials = new AWSCredentials {
def getAWSAccessKeyId: String = accessKey
def getAWSSecretKey: String = accessSecret
}
val receivedCount = Counter("processors.kinesis.receive")
val receivedMeter = Meter("processors.kinesis.receive.meter")
/**
* Never-ending processing loop over source stream.
*/
def start {
val workerId = InetAddress.getLocalHost().getCanonicalHostName() + ":" + UUID.randomUUID();
log.info("Using workerId: " + workerId)
val kinesisClientLibConfiguration = new KinesisClientLibConfiguration("drain-processor", name,
new CredentialsProvider(credentials), workerId)
.withKinesisEndpoint(endpoint)
.withInitialPositionInStream(InitialPositionInStream.valueOf(initPos))
.withMaxRecords(maxRecords)
log.info(s"Running: drain-processor.")
val rawEventProcessorFactory = new RawEventProcessorFactory()
worker = Some(new Worker(
rawEventProcessorFactory,
kinesisClientLibConfiguration,
new NullMetricsFactory()
))
context.dispatcher.execute(worker.get)
}
def stop: Unit = {
if (worker.isDefined)
worker.get.shutdown
}
// Factory needed by the Amazon Kinesis Consumer library to
// create a processor.
class RawEventProcessorFactory()
extends IRecordProcessorFactory {
@Override
def createProcessor: IRecordProcessor = {
return new RawEventProcessor();
}
}
// Process events from a Kinesis stream.
class RawEventProcessor()
extends IRecordProcessor {
private var kinesisShardId: String = _
private var nextCheckpointTimeInMillis: Long = _
override def initialize(shardId: String) = {
log.info("Initializing record processor for shard: " + shardId)
this.kinesisShardId = shardId
}
override def processRecords(records: java.util.List[Record],
checkpointer: IRecordProcessorCheckpointer) = {
log.debug(s"Processing ${records.size} records from $kinesisShardId")
processRecordsWithRetries(records)
if (System.currentTimeMillis() > nextCheckpointTimeInMillis) {
checkpoint(checkpointer)
nextCheckpointTimeInMillis =
System.currentTimeMillis + checkpointInterval
}
}
private def processRecordsWithRetries(records: java.util.List[Record]) = {
for (record <- records) {
try {
log.trace(s"Sequence number: ${record.getSequenceNumber}")
log.trace(s"Partition key: ${record.getPartitionKey}")
drainer ! ConsistentHashableEnvelope(message = record, hashKey = record.getPartitionKey)
receivedCount.incr
receivedMeter.mark
} catch {
case t: Throwable =>
log.error(s"Caught throwable while processing record $record", t)
}
}
}
override def shutdown(checkpointer: IRecordProcessorCheckpointer,
reason: ShutdownReason) = {
log.info(s"Shutting down record processor for shard: $kinesisShardId")
if (reason == ShutdownReason.TERMINATE) {
checkpoint(checkpointer)
}
}
private def checkpoint(checkpointer: IRecordProcessorCheckpointer) = {
log.info(s"Checkpointing shard $kinesisShardId")
breakable {
for (i <- 0 to retries - 1) {
try {
checkpointer.checkpoint()
break
} catch {
case se: ShutdownException =>
log.info("Caught shutdown exception, skipping checkpoint.", se)
case e: ThrottlingException =>
if (i >= (retries - 1)) {
log.info(s"Checkpoint failed after ${i + 1} attempts.", e)
} else {
log.info(s"Transient issue when checkpointing - attempt ${i + 1} of "
+ retries, e)
}
case e: InvalidStateException =>
log.info("Cannot save checkpoint to the DynamoDB table used by " +
"the Amazon Kinesis Client Library.", e)
}
Thread.sleep(backoffTime)
}
}
}
}
}
| vonnagy/drain-processor | src/main/scala/drainprocessor/processor/kinesis/StreamReader.scala | Scala | apache-2.0 | 6,378 |
package net.liftmodules.foboajs.lib
import net.liftweb.util._
/**
* ==Angular Material Design Script Helper==
*
* Angular Material Design script helper is a helper class that implements some commonly
* used material design component script functions.
*
*/
class AJSMaterialSH extends ScriptHelper {
  // Because these methods use default arguments they cannot share an overloaded name: Scala rejects default arguments on more than one overloaded alternative.
/**
* Wraps a lift message in a Material Design Toast component.
*
* @example
* In your "app js" file you you can set up the toast themes like this.
* {{{
* app.config(function(\\$mdThemingProvider) {
* :
* \\$mdThemingProvider.theme('error-toast').primaryPalette('red').accentPalette('orange');
* \\$mdThemingProvider.theme('warning-toast').primaryPalette('orange').accentPalette('deep-orange');
* \\$mdThemingProvider.theme('success-toast').primaryPalette('green');
* });
* }}}
*
* In your "app css" file you could set up something like this.
* {{{
* md-toast.md-success-toast-theme {
* background-color: #5cb85c;
* }
* md-toast.md-warning-toast-theme {
* background-color: #f0ad4e;
* }
* md-toast.md-error-toast-theme {
* background-color: #d9534f;
* }
* }}}
*
* In your Scala code you could wrap the message in a Toast like this.
* {{{
* import net.liftmodules.FoBoAJS.lib.{AJSMaterialSH => msh}
* :
* :
* S.notice(msh.mdToastWrapMsg(msg,theme="success-toast"))
* :
* S.error(msh.mdToastWrapMsg(msg,theme="error-toast"))
* }}}
*
*
* @param msg - The message to display.
* @param position - The position of the toast message. If not provided defaults to 'bottom left'
* @param theme - The color theme to use for the Toast component. If not provided some none
* theme specific color will be used.
* @return scala.xml.NodeSeq
*/
def mdToastWrapMsg(msg: String,
position: String = "bottom left",
theme: String = ""): scala.xml.NodeSeq = {
val initFunc =
"""showSimpleToast('%s','%s','%s');""".format(msg, position, theme)
return <div ng-controller='LiftMsgToastCtrl' ng-init={initFunc}></div>
}
/**
* Wraps a lift message in a Material Design Toast component.
*
* @example
* In your "app js" file you you can set up the toast themes like this.
* {{{
* app.config(function(\\$mdThemingProvider) {
* :
* \\$mdThemingProvider.theme('error-toast').primaryPalette('red').accentPalette('orange');
* \\$mdThemingProvider.theme('warning-toast').primaryPalette('orange').accentPalette('deep-orange');
* \\$mdThemingProvider.theme('success-toast').primaryPalette('green');
* });
* }}}
*
* In your "app css" file you could set up something like this.
* {{{
* md-toast.md-success-toast-theme {
* background-color: #5cb85c;
* }
* md-toast.md-warning-toast-theme {
* background-color: #f0ad4e;
* }
* md-toast.md-error-toast-theme {
* background-color: #d9534f;
* }
* }}}
*
* In your Scala code you could wrap the message in a Toast like this.
* {{{
* import net.liftmodules.FoBoAJS.lib.{AJSMaterialSH => msh}
* :
* :
* S.notice(msh.mdToastWrapNSMsg(msg,theme="success-toast"))
* :
* S.error(msh.mdToastWrapNSMsg(msg,theme="error-toast"))
* }}}
*
*
* @param msg - The message to display.
* @param position - The position of the toast message. If not provided defaults to 'bottom left'
* @param theme - The color theme to use for the Toast component. If not provided some none
* theme specific color will be used.
* @return scala.xml.NodeSeq
*/
def mdToastWrapNSMsg(msg: scala.xml.NodeSeq,
position: String = "bottom left",
theme: String = ""): scala.xml.NodeSeq = {
val initFunc =
"""showSimpleToast('%s','%s','%s');""".format(msg, position, theme)
return <div ng-controller='LiftMsgToastCtrl' ng-init={initFunc}></div>
}
/**
* Wraps a lift message in a Material Design Toast component.
*
* @example
* In your "app js" file you you can set up the toast themes like this.
* {{{
* app.config(function(\\$mdThemingProvider) {
* :
* \\$mdThemingProvider.theme('error-toast').primaryPalette('red').accentPalette('orange');
* \\$mdThemingProvider.theme('warning-toast').primaryPalette('orange').accentPalette('deep-orange');
* \\$mdThemingProvider.theme('success-toast').primaryPalette('green');
* });
* }}}
*
* In your "app css" file you could set up something like this.
* {{{
* md-toast.md-success-toast-theme {
* background-color: #5cb85c;
* }
* md-toast.md-warning-toast-theme {
* background-color: #f0ad4e;
* }
* md-toast.md-error-toast-theme {
* background-color: #d9534f;
* }
* }}}
*
* In your Scala code you could wrap the message in a Toast like this.
* {{{
* import net.liftmodules.FoBoAJS.lib.{AJSMaterialSH => msh}
* :
* :
* S.notice(msh.mdToastWrapFEMsg(vi,theme="success-toast"))
* :
* S.error(msh.mdToastWrapFEMsg(vi,theme="error-toast"))
* }}}
*
*
   * @param vi         - The list of field errors; the first error's message is displayed.
* @param position - The position of the toast message. If not provided defaults to 'bottom left'
* @param theme - The color theme to use for the Toast component. If not provided some none
* theme specific color will be used.
* @return scala.xml.NodeSeq
*/
def mdToastWrapFEMsg(vi: List[FieldError],
position: String = "bottom left",
theme: String = ""): scala.xml.NodeSeq = {
val msg: scala.xml.NodeSeq = (vi
.map { i =>
i.msg
})
.lift(0)
      .getOrElse(<span>Something went wrong!</span>);
val initFunc =
"""showSimpleToast('%s','%s','%s');""".format(msg, position, theme)
return <div ng-controller='LiftMsgToastCtrl' ng-init={initFunc}></div>
}
}
| karma4u101/FoBo | Angular/AngularJS-API/src/main/scala/net/liftmodules/foboajs/lib/AJSMaterialSH.scala | Scala | apache-2.0 | 6,205 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.livy
import org.scalatest.FunSuite
class EOLUtilsSuite extends FunSuite with LivyBaseUnitTestSuite {
test("check EOL") {
val s1 = "test\\r\\ntest"
assert(!EOLUtils.Mode.hasUnixEOL(s1))
assert(!EOLUtils.Mode.hasOldMacEOL(s1))
assert(EOLUtils.Mode.hasWindowsEOL(s1))
val s2 = "test\\ntest"
assert(EOLUtils.Mode.hasUnixEOL(s2))
assert(!EOLUtils.Mode.hasOldMacEOL(s2))
assert(!EOLUtils.Mode.hasWindowsEOL(s2))
val s3 = "test\\rtest"
assert(!EOLUtils.Mode.hasUnixEOL(s3))
assert(EOLUtils.Mode.hasOldMacEOL(s3))
assert(!EOLUtils.Mode.hasWindowsEOL(s3))
val s4 = "testtest"
assert(!EOLUtils.Mode.hasUnixEOL(s4))
assert(!EOLUtils.Mode.hasOldMacEOL(s4))
assert(!EOLUtils.Mode.hasWindowsEOL(s4))
}
test("convert EOL") {
val s1 = "test\\r\\ntest"
val s2 = "test\\ntest"
val s3 = "test\\rtest"
val s4 = "testtest"
assert(EOLUtils.convertToSystemEOL(s1) === EOLUtils.convertToSystemEOL(s2))
assert(EOLUtils.convertToSystemEOL(s1) === EOLUtils.convertToSystemEOL(s3))
assert(EOLUtils.convertToSystemEOL(s2) === EOLUtils.convertToSystemEOL(s3))
assert(EOLUtils.convertToSystemEOL(s4) === s4)
}
}
| ajbozarth/incubator-livy | core/src/test/scala/org/apache/livy/EOLUtilsSuite.scala | Scala | apache-2.0 | 2,006 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.graphx.lib
import org.apache.spark.graphx._
import scala.reflect.ClassTag
/**
* Computes shortest paths to the given set of landmark vertices, returning a graph where each
* vertex attribute is a map containing the shortest-path distance to each reachable landmark.
*/
object ShortestPaths {
/** Stores a map from the vertex id of a landmark to the distance to that landmark. */
type SPMap = Map[VertexId, Int]
private def makeMap(x: (VertexId, Int)*) = Map(x: _*)
private def incrementMap(spmap: SPMap): SPMap = spmap.map { case (v, d) => v -> (d + 1) }
private def addMaps(spmap1: SPMap, spmap2: SPMap): SPMap =
(spmap1.keySet ++ spmap2.keySet).map {
k => k -> math.min(spmap1.getOrElse(k, Int.MaxValue), spmap2.getOrElse(k, Int.MaxValue))
}.toMap
/**
* Computes shortest paths to the given set of landmark vertices.
*
* @tparam ED the edge attribute type (not used in the computation)
*
* @param graph the graph for which to compute the shortest paths
* @param landmarks the list of landmark vertex ids. Shortest paths will be computed to each
* landmark.
*
* @return a graph where each vertex attribute is a map containing the shortest-path distance to
* each reachable landmark vertex.
*/
def run[VD, ED: ClassTag](graph: Graph[VD, ED], landmarks: Seq[VertexId]): Graph[SPMap, ED] = {
val spGraph = graph.mapVertices { (vid, attr) =>
if (landmarks.contains(vid)) makeMap(vid -> 0) else makeMap()
}
val initialMessage = makeMap()
def vertexProgram(id: VertexId, attr: SPMap, msg: SPMap): SPMap = {
addMaps(attr, msg)
}
def sendMessage(edge: EdgeTriplet[SPMap, _]): Iterator[(VertexId, SPMap)] = {
val newAttr = incrementMap(edge.dstAttr)
if (edge.srcAttr != addMaps(newAttr, edge.srcAttr)) Iterator((edge.srcId, newAttr))
else Iterator.empty
}
Pregel(spGraph, initialMessage)(vertexProgram, sendMessage, addMaps)
}
}
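
// A small usage sketch added for illustration; it is not part of the Spark sources and
// the object, method and variable names below are made up. It builds a chain graph
// 1 -> 2 -> 3 and computes hop counts to landmark vertex 3: vertex 1 ends up with
// Map(3L -> 2), vertex 2 with Map(3L -> 1), and vertex 3 with Map(3L -> 0).
object ShortestPathsExample {
  import org.apache.spark.SparkContext
  import org.apache.spark.graphx.{Edge, Graph}

  def demo(sc: SparkContext): Unit = {
    val edges = sc.parallelize(Seq(Edge(1L, 2L, 1), Edge(2L, 3L, 1)))
    val graph = Graph.fromEdges(edges, defaultValue = 0)
    val result = ShortestPaths.run(graph, landmarks = Seq(3L))
    // Each vertex attribute is now an SPMap: landmark vertex id -> shortest hop count.
    result.vertices.collect().foreach { case (id, spMap) => println(s"$id -> $spMap") }
  }
}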
| practice-vishnoi/dev-spark-1 | graphx/src/main/scala/org/apache/spark/graphx/lib/ShortestPaths.scala | Scala | apache-2.0 | 2,786 |
/**
*
* AndroidKeystore
* Ledger wallet
*
* Created by Pierre Pollastri on 11/09/15.
*
* The MIT License (MIT)
*
* Copyright (c) 2015 Ledger
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*
*/
package co.ledger.wallet.core.security
import java.math.BigInteger
import java.security.KeyStore
import java.security.KeyStore.PasswordProtection
import java.util.Calendar
import javax.security.auth.x500.X500Principal
import android.content.Context
import android.security.KeyPairGeneratorSpec
import co.ledger.wallet.core.crypto.Crypto
import scala.concurrent.{Promise, Future}
class AndroidKeystore(context: Context) extends Keystore(context) {
override protected def loadJavaKeyStore(passwordProtection: PasswordProtection): Future[JavaKeyStore] = {
Crypto.ensureSpongyIsRemoved()
val keystore = KeyStore.getInstance("AndroidKeyStore")
keystore.load(null)
Future.successful(keystore)
}
override def generateKey(alias: String): JavaKeyPair = {
Crypto.ensureSpongyIsRemoved()
val kpg = java.security.KeyPairGenerator.getInstance("RSA", "AndroidKeyStore")
val calendar = Calendar.getInstance()
val now = calendar.getTime
calendar.add(Calendar.YEAR, 100)
val end = calendar.getTime
kpg.initialize(
new KeyPairGeneratorSpec.Builder(context.getApplicationContext)
.setAlias(alias)
.setStartDate(now)
.setEndDate(end)
.setSerialNumber(BigInteger.valueOf(1))
.setSubject(new X500Principal("CN=Ledger"))
.build()
)
kpg.generateKeyPair()
}
}
| LedgerHQ/ledger-wallet-android | app/src/main/scala/co/ledger/wallet/core/security/AndroidKeystore.scala | Scala | mit | 2,587 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.deploy.history
import java.io.{File, FileNotFoundException, IOException}
import java.nio.file.Files
import java.util.{Date, ServiceLoader}
import java.util.concurrent.{ConcurrentHashMap, ExecutorService, Future, TimeUnit}
import java.util.zip.{ZipEntry, ZipOutputStream}
import scala.collection.JavaConverters._
import scala.collection.mutable
import scala.concurrent.ExecutionException
import scala.io.Source
import scala.util.Try
import scala.xml.Node
import com.fasterxml.jackson.annotation.JsonIgnore
import com.google.common.io.ByteStreams
import com.google.common.util.concurrent.MoreExecutors
import org.apache.hadoop.fs.{FileStatus, FileSystem, Path}
import org.apache.hadoop.hdfs.{DFSInputStream, DistributedFileSystem}
import org.apache.hadoop.hdfs.protocol.HdfsConstants
import org.apache.hadoop.security.AccessControlException
import org.fusesource.leveldbjni.internal.NativeDB
import org.apache.spark.{SecurityManager, SparkConf, SparkException}
import org.apache.spark.deploy.SparkHadoopUtil
import org.apache.spark.internal.Logging
import org.apache.spark.internal.config._
import org.apache.spark.internal.config.History._
import org.apache.spark.internal.config.Status._
import org.apache.spark.internal.config.Tests.IS_TESTING
import org.apache.spark.internal.config.UI._
import org.apache.spark.io.CompressionCodec
import org.apache.spark.scheduler._
import org.apache.spark.scheduler.ReplayListenerBus._
import org.apache.spark.status._
import org.apache.spark.status.KVUtils._
import org.apache.spark.status.api.v1.{ApplicationAttemptInfo, ApplicationInfo}
import org.apache.spark.ui.SparkUI
import org.apache.spark.util.{Clock, SystemClock, ThreadUtils, Utils}
import org.apache.spark.util.kvstore._
/**
* A class that provides application history from event logs stored in the file system.
* This provider checks for new finished applications in the background periodically and
* renders the history application UI by parsing the associated event logs.
*
* == How new and updated attempts are detected ==
*
* - New attempts are detected in [[checkForLogs]]: the log dir is scanned, and any entries in the
* log dir whose size changed since the last scan time are considered new or updated. These are
* replayed to create a new attempt info entry and update or create a matching application info
* element in the list of applications.
* - Updated attempts are also found in [[checkForLogs]] -- if the attempt's log file has grown, the
* attempt is replaced by another one with a larger log size.
*
* The use of log size, rather than simply relying on modification times, is needed to
* address the following issues
* - some filesystems do not appear to update the `modtime` value whenever data is flushed to
* an open file output stream. Changes to the history may not be picked up.
* - the granularity of the `modtime` field may be 2+ seconds. Rapid changes to the FS can be
* missed.
*
* Tracking filesize works given the following invariant: the logs get bigger
* as new events are added. If a format was used in which this did not hold, the mechanism would
* break. Simple streaming of JSON-formatted events, as is implemented today, implicitly
* maintains this invariant.
*/
private[history] class FsHistoryProvider(conf: SparkConf, clock: Clock)
extends ApplicationHistoryProvider with Logging {
def this(conf: SparkConf) = {
this(conf, new SystemClock())
}
import FsHistoryProvider._
// Interval between safemode checks.
private val SAFEMODE_CHECK_INTERVAL_S = conf.get(History.SAFEMODE_CHECK_INTERVAL_S)
// Interval between each check for event log updates
private val UPDATE_INTERVAL_S = conf.get(History.UPDATE_INTERVAL_S)
// Interval between each cleaner checks for event logs to delete
private val CLEAN_INTERVAL_S = conf.get(History.CLEANER_INTERVAL_S)
// Number of threads used to replay event logs.
private val NUM_PROCESSING_THREADS = conf.get(History.NUM_REPLAY_THREADS)
private val logDir = conf.get(History.HISTORY_LOG_DIR)
private val historyUiAclsEnable = conf.get(History.HISTORY_SERVER_UI_ACLS_ENABLE)
private val historyUiAdminAcls = conf.get(History.HISTORY_SERVER_UI_ADMIN_ACLS)
private val historyUiAdminAclsGroups = conf.get(History.HISTORY_SERVER_UI_ADMIN_ACLS_GROUPS)
  logInfo(s"History server ui acls " + (if (historyUiAclsEnable) "enabled" else "disabled") +
    "; users with admin permissions: " + historyUiAdminAcls.mkString(",") +
    "; groups with admin permissions: " + historyUiAdminAclsGroups.mkString(","))
private val hadoopConf = SparkHadoopUtil.get.newConfiguration(conf)
// Visible for testing
private[history] val fs: FileSystem = new Path(logDir).getFileSystem(hadoopConf)
// Used by check event thread and clean log thread.
// Scheduled thread pool size must be one, otherwise it will have concurrent issues about fs
// and applications between check task and clean task.
private val pool = ThreadUtils.newDaemonSingleThreadScheduledExecutor("spark-history-task-%d")
// The modification time of the newest log detected during the last scan. Currently only
// used for logging msgs (logs are re-scanned based on file size, rather than modtime)
private val lastScanTime = new java.util.concurrent.atomic.AtomicLong(-1)
private val pendingReplayTasksCount = new java.util.concurrent.atomic.AtomicInteger(0)
private val storePath = conf.get(LOCAL_STORE_DIR).map(new File(_))
private val fastInProgressParsing = conf.get(FAST_IN_PROGRESS_PARSING)
// Visible for testing.
private[history] val listing: KVStore = storePath.map { path =>
val dbPath = Files.createDirectories(new File(path, "listing.ldb").toPath()).toFile()
Utils.chmod700(dbPath)
val metadata = new FsHistoryProviderMetadata(CURRENT_LISTING_VERSION,
AppStatusStore.CURRENT_VERSION, logDir.toString())
try {
open(dbPath, metadata)
} catch {
// If there's an error, remove the listing database and any existing UI database
// from the store directory, since it's extremely likely that they'll all contain
// incompatible information.
case _: UnsupportedStoreVersionException | _: MetadataMismatchException =>
logInfo("Detected incompatible DB versions, deleting...")
path.listFiles().foreach(Utils.deleteRecursively)
open(dbPath, metadata)
case dbExc: NativeDB.DBException =>
// Get rid of the corrupted listing.ldb and re-create it.
logWarning(s"Failed to load disk store $dbPath :", dbExc)
Utils.deleteRecursively(dbPath)
open(dbPath, metadata)
}
}.getOrElse(new InMemoryStore())
private val diskManager = storePath.map { path =>
new HistoryServerDiskManager(conf, path, listing, clock)
}
private val blacklist = new ConcurrentHashMap[String, Long]
// Visible for testing
private[history] def isBlacklisted(path: Path): Boolean = {
blacklist.containsKey(path.getName)
}
private def blacklist(path: Path): Unit = {
blacklist.put(path.getName, clock.getTimeMillis())
}
/**
* Removes expired entries in the blacklist, according to the provided `expireTimeInSeconds`.
*/
private def clearBlacklist(expireTimeInSeconds: Long): Unit = {
val expiredThreshold = clock.getTimeMillis() - expireTimeInSeconds * 1000
blacklist.asScala.retain((_, creationTime) => creationTime >= expiredThreshold)
}
private val activeUIs = new mutable.HashMap[(String, Option[String]), LoadedAppUI]()
/**
* Return a runnable that performs the given operation on the event logs.
* This operation is expected to be executed periodically.
*/
private def getRunner(operateFun: () => Unit): Runnable = {
new Runnable() {
override def run(): Unit = Utils.tryOrExit {
operateFun()
}
}
}
/**
* Fixed size thread pool to fetch and parse log files.
*/
private val replayExecutor: ExecutorService = {
if (!Utils.isTesting) {
ThreadUtils.newDaemonFixedThreadPool(NUM_PROCESSING_THREADS, "log-replay-executor")
} else {
MoreExecutors.sameThreadExecutor()
}
}
val initThread = initialize()
private[history] def initialize(): Thread = {
if (!isFsInSafeMode()) {
startPolling()
null
} else {
startSafeModeCheckThread(None)
}
}
private[history] def startSafeModeCheckThread(
errorHandler: Option[Thread.UncaughtExceptionHandler]): Thread = {
// Cannot probe anything while the FS is in safe mode, so spawn a new thread that will wait
// for the FS to leave safe mode before enabling polling. This allows the main history server
// UI to be shown (so that the user can see the HDFS status).
val initThread = new Thread(new Runnable() {
override def run(): Unit = {
try {
while (isFsInSafeMode()) {
logInfo("HDFS is still in safe mode. Waiting...")
val deadline = clock.getTimeMillis() +
TimeUnit.SECONDS.toMillis(SAFEMODE_CHECK_INTERVAL_S)
clock.waitTillTime(deadline)
}
startPolling()
} catch {
case _: InterruptedException =>
}
}
})
initThread.setDaemon(true)
initThread.setName(s"${getClass().getSimpleName()}-init")
initThread.setUncaughtExceptionHandler(errorHandler.getOrElse(
new Thread.UncaughtExceptionHandler() {
override def uncaughtException(t: Thread, e: Throwable): Unit = {
logError("Error initializing FsHistoryProvider.", e)
System.exit(1)
}
}))
initThread.start()
initThread
}
private def startPolling(): Unit = {
diskManager.foreach(_.initialize())
// Validate the log directory.
val path = new Path(logDir)
try {
if (!fs.getFileStatus(path).isDirectory) {
throw new IllegalArgumentException(
"Logging directory specified is not a directory: %s".format(logDir))
}
} catch {
case f: FileNotFoundException =>
var msg = s"Log directory specified does not exist: $logDir"
if (logDir == DEFAULT_LOG_DIR) {
msg += " Did you configure the correct one through spark.history.fs.logDirectory?"
}
throw new FileNotFoundException(msg).initCause(f)
}
// Disable the background thread during tests.
if (!conf.contains(IS_TESTING)) {
// A task that periodically checks for event log updates on disk.
logDebug(s"Scheduling update thread every $UPDATE_INTERVAL_S seconds")
pool.scheduleWithFixedDelay(
getRunner(() => checkForLogs()), 0, UPDATE_INTERVAL_S, TimeUnit.SECONDS)
if (conf.get(CLEANER_ENABLED)) {
// A task that periodically cleans event logs on disk.
pool.scheduleWithFixedDelay(
getRunner(() => cleanLogs()), 0, CLEAN_INTERVAL_S, TimeUnit.SECONDS)
}
if (conf.contains(DRIVER_LOG_DFS_DIR) && conf.get(DRIVER_LOG_CLEANER_ENABLED)) {
pool.scheduleWithFixedDelay(getRunner(() => cleanDriverLogs()),
0,
conf.get(DRIVER_LOG_CLEANER_INTERVAL),
TimeUnit.SECONDS)
}
} else {
logDebug("Background update thread disabled for testing")
}
}
override def getListing(): Iterator[ApplicationInfo] = {
// Return the listing in end time descending order.
listing.view(classOf[ApplicationInfoWrapper])
.index("endTime")
.reverse()
.iterator()
.asScala
.map(_.toApplicationInfo())
}
override def getApplicationInfo(appId: String): Option[ApplicationInfo] = {
try {
Some(load(appId).toApplicationInfo())
} catch {
case _: NoSuchElementException =>
None
}
}
override def getEventLogsUnderProcess(): Int = pendingReplayTasksCount.get()
override def getLastUpdatedTime(): Long = lastScanTime.get()
/**
* Split a comma separated String, filter out any empty items, and return a Sequence of strings
*/
private def stringToSeq(list: String): Seq[String] = {
list.split(',').map(_.trim).filter(!_.isEmpty)
}
override def getAppUI(appId: String, attemptId: Option[String]): Option[LoadedAppUI] = {
val app = try {
load(appId)
} catch {
case _: NoSuchElementException =>
return None
}
val attempt = app.attempts.find(_.info.attemptId == attemptId).orNull
if (attempt == null) {
return None
}
val conf = this.conf.clone()
val secManager = new SecurityManager(conf)
secManager.setAcls(historyUiAclsEnable)
// make sure to set admin acls before view acls so they are properly picked up
secManager.setAdminAcls(historyUiAdminAcls ++ stringToSeq(attempt.adminAcls.getOrElse("")))
secManager.setViewAcls(attempt.info.sparkUser, stringToSeq(attempt.viewAcls.getOrElse("")))
secManager.setAdminAclsGroups(historyUiAdminAclsGroups ++
stringToSeq(attempt.adminAclsGroups.getOrElse("")))
secManager.setViewAclsGroups(stringToSeq(attempt.viewAclsGroups.getOrElse("")))
val kvstore = try {
diskManager match {
case Some(sm) =>
loadDiskStore(sm, appId, attempt)
case _ =>
createInMemoryStore(attempt)
}
} catch {
case _: FileNotFoundException =>
return None
}
val ui = SparkUI.create(None, new HistoryAppStatusStore(conf, kvstore), conf, secManager,
app.info.name, HistoryServer.getAttemptURI(appId, attempt.info.attemptId),
attempt.info.startTime.getTime(), attempt.info.appSparkVersion)
loadPlugins().foreach(_.setupUI(ui))
val loadedUI = LoadedAppUI(ui)
synchronized {
activeUIs((appId, attemptId)) = loadedUI
}
Some(loadedUI)
}
override def getEmptyListingHtml(): Seq[Node] = {
<p>
Did you specify the correct logging directory? Please verify your setting of
<span style="font-style:italic">spark.history.fs.logDirectory</span>
listed above and whether you have the permissions to access it.
<br/>
It is also possible that your application did not run to
completion or did not stop the SparkContext.
</p>
}
override def getConfig(): Map[String, String] = {
val safeMode = if (isFsInSafeMode()) {
Map("HDFS State" -> "In safe mode, application logs not available.")
} else {
Map()
}
Map("Event log directory" -> logDir.toString) ++ safeMode
}
override def stop(): Unit = {
try {
if (initThread != null && initThread.isAlive()) {
initThread.interrupt()
initThread.join()
}
Seq(pool, replayExecutor).foreach { executor =>
executor.shutdown()
if (!executor.awaitTermination(5, TimeUnit.SECONDS)) {
executor.shutdownNow()
}
}
} finally {
activeUIs.foreach { case (_, loadedUI) => loadedUI.ui.store.close() }
activeUIs.clear()
listing.close()
}
}
override def onUIDetached(appId: String, attemptId: Option[String], ui: SparkUI): Unit = {
val uiOption = synchronized {
activeUIs.remove((appId, attemptId))
}
uiOption.foreach { loadedUI =>
loadedUI.lock.writeLock().lock()
try {
loadedUI.ui.store.close()
} finally {
loadedUI.lock.writeLock().unlock()
}
diskManager.foreach { dm =>
// If the UI is not valid, delete its files from disk, if any. This relies on the fact that
// ApplicationCache will never call this method concurrently with getAppUI() for the same
// appId / attemptId.
dm.release(appId, attemptId, delete = !loadedUI.valid)
}
}
}
/**
* Builds the application list based on the current contents of the log directory.
* Tries to reuse as much of the data already in memory as possible, by not reading
* applications that haven't been updated since last time the logs were checked.
*/
private[history] def checkForLogs(): Unit = {
try {
val newLastScanTime = clock.getTimeMillis()
logDebug(s"Scanning $logDir with lastScanTime==$lastScanTime")
val updated = Option(fs.listStatus(new Path(logDir))).map(_.toSeq).getOrElse(Nil)
.filter { entry =>
!entry.isDirectory() &&
// FsHistoryProvider used to generate a hidden file which can't be read. Accidentally
// reading a garbage file is safe, but we would log an error which can be scary to
// the end-user.
!entry.getPath().getName().startsWith(".") &&
!isBlacklisted(entry.getPath)
}
.filter { entry =>
try {
val info = listing.read(classOf[LogInfo], entry.getPath().toString())
if (info.appId.isDefined) {
// If the SHS view has a valid application, update the time the file was last seen so
// that the entry is not deleted from the SHS listing. Also update the file size, in
// case the code below decides we don't need to parse the log.
listing.write(info.copy(lastProcessed = newLastScanTime, fileSize = entry.getLen()))
}
if (shouldReloadLog(info, entry)) {
if (info.appId.isDefined && fastInProgressParsing) {
// When fast in-progress parsing is on, we don't need to re-parse when the
// size changes, but we do need to invalidate any existing UIs.
// Also, we need to update the `lastUpdated time` to display the updated time in
// the HistoryUI and to avoid cleaning the inprogress app while running.
val appInfo = listing.read(classOf[ApplicationInfoWrapper], info.appId.get)
val attemptList = appInfo.attempts.map { attempt =>
if (attempt.info.attemptId == info.attemptId) {
new AttemptInfoWrapper(
attempt.info.copy(lastUpdated = new Date(newLastScanTime)),
attempt.logPath,
attempt.fileSize,
attempt.adminAcls,
attempt.viewAcls,
attempt.adminAclsGroups,
attempt.viewAclsGroups)
} else {
attempt
}
}
val updatedAppInfo = new ApplicationInfoWrapper(appInfo.info, attemptList)
listing.write(updatedAppInfo)
invalidateUI(info.appId.get, info.attemptId)
false
} else {
true
}
} else {
false
}
} catch {
case _: NoSuchElementException =>
// If the file is currently not being tracked by the SHS, add an entry for it and try
// to parse it. This will allow the cleaner code to detect the file as stale later on
// if it was not possible to parse it.
listing.write(LogInfo(entry.getPath().toString(), newLastScanTime, LogType.EventLogs,
None, None, entry.getLen()))
entry.getLen() > 0
}
}
.sortWith { case (entry1, entry2) =>
entry1.getModificationTime() > entry2.getModificationTime()
}
if (updated.nonEmpty) {
logDebug(s"New/updated attempts found: ${updated.size} ${updated.map(_.getPath)}")
}
val tasks = updated.flatMap { entry =>
try {
val task: Future[Unit] = replayExecutor.submit(new Runnable {
override def run(): Unit = mergeApplicationListing(entry, newLastScanTime, true)
}, Unit)
Some(task -> entry.getPath)
} catch {
// let the iteration over the updated entries break, since an exception on
// replayExecutor.submit (..) indicates the ExecutorService is unable
// to take any more submissions at this time
case e: Exception =>
logError(s"Exception while submitting event log for replay", e)
None
}
}
pendingReplayTasksCount.addAndGet(tasks.size)
// Wait for all tasks to finish. This makes sure that checkForLogs
// is not scheduled again while some tasks are already running in
// the replayExecutor.
tasks.foreach { case (task, path) =>
try {
task.get()
} catch {
case e: InterruptedException =>
throw e
case e: ExecutionException if e.getCause.isInstanceOf[AccessControlException] =>
// We don't have read permissions on the log file
logWarning(s"Unable to read log $path", e.getCause)
blacklist(path)
case e: Exception =>
logError("Exception while merging application listings", e)
} finally {
pendingReplayTasksCount.decrementAndGet()
}
}
// Delete all information about applications whose log files disappeared from storage.
// This is done by identifying the event logs which were not touched by the current
// directory scan.
//
// Only entries with valid applications are cleaned up here. Cleaning up invalid log
// files is done by the periodic cleaner task.
val stale = listing.view(classOf[LogInfo])
.index("lastProcessed")
.last(newLastScanTime - 1)
.asScala
.toList
stale.foreach { log =>
log.appId.foreach { appId =>
cleanAppData(appId, log.attemptId, log.logPath)
listing.delete(classOf[LogInfo], log.logPath)
}
}
lastScanTime.set(newLastScanTime)
} catch {
case e: Exception => logError("Exception in checking for event log updates", e)
}
}
private[history] def shouldReloadLog(info: LogInfo, entry: FileStatus): Boolean = {
var result = info.fileSize < entry.getLen
if (!result && info.logPath.endsWith(EventLoggingListener.IN_PROGRESS)) {
try {
result = Utils.tryWithResource(fs.open(entry.getPath)) { in =>
in.getWrappedStream match {
case dfsIn: DFSInputStream => info.fileSize < dfsIn.getFileLength
case _ => false
}
}
} catch {
case e: Exception =>
logDebug(s"Failed to check the length for the file : ${info.logPath}", e)
}
}
result
}
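
  // Added illustrative note (not part of the original Spark sources): given a listing
  // entry recorded with fileSize = 1000, a log whose current length is 1500 is reloaded
  // because it grew; a log whose reported length is unchanged is reloaded only if it is
  // an .inprogress file whose DFSInputStream exposes unflushed bytes beyond the recorded
  // size. This is the "logs only grow" invariant described in the class comment above.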
private def cleanAppData(appId: String, attemptId: Option[String], logPath: String): Unit = {
try {
val app = load(appId)
val (attempt, others) = app.attempts.partition(_.info.attemptId == attemptId)
assert(attempt.isEmpty || attempt.size == 1)
val isStale = attempt.headOption.exists { a =>
if (a.logPath != new Path(logPath).getName()) {
// If the log file name does not match, then probably the old log file was from an
// in progress application. Just return that the app should be left alone.
false
} else {
val maybeUI = synchronized {
activeUIs.remove(appId -> attemptId)
}
maybeUI.foreach { ui =>
ui.invalidate()
ui.ui.store.close()
}
diskManager.foreach(_.release(appId, attemptId, delete = true))
true
}
}
if (isStale) {
if (others.nonEmpty) {
val newAppInfo = new ApplicationInfoWrapper(app.info, others)
listing.write(newAppInfo)
} else {
listing.delete(classOf[ApplicationInfoWrapper], appId)
}
}
} catch {
case _: NoSuchElementException =>
}
}
override def writeEventLogs(
appId: String,
attemptId: Option[String],
zipStream: ZipOutputStream): Unit = {
/**
* This method compresses the files passed in, and writes the compressed data out into the
* [[OutputStream]] passed in. Each file is written as a new [[ZipEntry]] with its name being
* the name of the file being compressed.
*/
def zipFileToStream(file: Path, entryName: String, outputStream: ZipOutputStream): Unit = {
val fs = file.getFileSystem(hadoopConf)
val inputStream = fs.open(file, 1 * 1024 * 1024) // 1MB Buffer
try {
outputStream.putNextEntry(new ZipEntry(entryName))
ByteStreams.copy(inputStream, outputStream)
outputStream.closeEntry()
} finally {
inputStream.close()
}
}
val app = try {
load(appId)
} catch {
case _: NoSuchElementException =>
throw new SparkException(s"Logs for $appId not found.")
}
try {
// If no attempt is specified, or there is no attemptId for attempts, return all attempts
attemptId
.map { id => app.attempts.filter(_.info.attemptId == Some(id)) }
.getOrElse(app.attempts)
.map(_.logPath)
.foreach { log =>
zipFileToStream(new Path(logDir, log), log, zipStream)
}
} finally {
zipStream.close()
}
}
/**
* Replay the given log file, saving the application in the listing db.
*/
protected def mergeApplicationListing(
fileStatus: FileStatus,
scanTime: Long,
enableOptimizations: Boolean): Unit = {
val eventsFilter: ReplayEventsFilter = { eventString =>
eventString.startsWith(APPL_START_EVENT_PREFIX) ||
eventString.startsWith(APPL_END_EVENT_PREFIX) ||
eventString.startsWith(LOG_START_EVENT_PREFIX) ||
eventString.startsWith(ENV_UPDATE_EVENT_PREFIX)
}
val logPath = fileStatus.getPath()
val appCompleted = isCompleted(logPath.getName())
val reparseChunkSize = conf.get(END_EVENT_REPARSE_CHUNK_SIZE)
// Enable halt support in listener if:
// - app in progress && fast parsing enabled
// - skipping to end event is enabled (regardless of in-progress state)
val shouldHalt = enableOptimizations &&
((!appCompleted && fastInProgressParsing) || reparseChunkSize > 0)
val bus = new ReplayListenerBus()
val listener = new AppListingListener(fileStatus, clock, shouldHalt)
bus.addListener(listener)
logInfo(s"Parsing $logPath for listing data...")
Utils.tryWithResource(EventLoggingListener.openEventLog(logPath, fs)) { in =>
bus.replay(in, logPath.toString, !appCompleted, eventsFilter)
}
// If enabled above, the listing listener will halt parsing when there's enough information to
// create a listing entry. When the app is completed, or fast parsing is disabled, we still need
// to replay until the end of the log file to try to find the app end event. Instead of reading
// and parsing line by line, this code skips bytes from the underlying stream so that it is
// positioned somewhere close to the end of the log file.
//
// Because the application end event is written while some Spark subsystems such as the
// scheduler are still active, there is no guarantee that the end event will be the last
// in the log. So, to be safe, the code uses a configurable chunk to be re-parsed at
// the end of the file, and retries parsing the whole log later if the needed data is
// still not found.
//
// Note that skipping bytes in compressed files is still not cheap, but there are still some
// minor gains over the normal log parsing done by the replay bus.
//
// This code re-opens the file so that it knows where it's skipping to. This isn't as cheap as
    // just skipping from the current position, but there isn't a good way to detect what the
// current position is, since the replay listener bus buffers data internally.
val lookForEndEvent = shouldHalt && (appCompleted || !fastInProgressParsing)
if (lookForEndEvent && listener.applicationInfo.isDefined) {
Utils.tryWithResource(EventLoggingListener.openEventLog(logPath, fs)) { in =>
val target = fileStatus.getLen() - reparseChunkSize
if (target > 0) {
logInfo(s"Looking for end event; skipping $target bytes from $logPath...")
var skipped = 0L
while (skipped < target) {
skipped += in.skip(target - skipped)
}
}
val source = Source.fromInputStream(in).getLines()
// Because skipping may leave the stream in the middle of a line, read the next line
// before replaying.
if (target > 0) {
source.next()
}
bus.replay(source, logPath.toString, !appCompleted, eventsFilter)
}
}
logInfo(s"Finished parsing $logPath")
listener.applicationInfo match {
case Some(app) if !lookForEndEvent || app.attempts.head.info.completed =>
// In this case, we either didn't care about the end event, or we found it. So the
// listing data is good.
invalidateUI(app.info.id, app.attempts.head.info.attemptId)
addListing(app)
listing.write(LogInfo(logPath.toString(), scanTime, LogType.EventLogs, Some(app.info.id),
app.attempts.head.info.attemptId, fileStatus.getLen()))
// For a finished log, remove the corresponding "in progress" entry from the listing DB if
// the file is really gone.
if (appCompleted) {
val inProgressLog = logPath.toString() + EventLoggingListener.IN_PROGRESS
try {
// Fetch the entry first to avoid an RPC when it's already removed.
listing.read(classOf[LogInfo], inProgressLog)
if (!fs.isFile(new Path(inProgressLog))) {
listing.delete(classOf[LogInfo], inProgressLog)
}
} catch {
case _: NoSuchElementException =>
}
}
case Some(_) =>
// In this case, the attempt is still not marked as finished but was expected to. This can
// mean the end event is before the configured threshold, so call the method again to
// re-parse the whole log.
logInfo(s"Reparsing $logPath since end event was not found.")
mergeApplicationListing(fileStatus, scanTime, false)
case _ =>
// If the app hasn't written down its app ID to the logs, still record the entry in the
// listing db, with an empty ID. This will make the log eligible for deletion if the app
// does not make progress after the configured max log age.
listing.write(
LogInfo(logPath.toString(), scanTime, LogType.EventLogs, None, None, fileStatus.getLen()))
}
}
/**
* Invalidate an existing UI for a given app attempt. See LoadedAppUI for a discussion on the
* UI lifecycle.
*/
private def invalidateUI(appId: String, attemptId: Option[String]): Unit = {
synchronized {
activeUIs.get((appId, attemptId)).foreach { ui =>
ui.invalidate()
ui.ui.store.close()
}
}
}
/**
* Delete event logs from the log directory according to the clean policy defined by the user.
*/
private[history] def cleanLogs(): Unit = Utils.tryLog {
val maxTime = clock.getTimeMillis() - conf.get(MAX_LOG_AGE_S) * 1000
val expired = listing.view(classOf[ApplicationInfoWrapper])
.index("oldestAttempt")
.reverse()
.first(maxTime)
.asScala
.toList
expired.foreach { app =>
// Applications may have multiple attempts, some of which may not need to be deleted yet.
val (remaining, toDelete) = app.attempts.partition { attempt =>
attempt.info.lastUpdated.getTime() >= maxTime
}
if (remaining.nonEmpty) {
val newApp = new ApplicationInfoWrapper(app.info, remaining)
listing.write(newApp)
}
toDelete.foreach { attempt =>
logInfo(s"Deleting expired event log for ${attempt.logPath}")
val logPath = new Path(logDir, attempt.logPath)
listing.delete(classOf[LogInfo], logPath.toString())
cleanAppData(app.id, attempt.info.attemptId, logPath.toString())
deleteLog(fs, logPath)
}
if (remaining.isEmpty) {
listing.delete(app.getClass(), app.id)
}
}
// Delete log files that don't have a valid application and exceed the configured max age.
val stale = listing.view(classOf[LogInfo])
.index("lastProcessed")
.reverse()
.first(maxTime)
.asScala
.filter { l => l.logType == null || l.logType == LogType.EventLogs }
.toList
stale.foreach { log =>
if (log.appId.isEmpty) {
logInfo(s"Deleting invalid / corrupt event log ${log.logPath}")
deleteLog(fs, new Path(log.logPath))
listing.delete(classOf[LogInfo], log.logPath)
}
}
// Clean the blacklist from the expired entries.
clearBlacklist(CLEAN_INTERVAL_S)
}
/**
* Delete driver logs from the configured spark dfs dir that exceed the configured max age
*/
private[history] def cleanDriverLogs(): Unit = Utils.tryLog {
val driverLogDir = conf.get(DRIVER_LOG_DFS_DIR).get
val driverLogFs = new Path(driverLogDir).getFileSystem(hadoopConf)
val currentTime = clock.getTimeMillis()
val maxTime = currentTime - conf.get(MAX_DRIVER_LOG_AGE_S) * 1000
val logFiles = driverLogFs.listLocatedStatus(new Path(driverLogDir))
while (logFiles.hasNext()) {
val f = logFiles.next()
// Do not rely on 'modtime' as it is not updated for all filesystems when files are written to
val deleteFile =
try {
val info = listing.read(classOf[LogInfo], f.getPath().toString())
        // Update the lastProcessed time of the file if its length or modification time has changed
if (info.fileSize < f.getLen() || info.lastProcessed < f.getModificationTime()) {
listing.write(
info.copy(lastProcessed = currentTime, fileSize = f.getLen()))
false
} else if (info.lastProcessed > maxTime) {
false
} else {
true
}
} catch {
case e: NoSuchElementException =>
// For every new driver log file discovered, create a new entry in listing
listing.write(LogInfo(f.getPath().toString(), currentTime, LogType.DriverLogs, None,
None, f.getLen()))
false
}
if (deleteFile) {
logInfo(s"Deleting expired driver log for: ${f.getPath().getName()}")
listing.delete(classOf[LogInfo], f.getPath().toString())
deleteLog(driverLogFs, f.getPath())
}
}
// Delete driver log file entries that exceed the configured max age and
// may have been deleted on filesystem externally.
val stale = listing.view(classOf[LogInfo])
.index("lastProcessed")
.reverse()
.first(maxTime)
.asScala
.filter { l => l.logType != null && l.logType == LogType.DriverLogs }
.toList
stale.foreach { log =>
logInfo(s"Deleting invalid driver log ${log.logPath}")
listing.delete(classOf[LogInfo], log.logPath)
deleteLog(driverLogFs, new Path(log.logPath))
}
}
/**
* Rebuilds the application state store from its event log.
*/
private def rebuildAppStore(
store: KVStore,
eventLog: FileStatus,
lastUpdated: Long): Unit = {
// Disable async updates, since they cause higher memory usage, and it's ok to take longer
// to parse the event logs in the SHS.
val replayConf = conf.clone().set(ASYNC_TRACKING_ENABLED, false)
val trackingStore = new ElementTrackingStore(store, replayConf)
val replayBus = new ReplayListenerBus()
val listener = new AppStatusListener(trackingStore, replayConf, false,
lastUpdateTime = Some(lastUpdated))
replayBus.addListener(listener)
for {
plugin <- loadPlugins()
listener <- plugin.createListeners(conf, trackingStore)
} replayBus.addListener(listener)
try {
val path = eventLog.getPath()
logInfo(s"Parsing $path to re-build UI...")
Utils.tryWithResource(EventLoggingListener.openEventLog(path, fs)) { in =>
replayBus.replay(in, path.toString(), maybeTruncated = !isCompleted(path.toString()))
}
trackingStore.close(false)
logInfo(s"Finished parsing $path")
} catch {
case e: Exception =>
Utils.tryLogNonFatalError {
trackingStore.close()
}
throw e
}
}
/**
* Checks whether HDFS is in safe mode.
*
   * Note that DistributedFileSystem is a `@LimitedPrivate` class, which for all practical purposes
* makes it more public than not.
*/
private[history] def isFsInSafeMode(): Boolean = fs match {
case dfs: DistributedFileSystem =>
isFsInSafeMode(dfs)
case _ =>
false
}
private[history] def isFsInSafeMode(dfs: DistributedFileSystem): Boolean = {
/* true to check only for Active NNs status */
dfs.setSafeMode(HdfsConstants.SafeModeAction.SAFEMODE_GET, true)
}
/**
* String description for diagnostics
* @return a summary of the component state
*/
override def toString: String = {
val count = listing.count(classOf[ApplicationInfoWrapper])
s"""|FsHistoryProvider{logdir=$logDir,
| storedir=$storePath,
| last scan time=$lastScanTime
| application count=$count}""".stripMargin
}
private def load(appId: String): ApplicationInfoWrapper = {
listing.read(classOf[ApplicationInfoWrapper], appId)
}
/**
* Write the app's information to the given store. Serialized to avoid the (notedly rare) case
* where two threads are processing separate attempts of the same application.
*/
private def addListing(app: ApplicationInfoWrapper): Unit = listing.synchronized {
val attempt = app.attempts.head
val oldApp = try {
load(app.id)
} catch {
case _: NoSuchElementException =>
app
}
def compareAttemptInfo(a1: AttemptInfoWrapper, a2: AttemptInfoWrapper): Boolean = {
a1.info.startTime.getTime() > a2.info.startTime.getTime()
}
val attempts = oldApp.attempts.filter(_.info.attemptId != attempt.info.attemptId) ++
List(attempt)
val newAppInfo = new ApplicationInfoWrapper(
app.info,
attempts.sortWith(compareAttemptInfo))
listing.write(newAppInfo)
}
private def loadDiskStore(
dm: HistoryServerDiskManager,
appId: String,
attempt: AttemptInfoWrapper): KVStore = {
val metadata = new AppStatusStoreMetadata(AppStatusStore.CURRENT_VERSION)
// First check if the store already exists and try to open it. If that fails, then get rid of
// the existing data.
dm.openStore(appId, attempt.info.attemptId).foreach { path =>
try {
return KVUtils.open(path, metadata)
} catch {
case e: Exception =>
logInfo(s"Failed to open existing store for $appId/${attempt.info.attemptId}.", e)
dm.release(appId, attempt.info.attemptId, delete = true)
}
}
// At this point the disk data either does not exist or was deleted because it failed to
// load, so the event log needs to be replayed.
val status = fs.getFileStatus(new Path(logDir, attempt.logPath))
val isCompressed = EventLoggingListener.codecName(status.getPath()).flatMap { name =>
Try(CompressionCodec.getShortName(name)).toOption
}.isDefined
logInfo(s"Leasing disk manager space for app $appId / ${attempt.info.attemptId}...")
val lease = dm.lease(status.getLen(), isCompressed)
val newStorePath = try {
Utils.tryWithResource(KVUtils.open(lease.tmpPath, metadata)) { store =>
rebuildAppStore(store, status, attempt.info.lastUpdated.getTime())
}
lease.commit(appId, attempt.info.attemptId)
} catch {
case e: Exception =>
lease.rollback()
throw e
}
KVUtils.open(newStorePath, metadata)
}
private def createInMemoryStore(attempt: AttemptInfoWrapper): KVStore = {
val store = new InMemoryStore()
val status = fs.getFileStatus(new Path(logDir, attempt.logPath))
rebuildAppStore(store, status, attempt.info.lastUpdated.getTime())
store
}
private def loadPlugins(): Iterable[AppHistoryServerPlugin] = {
ServiceLoader.load(classOf[AppHistoryServerPlugin], Utils.getContextOrSparkClassLoader).asScala
}
/** For testing. Returns internal data about a single attempt. */
private[history] def getAttempt(appId: String, attemptId: Option[String]): AttemptInfoWrapper = {
load(appId).attempts.find(_.info.attemptId == attemptId).getOrElse(
throw new NoSuchElementException(s"Cannot find attempt $attemptId of $appId."))
}
private def deleteLog(fs: FileSystem, log: Path): Unit = {
if (isBlacklisted(log)) {
logDebug(s"Skipping deleting $log as we don't have permissions on it.")
} else {
try {
fs.delete(log, true)
} catch {
case _: AccessControlException =>
logInfo(s"No permission to delete $log, ignoring.")
case ioe: IOException =>
logError(s"IOException in cleaning $log", ioe)
}
}
}
private def isCompleted(name: String): Boolean = {
!name.endsWith(EventLoggingListener.IN_PROGRESS)
}
}
private[history] object FsHistoryProvider {
  private val APPL_START_EVENT_PREFIX = "{\"Event\":\"SparkListenerApplicationStart\""
  private val APPL_END_EVENT_PREFIX = "{\"Event\":\"SparkListenerApplicationEnd\""
  private val LOG_START_EVENT_PREFIX = "{\"Event\":\"SparkListenerLogStart\""
  private val ENV_UPDATE_EVENT_PREFIX = "{\"Event\":\"SparkListenerEnvironmentUpdate\","
/**
* Current version of the data written to the listing database. When opening an existing
* db, if the version does not match this value, the FsHistoryProvider will throw away
* all data and re-generate the listing data from the event logs.
*/
private[history] val CURRENT_LISTING_VERSION = 1L
}
private[history] case class FsHistoryProviderMetadata(
version: Long,
uiVersion: Long,
logDir: String)
private[history] object LogType extends Enumeration {
val DriverLogs, EventLogs = Value
}
/**
* Tracking info for event logs detected in the configured log directory. Tracks both valid and
* invalid logs (e.g. unparseable logs, recorded as logs with no app ID) so that the cleaner
* can know what log files are safe to delete.
*/
private[history] case class LogInfo(
@KVIndexParam logPath: String,
@KVIndexParam("lastProcessed") lastProcessed: Long,
logType: LogType.Value,
appId: Option[String],
attemptId: Option[String],
fileSize: Long)
private[history] class AttemptInfoWrapper(
val info: ApplicationAttemptInfo,
val logPath: String,
val fileSize: Long,
val adminAcls: Option[String],
val viewAcls: Option[String],
val adminAclsGroups: Option[String],
val viewAclsGroups: Option[String])
private[history] class ApplicationInfoWrapper(
val info: ApplicationInfo,
val attempts: List[AttemptInfoWrapper]) {
@JsonIgnore @KVIndexParam
def id: String = info.id
@JsonIgnore @KVIndexParam("endTime")
def endTime(): Long = attempts.head.info.endTime.getTime()
@JsonIgnore @KVIndexParam("oldestAttempt")
def oldestAttempt(): Long = attempts.map(_.info.lastUpdated.getTime()).min
def toApplicationInfo(): ApplicationInfo = info.copy(attempts = attempts.map(_.info))
}
private[history] class AppListingListener(
log: FileStatus,
clock: Clock,
haltEnabled: Boolean) extends SparkListener {
private val app = new MutableApplicationInfo()
private val attempt = new MutableAttemptInfo(log.getPath().getName(), log.getLen())
private var gotEnvUpdate = false
private var halted = false
override def onApplicationStart(event: SparkListenerApplicationStart): Unit = {
app.id = event.appId.orNull
app.name = event.appName
attempt.attemptId = event.appAttemptId
attempt.startTime = new Date(event.time)
attempt.lastUpdated = new Date(clock.getTimeMillis())
attempt.sparkUser = event.sparkUser
checkProgress()
}
override def onApplicationEnd(event: SparkListenerApplicationEnd): Unit = {
attempt.endTime = new Date(event.time)
attempt.lastUpdated = new Date(log.getModificationTime())
attempt.duration = event.time - attempt.startTime.getTime()
attempt.completed = true
}
override def onEnvironmentUpdate(event: SparkListenerEnvironmentUpdate): Unit = {
// Only parse the first env update, since any future changes don't have any effect on
// the ACLs set for the UI.
if (!gotEnvUpdate) {
def emptyStringToNone(strOption: Option[String]): Option[String] = strOption match {
case Some("") => None
case _ => strOption
}
val allProperties = event.environmentDetails("Spark Properties").toMap
attempt.viewAcls = emptyStringToNone(allProperties.get(UI_VIEW_ACLS.key))
attempt.adminAcls = emptyStringToNone(allProperties.get(ADMIN_ACLS.key))
attempt.viewAclsGroups = emptyStringToNone(allProperties.get(UI_VIEW_ACLS_GROUPS.key))
attempt.adminAclsGroups = emptyStringToNone(allProperties.get(ADMIN_ACLS_GROUPS.key))
gotEnvUpdate = true
checkProgress()
}
}
override def onOtherEvent(event: SparkListenerEvent): Unit = event match {
case SparkListenerLogStart(sparkVersion) =>
attempt.appSparkVersion = sparkVersion
case _ =>
}
def applicationInfo: Option[ApplicationInfoWrapper] = {
if (app.id != null) {
Some(app.toView())
} else {
None
}
}
/**
* Throws a halt exception to stop replay if enough data to create the app listing has been
* read.
*/
private def checkProgress(): Unit = {
if (haltEnabled && !halted && app.id != null && gotEnvUpdate) {
halted = true
throw new HaltReplayException()
}
}
private class MutableApplicationInfo {
var id: String = null
var name: String = null
var coresGranted: Option[Int] = None
var maxCores: Option[Int] = None
var coresPerExecutor: Option[Int] = None
var memoryPerExecutorMB: Option[Int] = None
def toView(): ApplicationInfoWrapper = {
val apiInfo = ApplicationInfo(id, name, coresGranted, maxCores, coresPerExecutor,
memoryPerExecutorMB, Nil)
new ApplicationInfoWrapper(apiInfo, List(attempt.toView()))
}
}
private class MutableAttemptInfo(logPath: String, fileSize: Long) {
var attemptId: Option[String] = None
var startTime = new Date(-1)
var endTime = new Date(-1)
var lastUpdated = new Date(-1)
var duration = 0L
var sparkUser: String = null
var completed = false
var appSparkVersion = ""
var adminAcls: Option[String] = None
var viewAcls: Option[String] = None
var adminAclsGroups: Option[String] = None
var viewAclsGroups: Option[String] = None
def toView(): AttemptInfoWrapper = {
val apiInfo = ApplicationAttemptInfo(
attemptId,
startTime,
endTime,
lastUpdated,
duration,
sparkUser,
completed,
appSparkVersion)
new AttemptInfoWrapper(
apiInfo,
logPath,
fileSize,
adminAcls,
viewAcls,
adminAclsGroups,
viewAclsGroups)
}
}
}
| WindCanDie/spark | core/src/main/scala/org/apache/spark/deploy/history/FsHistoryProvider.scala | Scala | apache-2.0 | 47,914 |
/***********************************************************************
* Copyright (c) 2013-2020 Commonwealth Computer Research, Inc.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Apache License, Version 2.0
* which accompanies this distribution and is available at
* http://www.opensource.org/licenses/apache2.0.php.
***********************************************************************/
package org.locationtech.geomesa.tools.export
import java.io.{File, FileInputStream, FileWriter}
import java.nio.charset.StandardCharsets
import java.nio.file.Files
import java.util.concurrent.atomic.AtomicInteger
import java.util.{Collections, Date}
import org.apache.commons.csv.CSVFormat
import org.apache.commons.io.IOUtils
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.Path
import org.apache.parquet.filter2.compat.FilterCompat
import org.geotools.data._
import org.geotools.data.collection.ListFeatureCollection
import org.geotools.data.memory.{MemoryDataStore, MemoryEntry}
import org.geotools.data.shapefile.ShapefileDataStore
import org.geotools.data.simple.SimpleFeatureStore
import org.geotools.filter.text.ecql.ECQL
import org.geotools.util.URLs
import org.geotools.util.factory.Hints
import org.geotools.wfs.GML
import org.junit.runner.RunWith
import org.locationtech.geomesa.arrow.io.SimpleFeatureArrowFileReader
import org.locationtech.geomesa.convert.text.DelimitedTextConverter
import org.locationtech.geomesa.convert2.SimpleFeatureConverter
import org.locationtech.geomesa.features.ScalaSimpleFeature
import org.locationtech.geomesa.features.avro.AvroDataFileReader
import org.locationtech.geomesa.fs.storage.common.jobs.StorageConfiguration
import org.locationtech.geomesa.fs.storage.orc.OrcFileSystemReader
import org.locationtech.geomesa.parquet.ParquetPathReader
import org.locationtech.geomesa.tools.DataStoreRegistration
import org.locationtech.geomesa.tools.export.ExportCommand.ExportParams
import org.locationtech.geomesa.tools.export.formats.ExportFormat
import org.locationtech.geomesa.utils.bin.BinaryOutputEncoder
import org.locationtech.geomesa.utils.collection.SelfClosingIterator
import org.locationtech.geomesa.utils.geotools.SimpleFeatureTypes
import org.locationtech.geomesa.utils.io.{PathUtils, WithClose, WithStore}
import org.opengis.feature.simple.{SimpleFeature, SimpleFeatureType}
import org.specs2.mutable.Specification
import org.specs2.runner.JUnitRunner
@RunWith(classOf[JUnitRunner])
class ExportCommandTest extends Specification {
import scala.collection.JavaConverters._
val excludes = Seq(ExportFormat.Null)
val formats = ExportFormat.Formats.filterNot(excludes.contains)
val sft = SimpleFeatureTypes.createType("tools", "name:String,dtg:Date,*geom:Point:srid=4326")
val features = List(
// note: shapefiles don't support timestamps, so we leave them at 00:00...
ScalaSimpleFeature.create(sft, "id2", "name2", "2016-01-02T00:00:00.000Z", "POINT(0 2)"),
ScalaSimpleFeature.create(sft, "id1", "name1", "2016-01-01T00:00:00.000Z", "POINT(1 0)")
)
features.foreach(_.getUserData.put(Hints.USE_PROVIDED_FID, java.lang.Boolean.TRUE))
var ds: MemoryDataStore = _
private val counter = new AtomicInteger(0)
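  // Registers the shared in-memory data store under a unique key, runs the given function
  // against a fresh ExportCommand wired to that store, and always unregisters the store afterwards.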
def withCommand[T](fn: ExportCommand[DataStore] => T): T = {
val key = s"${getClass.getName}:${counter.getAndIncrement()}"
val command: ExportCommand[DataStore] = new ExportCommand[DataStore]() {
override val params: ExportParams = new ExportParams() {
override def featureName: String = sft.getTypeName
}
override def connection: Map[String, String] = Map(DataStoreRegistration.param.key -> key)
}
DataStoreRegistration.register(key, ds)
try { fn(command) } finally {
DataStoreRegistration.unregister(key, ds)
}
}
var out: java.nio.file.Path = _
step {
out = Files.createTempDirectory("gm-export-fs-test")
ds = new MemoryDataStore() {
override def dispose(): Unit = {} // prevent dispose from deleting our data
}
ds.createSchema(sft)
ds.getFeatureSource(sft.getTypeName).asInstanceOf[SimpleFeatureStore]
.addFeatures(new ListFeatureCollection(sft, features.map(ScalaSimpleFeature.copy).toArray[SimpleFeature]))
ds.getEntry(sft.getName).asInstanceOf[MemoryEntry].getMemory.asScala.foreach { case (_, feature) =>
feature.getUserData.clear() // clear out the 'original feature' which causes serialization issues...
}
}
"Export command" should {
"export to different file formats" in {
forall(formats) { format =>
val file = s"$out/${format.name}/base/out.${format.extensions.head}"
withCommand { command =>
command.params.file = file
command.execute()
}
readFeatures(format, file) mustEqual features
}
}
"support filtering" in {
forall(formats) { format =>
val file = s"$out/${format.name}/filter/out.${format.extensions.head}"
withCommand { command =>
command.params.file = file
command.params.cqlFilter = ECQL.toFilter("dtg = '2016-01-01T00:00:00.000Z'")
command.execute()
}
readFeatures(format, file) mustEqual features.drop(1)
}
}
"support relational projections" in {
forall(formats) { format =>
val file = s"$out/${format.name}/project/out.${format.extensions.head}"
withCommand { command =>
command.params.file = file
command.params.attributes = List("dtg", "geom", "id").asJava
command.execute()
}
val tsft = SimpleFeatureTypes.createType(sft.getTypeName, "dtg:Date,*geom:Point:srid=4326")
readFeatures(format, file, tsft) mustEqual features.map(ScalaSimpleFeature.retype(tsft, _))
}
}
"support sorting" in {
forall(formats) { format =>
val file = s"$out/${format.name}/sort/out.${format.extensions.head}"
withCommand { command =>
command.params.file = file
command.params.sortFields = Collections.singletonList("dtg")
command.execute()
}
readFeatures(format, file) mustEqual features.reverse
}
// exclude BIN as we only support sort in ascending order
forall(formats.filter(_ != ExportFormat.Bin)) { format =>
val file = s"$out/${format.name}/sort-rev/out.${format.extensions.head}"
withCommand { command =>
command.params.file = file
command.params.sortFields = Collections.singletonList("dtg")
command.params.sortDescending = true
command.execute()
}
readFeatures(format, file) mustEqual features
}
}
"support max features" in {
forall(formats) { format =>
val file = s"$out/${format.name}/max/out.${format.extensions.head}"
withCommand { command =>
command.params.file = file
command.params.maxFeatures = 1
command.execute()
}
readFeatures(format, file) mustEqual features.take(1)
}
}
}
step {
PathUtils.deleteRecursively(out)
}
def readFeatures(format: ExportFormat, file: String, sft: SimpleFeatureType = this.sft): Seq[SimpleFeature] = {
format match {
case ExportFormat.Arrow => readArrow(file)
case ExportFormat.Avro => readAvro(file)
case ExportFormat.Bin => readBin(file, sft)
case ExportFormat.Csv => readCsv(file)
case ExportFormat.Json => readJson(file, sft)
case ExportFormat.Leaflet => readLeaflet(file, sft)
case ExportFormat.Orc => readOrc(file, sft)
case ExportFormat.Parquet => readParquet(file, sft)
case ExportFormat.Shp => readShp(file, sft)
case ExportFormat.Tsv => readTsv(file)
case ExportFormat.Gml2 => readGml2(file, sft)
case ExportFormat.Gml3 => readGml3(file, sft)
}
}
def readArrow(file: String): Seq[SimpleFeature] = {
WithClose(SimpleFeatureArrowFileReader.streaming(() => new FileInputStream(file))) { reader =>
SelfClosingIterator(reader.features()).map(ScalaSimpleFeature.copy).toList
}
}
def readAvro(file: String): Seq[SimpleFeature] =
WithClose(new AvroDataFileReader(new FileInputStream(file)))(_.toList)
def readBin(file: String, sft: SimpleFeatureType): Seq[SimpleFeature] = {
val bytes = IOUtils.toByteArray(new FileInputStream(file))
// hack - set id and name from original features since they aren't exported in the bin format
bytes.grouped(16).map(BinaryOutputEncoder.decode).toSeq.map { values =>
val dtg = new Date(values.dtg)
val f1 = features.find(_.getAttribute("dtg") == dtg).get
val attributes = sft.getAttributeDescriptors.asScala.map(_.getLocalName).map {
case "geom" => s"POINT (${values.lon} ${values.lat})"
case "dtg" => dtg
case "name" => f1.getAttribute("name")
}
ScalaSimpleFeature.create(sft, f1.getID, attributes: _*)
}
}
def readCsv(file: String): Seq[SimpleFeature] =
DelimitedTextConverter.magicParsing(sft.getTypeName, new FileInputStream(file)).toList
def readJson(file: String, sft: SimpleFeatureType): Seq[SimpleFeature] = {
val converter = SimpleFeatureConverter.infer(() => new FileInputStream(file), None, Some(file)) match {
case None => ko(s"could not create converter from $file"); null: SimpleFeatureConverter
case Some((s, c)) => SimpleFeatureConverter(s, c)
}
val result = Seq.newBuilder[SimpleFeature]
val names = sft.getAttributeDescriptors.asScala.map(_.getLocalName)
WithClose(converter.process(new FileInputStream(file))) { features =>
features.foreach { f =>
val copy = new ScalaSimpleFeature(sft, f.getID)
names.foreach(a => copy.setAttribute(a, f.getAttribute(a)))
result += copy
}
}
result.result()
}
def readLeaflet(file: String, sft: SimpleFeatureType): Seq[SimpleFeature] = {
val html = IOUtils.toString(new FileInputStream(file), StandardCharsets.UTF_8)
val i = html.indexOf("var points = ") + 13
val json = html.substring(i, html.indexOf(";", i))
val tmp = Files.createTempFile("gm-export-leaflet", ".json").toFile
try {
WithClose(new FileWriter(tmp))(IOUtils.write(json, _))
readJson(tmp.getAbsolutePath, sft)
} finally {
if (!tmp.delete()) {
tmp.deleteOnExit()
}
}
}
def readOrc(file: String, sft: SimpleFeatureType): Seq[SimpleFeature] = {
val path = new Path(PathUtils.getUrl(file).toURI)
WithClose(new OrcFileSystemReader(sft, new Configuration, None, None).read(path)) { iter =>
iter.map(ScalaSimpleFeature.copy).toList
}
}
def readParquet(file: String, sft: SimpleFeatureType): Seq[SimpleFeature] = {
val path = new Path(PathUtils.getUrl(file).toURI)
val conf = new Configuration()
StorageConfiguration.setSft(conf, sft)
WithClose(new ParquetPathReader(conf, sft, FilterCompat.NOOP, None, None).read(path)) { iter =>
iter.map(ScalaSimpleFeature.copy).toList
}
}
def readShp(file: String, sft: SimpleFeatureType): Seq[SimpleFeature] = {
WithStore[ShapefileDataStore](Map("url" -> URLs.fileToUrl(new File(file)))) { ds =>
// hack - set id from original features since USE_PROVIDED_FID is not supported in shapefiles
SelfClosingIterator(ds.getFeatureReader).toList.map { f =>
val dtg = f.getAttribute("dtg")
val f1 = features.find(_.getAttribute("dtg") == dtg).get
val attributes = sft.getAttributeDescriptors.asScala.map(_.getLocalName).map {
case "geom" => f.getAttribute(0)
case "dtg" => dtg
case "name" => f.getAttribute("name")
}
ScalaSimpleFeature.create(sft, f1.getID, attributes: _*)
}
}
}
def readTsv(file: String): Seq[SimpleFeature] =
DelimitedTextConverter.magicParsing(sft.getTypeName, new FileInputStream(file), CSVFormat.TDF).toList
def readGml2(file: String, sft: SimpleFeatureType): Seq[SimpleFeature] = readGml3(file, sft)
def readGml3(file: String, sft: SimpleFeatureType): Seq[SimpleFeature] = {
SelfClosingIterator(new GML(GML.Version.GML3).decodeFeatureIterator(new FileInputStream(file))).toList.map { f =>
ScalaSimpleFeature.copy(DataUtilities.reType(sft, f))
}
}
}
| aheyne/geomesa | geomesa-tools/src/test/scala/org/locationtech/geomesa/tools/export/ExportCommandTest.scala | Scala | apache-2.0 | 12,362 |
package fr.janalyse.ssh
import java.io._
import com.jcraft.jsch.ChannelShell
import java.util.concurrent.ArrayBlockingQueue
class SSHPowerShell(implicit ssh: SSH) extends PowerShellOperations {
override def execute(cmd: SSHCommand): String = {
synchronized {
      sendCommand(cmd.cmd.replace('\n', ' '))
fromServer.getResponse()
}
}
private val defaultPrompt = """_T-:+"""
val prompt: String = ssh.options.prompt getOrElse defaultPrompt
val options: SSHOptions = ssh.options
private val (channel, toServer, fromServer) = {
var ch: ChannelShell = ssh.jschsession().openChannel("shell").asInstanceOf[ChannelShell]
ch.setPtyType("dumb")
ch.setXForwarding(false)
val pos = new PipedOutputStream()
val pis = new PipedInputStream(pos)
val toServer = new Producer(pos)
ch.setInputStream(pis)
val fromServer = new ConsumerOutputStream()
ch.setOutputStream(fromServer)
ch.connect(ssh.options.connectTimeout.toInt)
(ch, toServer, fromServer)
}
def close(): Unit = {
fromServer.close()
toServer.close()
channel.disconnect()
}
private def shellInit() = {
toServer.send(s"""function prompt {"$prompt"}""")
//Must read output twice to get through the set prompt command echo and then the initial prompt
fromServer.getResponse()
fromServer.getResponse()
}
private var doInit = true
private def sendCommand(cmd: String): Unit = {
if (doInit) {
shellInit()
doInit = false
}
toServer.send(cmd)
}
// -----------------------------------------------------------------------------------
class Producer(output: OutputStream) {
private def sendChar(char: Int):Unit = {
output.write(char)
output.flush()
}
private def sendString(cmd: String):Unit = {
output.write(cmd.getBytes)
nl()
output.flush()
}
def send(cmd: String):Unit = { sendString(cmd) }
def break():Unit = { sendChar(3) } // Ctrl-C
def exit():Unit = { sendChar(4) } // Ctrl-D
    def escape():Unit = { sendChar(27) } // ESC
def nl():Unit = { sendChar(10) } // LF or NEWLINE or ENTER or Ctrl-J
def cr():Unit = { sendChar(13) } // CR
def close():Unit = { output.close() }
}
// -----------------------------------------------------------------------------------
// Output from remote server to here
class ConsumerOutputStream() extends OutputStream {
import java.util.concurrent.TimeUnit
private val resultsQueue = new ArrayBlockingQueue[String](10)
def hasResponse(): Boolean = resultsQueue.size > 0
def getResponse(timeout: Long = ssh.options.timeout): String = {
if (timeout == 0L) resultsQueue.take()
else {
resultsQueue.poll(timeout, TimeUnit.MILLISECONDS) match {
case null =>
toServer.break()
//val output = resultsQueue.take() => Already be blocked with this wait instruction...
val output = resultsQueue.poll(5, TimeUnit.SECONDS) match {
case null => "**no return value - couldn't break current operation**"
case x => x
}
throw new SSHTimeoutException(output, "") // We couldn't distinguish stdout from stderr within a shell session
case x => x
}
}
}
private val consumerAppender = new StringBuilder(8192)
private val promptSize = prompt.length
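    // Accumulates the bytes sent back by the server; once the buffer ends with the shell prompt,
    // everything between the echoed command line and the prompt is queued as one response.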
def write(b: Int):Unit = {
if (b != 13) { //CR removed... CR is always added by JSCH !!!!
val ch = b.toChar
consumerAppender.append(ch) // TODO - Add charset support
if (consumerAppender.endsWith(prompt)) {
val promptIndex = consumerAppender.size - promptSize
          val firstNlIndex = consumerAppender.indexOf("\n")
val result = consumerAppender.substring(firstNlIndex + 1, promptIndex)
resultsQueue.put(result)
consumerAppender.clear
}
}
}
}
}
| dacr/jassh | src/main/scala/fr/janalyse/ssh/SSHPowerShell.scala | Scala | apache-2.0 | 3,945 |
/**
* Copyright (c) 2002-2012 "Neo Technology,"
* Network Engine for Objects in Lund AB [http://neotechnology.com]
*
* This file is part of Neo4j.
*
* Neo4j is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.neo4j.cypher.internal.pipes.matching
abstract class PatternElement(val key: String) {
def traverse[T](shouldFollow: (PatternElement) => Boolean,
visitNode: (PatternNode, T) => T,
visitRelationship: (PatternRelationship, T) => T,
data: T,
path: Seq[PatternElement])
def traverse[T](shouldFollow: (PatternElement) => Boolean,
visit: (PatternElement, T) => T,
data: T,
path: Seq[PatternElement]) {
traverse(shouldFollow, visit, visit, data, path)
}
} | dksaputra/community | cypher/src/main/scala/org/neo4j/cypher/internal/pipes/matching/PatternElement.scala | Scala | gpl-3.0 | 1,394 |
/*
Copyright 2012 Twitter, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
import com.twitter.scalding._
/**
Scalding tutorial part 0.
This is the simplest possible scalding job: it reads from one data source and writes the data,
unchanged, to another.
   To test it, first make sure you've built the target/scalding-tutorial-0.11.2.jar:
from the base directory type:
sbt assembly
yarn jar target/scalding-tutorial-0.11.2.jar Tutorial0 --local
You can check the input:
cat data/hello.txt
And the output:
cat target/data/output0.txt
The output should look just like the input, but with line numbers.
More on this in part 1 of the tutorial.
**/
/**
All jobs in scalding are represented by a subclass of com.twitter.scalding.Job.
The constructor must take a single com.twitter.scalding.Args, even if, as here,
we don't use it.
For the scald.rb script to work, name the class to match the file,
and don't use a package.
**/
class Tutorial0(args : Args) extends Job(args) {
/**
Both input and output data sources are represented by instances of
com.twitter.scalding.Source.
Scalding comes with some basic source types like TextLine and Tsv.
There are also many twitter-specific types like MergedAdRequestSource.
**/
val input = TextLine("data/hello.txt")
val output = TextLine("target/data/output0.txt")
/**
This is the minimal pipeline. Source.read returns a cascading.pipe.Pipe, which represents
a stream of data. We can transform this stream in many ways, but here we're simply
asking it to write itself to the output source.
**/
input.read.write(output)
/**
By the way, if you look at the docs for Pipe, you won't find write there. That's
because it's actually defined on com.twitter.scalding.RichPipe. Most of the methods
we call on Pipes will actually be found on RichPipe; in typical scala style,
the conversion between them is implicit.
**/
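  /**
  A quick illustrative sketch, not part of this tutorial's pipeline: the same read/write with
  one extra RichPipe transformation in between. 'line is the field TextLine exposes for each
  line of text; the 'upper field name and the output path are made up for this example.

    input.read
      .map('line -> 'upper) { line : String => line.toUpperCase }
      .project('upper)
      .write(TextLine("target/data/output0-upper.txt"))
  **/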
}
| abhibond/scalding-template | src/main/scala/Tutorial0.scala | Scala | apache-2.0 | 2,413 |
package com.twitter.finagle.builder
import com.twitter.concurrent.Spool
import com.twitter.finagle.stats.{StatsReceiver, NullStatsReceiver}
import com.twitter.util.Future
/**
* A Cluster implementation that guarantees a minimum set while allowing you
* to specify a Cluster to supplement the initial static set. All operations
* that would remove entries in the minimum set are censored and counted.
*/
@deprecated("Use `com.twitter.finagle.Name` to represent clusters instead", "2014-11-21")
class MinimumSetCluster[T](
minimum: Set[T],
supplementary: Cluster[T],
statsReceiver: StatsReceiver = NullStatsReceiver
) extends Cluster[T] {
private[this] val censoredAdd = statsReceiver.counter("censored_add")
private[this] val censoredRem = statsReceiver.counter("censored_rem")
private[this] val missingGauge = statsReceiver.addGauge("missing") {
(supplementary.snap._1 diff minimum.toSeq).size
}
private[this] val additionalGauge = statsReceiver.addGauge("additional") {
(minimum.toSeq diff supplementary.snap._1).size
}
def snap: (Seq[T], Future[Spool[Cluster.Change[T]]]) = {
val (supplementaryCluster, supplementaryUpdates) = supplementary.snap
val unionCluster = (minimum ++ Set(supplementaryCluster: _*)).toSeq
val censoredUpdates = supplementaryUpdates flatMap { updates =>
updates filter { update =>
val ignore = minimum.contains(update.value)
if (ignore) {
update match {
case Cluster.Add(_) => censoredAdd.incr()
case Cluster.Rem(_) => censoredRem.incr()
}
}
!ignore
}
}
(unionCluster, censoredUpdates)
}
}
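// Illustrative usage sketch (not from the Finagle sources; the host addresses and the
// supplementary cluster below are made up):
//
//   val minimum = Set("host1:9000", "host2:9000")
//   val dynamic: Cluster[String] = ... // e.g. a cluster backed by service discovery
//   val merged = new MinimumSetCluster(minimum, dynamic)
//   val (initialSet, changes) = merged.snap // initialSet always contains the minimum hosts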
| lysu/finagle | finagle-core/src/main/scala/com/twitter/finagle/builder/MinimumSetCluster.scala | Scala | apache-2.0 | 1,668 |
/*
* -------------------------------------------------------------------------------------------------
* - Project: Objectify -
* - Copyright: ©2014 Matygo Educational Incorporated operating as Learndot -
* - Author: Arthur Gonigberg ([email protected]) and contributors (see contributors.txt) -
* - License: Licensed under MIT license (see license.txt) -
* -------------------------------------------------------------------------------------------------
*/
package org.objectify.resolvers
import org.objectify.adapters.ObjectifyRequestAdapter
/**
* Resolve the body as String
*/
class BodyResolver extends Resolver[String, ObjectifyRequestAdapter] {
def apply(req: ObjectifyRequestAdapter) = {
req.getBody
}
}
| learndot/Objectify.scala | src/main/scala/org/objectify/resolvers/BodyResolver.scala | Scala | mit | 880 |
package org.sandbox.chat.cluster
import scala.annotation.migration
import scala.util.Random
import org.sandbox.chat.cluster.ChatClusterRole.roleToString
import akka.actor.ActorContext
import akka.actor.ActorRef
import akka.cluster.Member
import akka.event.LoggingAdapter
import akka.util.Timeout
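/**
 * Tracks the cluster members that carry the given role: resolves and watches an actor for each
 * member that comes up, and drops the entry when the member goes down or its actor terminates.
 */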
class ChatClusterActors(role: ChatClusterRole,
implicit val context: ActorContext, implicit val timeout: Timeout,
implicit val log: LoggingAdapter) extends ActorResolver
{
private var clusterActors: Map[Member,ActorRef] = Map.empty
def randomActor: Option[ActorRef] =
Random.shuffle(clusterActors.values.toSeq).headOption
def foreach(f: ((Member,ActorRef)) => Unit): Unit =
clusterActors foreach f
def onMemberUp(member: Member) = {
if (member.hasRole(role)) {
val actor = resolveActorForMember(member, role)
actor foreach { a =>
clusterActors += member -> a
context watch a
log.info(s"watching ${a.path}")
}
}
}
def onMemberDown(member: Member) =
clusterActors -= member
def onTerminated(actor: ActorRef) =
clusterActors = clusterActors.filterNot { case (_, a) => a == actor }
}
| hustbill/ScalaDemo | src/main/scala/org/sandbox/chat/cluster/ChatClusterActors.scala | Scala | gpl-2.0 | 1,173 |
/*
* Copyright 2010 LinkedIn
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package kafka.utils
import java.util.concurrent._
import java.util.concurrent.atomic._
import kafka.utils._
import org.apache.log4j.Logger
/**
* A scheduler for running jobs in the background
 * TODO: ScheduledThreadPoolExecutor notoriously swallows exceptions
*/
class KafkaScheduler(val numThreads: Int, val baseThreadName: String, isDaemon: Boolean) {
private val logger = Logger.getLogger(getClass())
private val threadId = new AtomicLong(0)
private val executor = new ScheduledThreadPoolExecutor(numThreads, new ThreadFactory() {
def newThread(runnable: Runnable): Thread = {
val t = new Thread(runnable, baseThreadName + threadId.getAndIncrement)
t.setDaemon(isDaemon)
t
}
})
def scheduleWithRate(fun: () => Unit, delayMs: Long, periodMs: Long) =
executor.scheduleAtFixedRate(Utils.loggedRunnable(fun), delayMs, periodMs, TimeUnit.MILLISECONDS)
def shutdown() = {
executor.shutdownNow
logger.info("shutdown scheduler " + baseThreadName)
}
}
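// Usage sketch (illustrative only, not part of the Kafka sources; the thread name and period
// below are arbitrary):
//
//   val scheduler = new KafkaScheduler(numThreads = 1, baseThreadName = "kafka-scheduler-", isDaemon = false)
//   scheduler.scheduleWithRate(() => println("periodic work"), delayMs = 0L, periodMs = 60 * 1000L)
//   ...
//   scheduler.shutdown()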
| jinfei21/kafka | src/kafka/utils/KafkaScheduler.scala | Scala | apache-2.0 | 1,600 |
package org.sofi.deadman.load
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent._
import scala.concurrent.duration._
// Complete some tasks in the deadman switch service
object Complete extends App with Profile {
// Complete tasks for the given aggregate
private def completeTasks(aggregates: Seq[Int]): Future[Unit] = Future {
println(s"Completing tasks for aggregates: ${aggregates.mkString(" ")}")
aggregates.foreach { a ⇒
val tasks = (1 to numEntities).map { j ⇒
Map[String, Any]("key" -> s"task$j", "aggregate" -> s"$a", "entity" -> s"${a - 1}")
}
val port = ports(a % ports.length)
val rep = Http.post(s"http://127.0.0.1:$port/deadman/api/v1/complete", Json.encode(tasks))
if (rep.status != Http.OK) {
println(s"${rep.status}: ${rep.body}")
}
}
}
// Complete tasks for a range of aggregates
def completeAggregates() =
Future.sequence {
(1 to numAggregates).grouped(groupSize).map(completeTasks)
}
// Wait until all Futures are finished
Await.result(completeAggregates(), 10.minutes)
println("done!")
}
| SocialFinance/deadman-switch | load/src/main/scala/org/sofi/deadman/load/Complete.scala | Scala | bsd-3-clause | 1,144 |
package fr.janalyse.wmirp
import org.scalatest.FunSuite
import org.scalatest.junit.JUnitRunner
import java.lang.management.ManagementFactory
import org.scalatest.matchers.ShouldMatchers
import org.scalatest.BeforeAndAfterAll
import scala.concurrent._
import scala.concurrent.duration._
import com.jacob.activeX.ActiveXComponent
import com.jacob.com.Dispatch
import com.jacob.com.EnumVariant
import com.jacob.com.Variant
class WMITest extends FunSuite with ShouldMatchers with BeforeAndAfterAll {
implicit var wmi: WMI = _
override def beforeAll() {
wmi = new WMI {}
}
override def afterAll() {
wmi.close()
}
test("Browsing test 1") {
val all = wmi.getClasses()
all.size should be > (200)
info(s"found ${all.size} com classes")
}
test("Browsing test 2") {
val perfs = wmi.getPerfClasses()
perfs.size should be > (20)
info(s"found ${perfs.size} performances com classes")
val processor = perfs.find(_.name contains "PerfOS_Processor")
processor should be ('defined)
val system = perfs.find(_.name contains "PerfOS_System")
system should be ('defined)
}
test("Get class attributes") {
val processorcl = ComClass("Win32_PerfRawData_PerfOS_Processor")
val attrs = processorcl.attributes
attrs.size should be > (0)
attrs should contain ("PercentProcessorTime")
attrs should contain ("PercentIdleTime")
}
test("Get class instances") {
val processors = wmi.getInstances("Win32_PerfFormattedData_PerfOS_Processor")
processors.size should be > (0)
processors.map(_.name.get) should contain ("_Total")
}
test("Get instance values first CPU") {
val cpu = wmi.getInstance("Win32_PerfFormattedData_PerfOS_Processor", "0")
cpu should be ('defined)
val entries=cpu.get.entries
val idle = entries.get("PercentIdleTime").map(_.getString.toInt)
val user = entries.get("PercentUserTime").map(_.getString.toInt)
info(s"CPU Usage : idle=$idle user=$user")
idle.get should be >(0)
}
test("Get instance values Total 1") {
val cpu = wmi.getInstance("""Win32_PerfFormattedData_PerfOS_Processor""", "_Total")
cpu should be ('defined)
val entries=cpu.get.entries
val idle = entries.get("PercentIdleTime").map(_.getString.toInt)
val user = entries.get("PercentUserTime").map(_.getString.toInt)
info(s"CPU Usage : idle=$idle user=$user")
idle.get should be >(0)
}
test("Get instance values Total 2 full path") {
    val cpu = wmi.getInstance("""\\.\root\cimv2:Win32_PerfFormattedData_PerfOS_Processor""", "_Total")
cpu should be ('defined)
val entries=cpu.get.entries
val idle = entries.get("PercentIdleTime").map(_.getString.toInt)
val user = entries.get("PercentUserTime").map(_.getString.toInt)
info(s"CPU Usage : idle=$idle user=$user")
idle.get should be >(0)
}
test("Get standalone instance") {
val sys = wmi.getInstance("Win32_PerfFormattedData_PerfOS_System")
sys should be ('defined)
val entries = sys.get.entries
val processes = entries.get("Processes").map(_.toString.toInt)
val threads = entries.get("Threads").map(_.toString.toInt)
info(s"System : Processes=$processes threads=$threads")
processes.get should be >(0)
threads.get should be > (0)
}
ignore("Get unknown instance") {
val cpu = wmi.getInstance("Win32_PerfFormattedData_PerfOS_Processor", "trucmuche")
cpu should be ('empty)
}
test("Performance walk - search metrics") {
    val numRE = """(\d+(?:[.,]\d+)?)""".r
val found = for {
perfclass <- wmi.getPerfClasses
instance <- {println(perfclass.name) ; perfclass.instances}
(key,numRE(value)) <- instance.entries.map{case(k,v)=> k -> v.toString}
clname = perfclass.name
iname = instance.name.getOrElse("default")
} yield {
s"$clname/$iname.$key=$value"
}
info(s"Found ${found.size} numerical values")
found.size should be >(50)
found.filter(_ contains "PercentProcessor").take(5).foreach(info(_))
found.filter(_ contains "PerfOS_System").foreach(info(_))
}
}
| dacr/wmirp | src/test/scala/fr/janalyse/wmirp/WMITest.scala | Scala | apache-2.0 | 4,096 |
package scala.slick.lifted
import scala.language.{implicitConversions, higherKinds}
import scala.slick.ast._
import FunctionSymbolExtensionMethods._
import ScalaBaseType._
import scala.slick.SlickException
trait ExtensionMethods[B1, P1] extends Any {
def c: Column[P1]
@inline def n = c.toNode
@inline implicit def p1Type = c.tpe
implicit def b1Type = (c.tpe match {
case o: OptionTypedType[_] => o.elementType
case b => b
}).asInstanceOf[TypedType[B1]]
implicit def optionType = (c.tpe match {
case o: OptionTypedType[_] => o
case b => b.optionType
}).asInstanceOf[TypedType[Option[B1]]]
type o = OptionMapperDSL.arg[B1, P1]
}
/** Extension methods for all Columns and all primitive values that can be lifted to Columns */
final class AnyExtensionMethods(val n: Node) extends AnyVal {
def asColumnOf[U : TypedType] = Library.Cast.column[U](n)
def asColumnOfType[U : TypedType](typeName: String) =
Library.Cast.column[U](n, LiteralNode(implicitly[TypedType[U]], typeName))
}
/** Extension methods for all Columns */
trait ColumnExtensionMethods[B1, P1] extends Any with ExtensionMethods[B1, P1] {
def c: Column[P1]
@deprecated("Use 'isEmpty' instead of 'isNull'", "2.1")
def isNull = Library.==.column[Boolean](n, LiteralNode(null))
@deprecated("Use 'isDefined' instead of 'isNotNull'", "2.1")
def isNotNull = Library.Not.column[Boolean](Library.==.typed[Boolean](n, LiteralNode(null)))
def === [P2, R](e: Column[P2])(implicit om: o#arg[B1, P2]#to[Boolean, R]) =
om.column(Library.==, n, e.toNode)
@deprecated("Use '===' instead of 'is'", "2.1")
def is[P2, R](e: Column[P2])(implicit om: o#arg[B1, P2]#to[Boolean, R]) =
=== [P2, R](e)
@deprecated("Use '=!=' instead of 'isNot'", "2.1")
def isNot[P2, R](e: Column[P2])(implicit om: o#arg[B1, P2]#to[Boolean, R]) =
=!= [P2, R](e)
def =!= [P2, R](e: Column[P2])(implicit om: o#arg[B1, P2]#to[Boolean, R]) =
om.column(Library.Not, Library.==.typed(om.liftedType, n, e.toNode))
def < [P2, R](e: Column[P2])(implicit om: o#arg[B1, P2]#to[Boolean, R]) =
om.column(Library.<, n, e.toNode)
def <= [P2, R](e: Column[P2])(implicit om: o#arg[B1, P2]#to[Boolean, R]) =
om.column(Library.<=, n, e.toNode)
def > [P2, R](e: Column[P2])(implicit om: o#arg[B1, P2]#to[Boolean, R]) =
om.column(Library.>, n, e.toNode)
def >= [P2, R](e: Column[P2])(implicit om: o#arg[B1, P2]#to[Boolean, R]) =
om.column(Library.>=, n, e.toNode)
def in[P2, R, C[_]](e: Query[Column[P2], _, C])(implicit om: o#arg[B1, P2]#to[Boolean, R]) =
om.column(Library.In, n, e.toNode)
def inSet[R](seq: Traversable[B1])(implicit om: o#to[Boolean, R]) =
if(seq.isEmpty) om(LiteralColumn(false))
else om.column(Library.In, n, ProductNode(seq.map{ v => LiteralNode(implicitly[TypedType[B1]], v) }.toSeq))
def inSetBind[R](seq: Traversable[B1])(implicit om: o#to[Boolean, R]) =
if(seq.isEmpty) om(LiteralColumn(false))
else om.column(Library.In, n, ProductNode(seq.map(v => LiteralNode(implicitly[TypedType[B1]], v, vol = true)).toSeq))
def between[P2, P3, R](start: Column[P2], end: Column[P3])(implicit om: o#arg[B1, P2]#arg[B1, P3]#to[Boolean, R]) =
om.column(Library.Between, n, start.toNode, end.toNode)
def ifNull[B2, P2, R](e: Column[P2])(implicit om: o#arg[B2, P2]#to[Boolean, R]): Column[P2] =
Library.IfNull.column[P2](n, e.toNode)(e.tpe)
}
final class PlainColumnExtensionMethods[P1](val c: Column[P1]) extends AnyVal with ColumnExtensionMethods[P1, P1] {
def ? : Column[Option[P1]] = Column.forNode(OptionApply(c.toNode))(c.tpe.optionType)
}
final class OptionColumnExtensionMethods[B1](val c: Column[Option[B1]]) extends AnyVal with ColumnExtensionMethods[B1, Option[B1]] {
def getOrElse(default: => B1): Column[B1] =
Column.forNode[B1](GetOrElse(c.toNode, () => default))(c.tpe.asInstanceOf[OptionType].elementType.asInstanceOf[TypedType[B1]])
def get: Column[B1] =
getOrElse { throw new SlickException("Read NULL value for column "+this) }
/** Check if this Option column is empty (i.e. the underlying value is NULL) */
def isEmpty = Library.==.column[Boolean](n, LiteralNode(null))
/** Check if this Option column is not empty (i.e. the underlying value is not NULL) */
def isDefined = Library.Not.column[Boolean](Library.==.typed[Boolean](n, LiteralNode(null)))
/** Check if this Option column is not empty (i.e. the underlying value is not NULL) */
def nonEmpty = isDefined
}
/** Extension methods for numeric Columns */
final class NumericColumnExtensionMethods[B1, P1](val c: Column[P1]) extends AnyVal with ExtensionMethods[B1, P1] {
def + [P2, R](e: Column[P2])(implicit om: o#arg[B1, P2]#to[B1, R]) =
om.column(Library.+, n, e.toNode)
def - [P2, R](e: Column[P2])(implicit om: o#arg[B1, P2]#to[B1, R]) =
om.column(Library.-, n, e.toNode)
def * [P2, R](e: Column[P2])(implicit om: o#arg[B1, P2]#to[B1, R]) =
om.column(Library.*, n, e.toNode)
def / [P2, R](e: Column[P2])(implicit om: o#arg[B1, P2]#to[B1, R]) =
om.column(Library./, n, e.toNode)
def % [P2, R](e: Column[P2])(implicit om: o#arg[B1, P2]#to[B1, R]) =
om.column(Library.%, n, e.toNode)
def abs = Library.Abs.column[P1](n)
def ceil = Library.Ceiling.column[P1](n)
def floor = Library.Floor.column[P1](n)
def sign[R](implicit om: o#to[Int, R]) =
om.column(Library.Sign, n)
def toDegrees = Library.Degrees.column[P1](n)
def toRadians = Library.Radians.column[P1](n)
}
/** Extension methods for Column[Boolean] and Column[Option[Boolean]] */
final class BooleanColumnExtensionMethods[P1](val c: Column[P1]) extends AnyVal with ExtensionMethods[Boolean, P1] {
def &&[P2, R](b: Column[P2])(implicit om: o#arg[Boolean, P2]#to[Boolean, R]) =
om.column(Library.And, n, b.toNode)
def ||[P2, R](b: Column[P2])(implicit om: o#arg[Boolean, P2]#to[Boolean, R]) =
om.column(Library.Or, n, b.toNode)
def unary_! = Library.Not.column[Boolean](n)
}
/** Extension methods for Column[String] and Column[Option[String]] */
final class StringColumnExtensionMethods[P1](val c: Column[P1]) extends AnyVal with ExtensionMethods[String, P1] {
def length[R](implicit om: o#to[Int, R]) =
om.column(Library.Length, n)
  def like[P2, R](e: Column[P2], esc: Char = '\u0000')(implicit om: o#arg[String, P2]#to[Boolean, R]) =
    if(esc == '\u0000') om.column(Library.Like, n, e.toNode)
else om.column(Library.Like, n, e.toNode, LiteralNode(esc))
def ++[P2, R](e: Column[P2])(implicit om: o#arg[String, P2]#to[String, R]) =
om.column(Library.Concat, n, e.toNode)
def startsWith[R](s: String)(implicit om: o#to[Boolean, R]) =
om.column(Library.StartsWith, n, LiteralNode(s))
def endsWith[R](s: String)(implicit om: o#to[Boolean, R]) =
om.column(Library.EndsWith, n, LiteralNode(s))
def toUpperCase = Library.UCase.column[P1](n)
def toLowerCase = Library.LCase.column[P1](n)
def ltrim = Library.LTrim.column[P1](n)
def rtrim = Library.RTrim.column[P1](n)
def trim = Library.Trim.column[P1](n)
}
/** Extension methods for Queries of a single Column */
final class SingleColumnQueryExtensionMethods[B1, P1, C[_]](val q: Query[Column[P1], _, C]) extends AnyVal {
type OptionTM = TypedType[Option[B1]]
def min(implicit tm: OptionTM) = Library.Min.column[Option[B1]](q.toNode)
def max(implicit tm: OptionTM) = Library.Max.column[Option[B1]](q.toNode)
def avg(implicit tm: OptionTM) = Library.Avg.column[Option[B1]](q.toNode)
def sum(implicit tm: OptionTM) = Library.Sum.column[Option[B1]](q.toNode)
}
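/* Illustrative only (not part of the Slick sources): given a single-column query such as
 * `coffees.map(_.price)` with price a Column[Double], these methods allow aggregates like
 *   coffees.map(_.price).max // Column[Option[Double]]
 *   coffees.map(_.price).sum // Column[Option[Double]]
 * where `coffees` is a made-up TableQuery used only for this example.
 */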
trait ExtensionMethodConversions {
implicit def anyColumnExtensionMethods[B1 : BaseTypedType](c: Column[B1]) = new AnyExtensionMethods(c.toNode)
implicit def anyOptionColumnExtensionMethods[B1](c: Column[Option[B1]]) = new AnyExtensionMethods(c.toNode)
implicit def anyValueExtensionMethods[B1 : BaseTypedType](v: B1) = new AnyExtensionMethods(LiteralNode(implicitly[TypedType[B1]], v))
implicit def anyOptionValueExtensionMethods[B1 : TypedType](v: Option[B1]) = new AnyExtensionMethods(LiteralNode(implicitly[TypedType[Option[B1]]], v))
implicit def columnExtensionMethods[B1 : BaseTypedType](c: Column[B1]) = new PlainColumnExtensionMethods[B1](c)
implicit def optionColumnExtensionMethods[B1](c: Column[Option[B1]]) = new OptionColumnExtensionMethods[B1](c)
implicit def numericColumnExtensionMethods[B1](c: Column[B1])(implicit tm: BaseTypedType[B1] with NumericTypedType) = new NumericColumnExtensionMethods[B1, B1](c)
implicit def numericOptionColumnExtensionMethods[B1](c: Column[Option[B1]])(implicit tm: BaseTypedType[B1] with NumericTypedType) = new NumericColumnExtensionMethods[B1, Option[B1]](c)
implicit def stringColumnExtensionMethods(c: Column[String]) = new StringColumnExtensionMethods[String](c)
implicit def stringOptionColumnExtensionMethods(c: Column[Option[String]]) = new StringColumnExtensionMethods[Option[String]](c)
implicit def booleanColumnExtensionMethods(c: Column[Boolean]) = new BooleanColumnExtensionMethods[Boolean](c)
implicit def booleanOptionColumnExtensionMethods(c: Column[Option[Boolean]]) = new BooleanColumnExtensionMethods[Option[Boolean]](c)
implicit def singleColumnQueryExtensionMethods[B1 : BaseTypedType, C[_]](q: Query[Column[B1], _, C]) = new SingleColumnQueryExtensionMethods[B1, B1, C](q)
implicit def singleOptionColumnQueryExtensionMethods[B1, C[_]](q: Query[Column[Option[B1]], _, C]) = new SingleColumnQueryExtensionMethods[B1, Option[B1], C](q)
}
| dvinokurov/slick | src/main/scala/scala/slick/lifted/ExtensionMethods.scala | Scala | bsd-2-clause | 9,471 |
package fpinscala.errorhandling
import scala.{Option => _, Either => _, Left => _, Right => _, _} // hide std library `Option` and `Either`, since we are writing our own in this chapter
sealed trait Either[+E,+A] {
def map[B](f: A => B): Either[E, B] = this match {
case Left(e) => Left(e)
case Right(a) => Right(f(a))
}
def flatMap[EE >: E, B](f: A => Either[EE, B]): Either[EE, B] = this match {
case Left(e) => Left(e)
case Right(a) => f(a)
}
def orElse[EE >: E, B >: A](b: => Either[EE, B]): Either[EE, B] = this match {
case Left(_) => b
case Right(a) => Right(a)
}
// def map2[EE >: E, B, C](b: Either[EE, B])(f: (A, B) => C): Either[EE, C] =
// (this,b) match {
// case (Right(a),Right(b)) => Right(f(a,b))
// case (Left(e),_) => Left(e)
// case (_, Left(e)) => Left(e)
// }
def map2[EE >: E, B, C](b: Either[EE, B])(f: (A, B) => C): Either[EE, C] =
for {
a <- this
b1 <- b
} yield f(a, b1)
}
case class Left[+E](get: E) extends Either[E,Nothing]
case class Right[+A](get: A) extends Either[Nothing,A]
object Either {
def traverse[E,A,B](es: List[A])(f: A => Either[E, B]): Either[E, List[B]] =
es match {
case Nil => Right(Nil)
      // Q: What is going on with the (_ :: _)?
      // A: It is the combining function passed to map2: it conses the B produced
      //    by f(a) onto the List[B] built by traverse(as)(f).
case a::as => (f(a) map2 traverse(as)(f))(_ :: _)
}
def sequence[E,A](es: List[Either[E,A]]): Either[E,List[A]] =
traverse(es)((e) => e)
def mean(xs: IndexedSeq[Double]): Either[String, Double] =
if (xs.isEmpty)
Left("mean of empty list!")
else
Right(xs.sum / xs.length)
def safeDiv(x: Int, y: Int): Either[Exception, Int] =
try Right(x / y)
catch { case e: Exception => Left(e) }
def Try[A](a: => A): Either[Exception, A] =
try Right(a)
catch { case e: Exception => Left(e) }
/*
* Exercise 4.8
   * I would change map2 to collect all errors rather than changing mkPerson,
   * because map2 currently short-circuits as soon as the first error is
   * encountered, while mkPerson is agnostic to how errors are combined.
   * I don't think you need a new datatype instead of Either, because you could
   * just use an Either[List[String], Person]; the Either combinators (map2,
   * traverse, sequence) would have to be changed to append errors to that list
   * rather than returning just the first error.
*/
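  /*
   * Hedged sketch added by the editor (not part of the original exercises):
   * one possible error-accumulating variant of map2, assuming errors are
   * collected in a List. The name map2Accumulate is hypothetical.
   */
  def map2Accumulate[E, A, B, C](a: Either[List[E], A], b: Either[List[E], B])(f: (A, B) => C): Either[List[E], C] =
    (a, b) match {
      case (Right(av), Right(bv)) => Right(f(av, bv))
      case (Left(ea), Left(eb)) => Left(ea ++ eb)
      case (Left(ea), _) => Left(ea)
      case (_, Left(eb)) => Left(eb)
    }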
} | mikemole/fpinscala | exercises/src/main/scala/fpinscala/errorhandling/Either.scala | Scala | mit | 2,361 |
/*
biojava-adam BioJava and ADAM integration.
Copyright (c) 2017-2022 held jointly by the individual authors.
This library is free software; you can redistribute it and/or modify it
under the terms of the GNU Lesser General Public License as published
by the Free Software Foundation; either version 3 of the License, or (at
your option) any later version.
This library is distributed in the hope that it will be useful, but WITHOUT
 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
License for more details.
You should have received a copy of the GNU Lesser General Public License
along with this library; if not, write to the Free Software Foundation,
Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA.
> http://www.fsf.org/licensing/licenses/lgpl.html
> http://www.opensource.org/licenses/lgpl-license.php
*/
import org.slf4j.LoggerFactory
val logger = LoggerFactory.getLogger("loadGenbankProteinFeatures")
import org.apache.log4j.{ Level, Logger }
Logger.getLogger("loadGenbankProteinFeatures").setLevel(Level.INFO)
Logger.getLogger("org.biojava").setLevel(Level.INFO)
import org.biojava.nbio.adam.BiojavaAdamContext
val bac = BiojavaAdamContext(sc)
val inputPath = Option(System.getenv("INPUT"))
val outputPath = Option(System.getenv("OUTPUT"))
if (inputPath.isEmpty || outputPath.isEmpty) {
logger.error("INPUT and OUTPUT environment variables are required")
System.exit(1)
}
val features = bac.loadGenbankProteinFeatures(inputPath.get)
logger.info("Saving protein sequence features to output path %s ...".format(outputPath.get))
features.save(outputPath.get, asSingleFile = true, disableFastConcat = false)
logger.info("Done")
System.exit(0)
| heuermh/biojava-adam | scripts/loadGenbankProteinFeatures.scala | Scala | lgpl-3.0 | 1,829 |
package com.github.tyang513.batch.demo
import scala.collection.GenSeq
/**
* Hello world!
*
*/
object App {
def main(args: Array[String]): Unit = {
println("x")
println(2.to(10))
val m = Array(1, 2, 3, 4, 5, 6)
val m1 = m.map(x => x + 1)
m1.foreach(println)
val l = List(4)
println(l.head)
println(l.tail)
val l2 = 9 :: List(4, 2)
l2.foreach(x => println(x))
val double = (x : Int) => {x * 2}
println(double(2))
val sele = App.selector("s") _
sele("ssss")
println("&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&&")
val numbers = List(2, 4, 6, 8)
val ma = numbers.fold(0)((m: Int, n: Int) => {println("m = " + m + " n = " + n + " m + n = " + (m + n)); m + n})
println(ma)
val days = Array(1, 2, 3, 4, 5, 6)
days.zipWithIndex.foreach{case(x, i) => println(x + " = " + i)}
days.foreach{ x => println(x)}
for ((x, i) <- days.zip(Stream from 1)){
println(x + " " + i)
}
val x = List(15, 10, 5, 8, 20, 12)
x.groupBy(_ > 10).foreach(println)
println (x.partition(_ > 10))
println("======================")
numbers.foreach(print)
val sa = GenSeq("a", "bb", "ccc", "dddd")
val max = (x : Int, y : String) => {println("max " + x + " " + y); 0+y.length}
val sum = (x : Int, y : Int) => {println("sum " + x + " " + y); x + y}
val b = sa.aggregate(0)(max(_, _), sum(_, _))
val c = sa.foldLeft(0)(max(_, _))
println(b)
println(c)
println("========================= aggregate example ")
val aex = List(1,2,3,4,5,6)
def myfunc(index: Int, iter: Iterator[(String, Int)]) : Iterator[String] = {
iter.toList.map(x => "[partID:" + index + ", val: " + x + "]").iterator
}
println("==========aex.par.aggregate(0)(math.max(_, _), _ + _)")
println(aex.par.aggregate(0)(math.max(_, _), _ + _))
    aex.iterator.foreach(println) // map on an Iterator is lazy and would never run; foreach actually prints
println(aex.aggregate(0)(math.max(_,_), _ + _))
println(aex.aggregate(10)(math.max(_,_), _ + _) )
println(List('a', 'b', 'c').aggregate(0)({ (sum, ch) => println("" + sum + " = " + ch.toInt); sum + ch.toInt }, { (p1, p2) => p1 + p2 }))
println(aex.foldLeft(0)((m, n) => m +n ))
}
def selector(s1 : String)(status : String)(`type` : Int) : String = {
println("ssssssssssssssssssssssssssssssss")
println(s1)
println(status)
println(`type`)
"s"
}
}
| tyang513/spark-batch-example | src/main/scala/com/github/tyang513/batch/demo/App.scala | Scala | gpl-3.0 | 2,450 |
package uk.co.morleydev.ghosthunt.model.component.game
import uk.co.morleydev.ghosthunt.model.net.ClientId
/**
 * A remote entity is an entity controlled by a remote player: any actor that is
 * not the local actor (and, on the server, every actor).
 *
 * @param id the id of the client that controls this entity
*/
case class Remote(id : ClientId)
| MorleyDev/GhostHunt | src/main/scala/uk/co/morleydev/ghosthunt/model/component/game/Remote.scala | Scala | mit | 324 |
package org.scalaide.ui.wizards
import scala.util.{ Try, Success, Failure }
import org.eclipse.core.resources.IContainer
import org.eclipse.core.resources.IFile
import org.eclipse.core.resources.IResource
import org.eclipse.core.runtime.IPath
import org.eclipse.core.runtime.Path
import org.scalaide.core.IScalaPlugin
import org.scalaide.core.compiler.IScalaPresentationCompiler.Implicits._
import org.scalaide.core.internal.project.ScalaProject
import org.scalaide.util.internal.Commons
import org.scalaide.util.internal.eclipse.ProjectUtils
import scalariform.lexer._
object ScalaFileCreator {
val VariableTypeName = "type_name"
val VariablePackageName = "package_name"
import scala.reflect.runtime._
private[this] val st = universe.asInstanceOf[JavaUniverse]
val ScalaKeywords = st.nme.keywords map (_.toString())
val JavaKeywords = st.javanme.keywords map (_.toString())
}
trait ScalaFileCreator extends FileCreator {
import ScalaFileCreator._
import ProjectUtils._
private[wizards] type FileExistenceCheck = IContainer => Validation
override def templateVariables(folder: IContainer, name: String): Map[String, String] =
generateTemplateVariables(name)
override def initialPath(res: IResource): String = {
val srcDirs = sourceDirs(res.getProject())
generateInitialPath(
path = res.getFullPath(),
srcDirs = srcDirs,
isDirectory = res.getType() == IResource.FOLDER)
}
override def validateName(folder: IContainer, name: String): Validation = {
if (!ScalaProject.isScalaProject(folder.getProject()))
Invalid("Not a Scala project")
else
doValidation(name) match {
case Left(v) => v
case Right(f) => f(folder)
}
}
override def create(folder: IContainer, name: String): IFile = {
val filePath = name.replace('.', '/')
folder.getFile(new Path(s"$filePath.scala"))
}
override def completionEntries(folder: IContainer, name: String): Seq[String] = {
val ret = projectAsJavaProject(folder.getProject()) map { jp =>
val root = jp.findPackageFragmentRoot(folder.getFullPath())
val pkgs = root.getChildren().map(_.getElementName())
      val ignoreCaseMatcher = s"(?i)\\Q$name\\E.*"
pkgs.filter(_.matches(ignoreCaseMatcher))
}
ret.fold(Seq[String]())(identity)
}
/**
* `path` is the path of the element which is selected when the wizard is
* created. `srcDirs` contains all source folders of the project where `path`
* is part of. `isDirectory` describes if the last element of `path` references
* a directory.
*/
private[wizards] def generateInitialPath(path: IPath, srcDirs: Seq[IPath], isDirectory: Boolean): String = {
srcDirs.find(_.isPrefixOf(path))
.map(srcDir => path.removeFirstSegments(srcDir.segmentCount()))
.map(pkgOrFilePath => if (isDirectory) pkgOrFilePath else pkgOrFilePath.removeLastSegments(1))
.map(_.segments().mkString("."))
.map(pkg => if (pkg.isEmpty()) "" else s"$pkg.")
.getOrElse("")
}
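  /*
   * Worked example added by the editor (paths are hypothetical): for
   * path = /proj/src/main/scala/foo/Bar, a srcDirs entry of /proj/src/main/scala
   * and isDirectory = false, the source-folder prefix and the file segment are
   * dropped, yielding the initial package prefix "foo.".
   */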
private[wizards] def doValidation(name: String): Either[Invalid, FileExistenceCheck] = {
if (name.isEmpty())
Left(Invalid("No file path specified"))
else
validateFullyQualifiedType(name)
}
private[wizards] def validateFullyQualifiedType(fullyQualifiedType: String): Either[Invalid, FileExistenceCheck] = {
def isValidScalaTypeIdent(inputStr: String) = {
val str = inputStr.trim()
val tokenizeResult = Try(ScalaLexer.tokenise(str, forgiveErrors = false))
tokenizeResult match {
case Success(tokens) => {
val conformsToIdentToken = tokens.size == 2 && tokens(0).tokenType.isId
conformsToIdentToken && !ScalaKeywords.contains(str)
}
case Failure(exception) => {
exception match {
case _: scalariform.lexer.ScalaLexerException => false
case e => throw e
}
}
}
}
val dotsAfterBackQuote = fullyQualifiedType.dropWhile(_ != '`').contains('.')
if (dotsAfterBackQuote) {
Left(Invalid("Dots after a back-quote is not supported for Scala types in the file wizard"))
} else {
val parts = Commons.split(fullyQualifiedType, '.')
if (parts.last.isEmpty)
Left(Invalid("No type name specified"))
else {
def packageIdentCheck =
parts.init.find(!isValidScalaPackageIdent(_)) map (e => s"'$e' is not a valid package name")
def typeIdentCheck =
Seq(parts.last).find(!isValidScalaTypeIdent(_)) map (e => s"'$e' is not a valid type name")
packageIdentCheck orElse typeIdentCheck match {
case Some(e) => Left(Invalid(e))
case _ => Right(checkTypeExists(_, fullyQualifiedType))
}
}
}
}
private[wizards] def isValidScalaPackageIdent(str: String): Boolean = {
val validIdent =
str.nonEmpty &&
Character.isJavaIdentifierStart(str.head) &&
str.tail.forall(Character.isJavaIdentifierPart)
validIdent && !ScalaKeywords.contains(str) && !JavaKeywords.contains(str)
}
private[wizards] def checkTypeExists(folder: IContainer, fullyQualifiedType: String): Validation = {
val path = fullyQualifiedType.replace('.', '/')
if (folder.getFile(new Path(s"$path.scala")).exists())
Invalid("File already exists")
else {
val scalaProject = IScalaPlugin().asScalaProject(folder.getProject())
val typeExists = scalaProject flatMap { scalaProject =>
scalaProject.presentationCompiler { compiler =>
compiler.asyncExec {
compiler.rootMirror.getClassIfDefined(fullyQualifiedType) != compiler.NoSymbol
}.getOption()
}.flatten
} getOrElse false
if (typeExists)
Invalid("Type already exists")
else
Valid
}
}
private[wizards] def generateTemplateVariables(pkg: String): Map[String, String] = {
val splitPos = pkg.lastIndexOf('.')
if (splitPos < 0)
Map(VariableTypeName -> pkg)
else
Map(
VariablePackageName -> pkg.substring(0, splitPos),
VariableTypeName -> pkg.substring(splitPos + 1))
}
}
| scala-ide/scala-ide | org.scala-ide.sdt.core/src/org/scalaide/ui/wizards/ScalaFileCreator.scala | Scala | bsd-3-clause | 6,158 |
package io.youi.app.screen
import org.scalajs.dom.html
import org.scalajs.dom.html.Element
import scala.concurrent.Future
/**
  * Preloads the screen's element from Scala.js, usually through a Template.
*/
trait PreloadedContentScreen extends ContentScreen {
protected def preloadScreen: html.Element
override protected def generateScreen(): Future[Element] = {
Future.successful(preloadScreen)
}
}
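/*
 * Hedged usage sketch added by the editor (HomeScreen and the "home" element id
 * are hypothetical; any further Screen members are omitted): an implementor
 * supplies an element that already exists in the page, e.g.
 *
 *   object HomeScreen extends PreloadedContentScreen {
 *     override protected def preloadScreen: html.Element =
 *       org.scalajs.dom.document.getElementById("home").asInstanceOf[html.Element]
 *   }
 */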
| outr/youi | app/js/src/main/scala/io/youi/app/screen/PreloadedContentScreen.scala | Scala | mit | 405 |
package processors
import com.typesafe.scalalogging.LazyLogging
import org.clulab.odin.Mention
import org.clulab.processors.{ Document, Sentence }
import org.clulab.serialization.json.JSONSerializer
import org.clulab.serialization.json._
import org.clulab.odin.serialization.json.{ JSONSerializer => _, _ }
import org.json4s.JValue
import org.json4s.JsonDSL._
import scala.io.Source
import utils._
object ConverterUtils extends LazyLogging {
// For validating URLs to rule files
// ex. https://raw.githubusercontent.com/clulab/reach/508697db2217ba14cd1fa0a99174816cc3383317/src/main/resources/edu/arizona/sista/demo/open/grammars/rules.yml
  val rulesURL = RichRegex("""(https?|ftp).+?\.(yml|yaml)$""")
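  // Editor's illustration (example URLs are hypothetical): the pattern accepts
  // http(s)/ftp URLs ending in .yml or .yaml, e.g.
  //   rulesURL.matches("https://example.com/grammars/rules.yml")   // expected true
  //   rulesURL.matches("https://example.com/grammars/rules.json")  // expected false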
@throws(classOf[api.BadURLException])
def urlToRules(url: String): String = rulesURL.matches(url) match {
case true =>
logger.info(s"Retrieving Odin rules from $url")
val page = Source.fromURL(url)
val rules = page.mkString
rules
case false => throw new api.BadURLException(url)
}
def toProcessorsSentence(json: JValue): Sentence = JSONSerializer.toSentence(json)
def toProcessorsDocument(json: JValue): Document = JSONSerializer.toDocument(json)
def toJSON(document: Document): JValue = document.jsonAST
def toJSON(sentence: Sentence): JValue = sentence.jsonAST
def toJSON(mentions: Seq[Mention]): JValue = mentions.jsonAST
def toJSON(error: Throwable): JValue = "error" -> error.getMessage
}
| myedibleenso/processors-server | src/main/scala/processors/ConverterUtils.scala | Scala | apache-2.0 | 1,461 |
/**
* Copyright (C) 2009-2014 Typesafe Inc. <http://www.typesafe.com>
*/
package akka.testkit
import language.postfixOps
import java.lang.ref.WeakReference
import java.util.concurrent.locks.ReentrantLock
import scala.annotation.tailrec
import com.typesafe.config.Config
import akka.actor.{ ActorInitializationException, ExtensionIdProvider, ExtensionId, Extension, ExtendedActorSystem, ActorRef, ActorCell }
import akka.dispatch.{ MessageQueue, MailboxType, TaskInvocation, MessageDispatcherConfigurator, MessageDispatcher, Mailbox, Envelope, DispatcherPrerequisites, DefaultSystemMessageQueue }
import akka.dispatch.sysmsg.{ SystemMessage, Suspend, Resume }
import scala.concurrent.duration._
import akka.util.Switch
import scala.concurrent.duration.Duration
import scala.util.control.NonFatal
import java.util.concurrent.TimeUnit
/*
* Locking rules:
*
* Normal messages are always queued thread locally.
 * Processing a queue checks suspendSwitch before each invocation and does not
 * process if the switch is on.
* When resuming an actor, all messages are atomically scooped from all threads and
* queued on the resuming thread's queue, to be processed immediately.
* Scooping up messages means replacing the ThreadLocal contents with an empty
* new MessageQueue.
*
* All accesses to the queue must be done under the suspendSwitch-switch's lock, so
* within one of its methods taking a closure argument.
*
* System messages always go directly to the actors SystemMessageQueue which isn't thread local.
*/
private[testkit] object CallingThreadDispatcherQueues extends ExtensionId[CallingThreadDispatcherQueues] with ExtensionIdProvider {
override def lookup = CallingThreadDispatcherQueues
override def createExtension(system: ExtendedActorSystem): CallingThreadDispatcherQueues = new CallingThreadDispatcherQueues
}
private[testkit] class CallingThreadDispatcherQueues extends Extension {
// PRIVATE DATA
private var queues = Map[CallingThreadMailbox, Set[WeakReference[MessageQueue]]]()
private var lastGC = 0l
// we have to forget about long-gone threads sometime
private def gc(): Unit = {
queues = (Map.newBuilder[CallingThreadMailbox, Set[WeakReference[MessageQueue]]] /: queues) {
case (m, (k, v)) ⇒
val nv = v filter (_.get ne null)
if (nv.isEmpty) m else m += (k -> nv)
}.result
}
protected[akka] def registerQueue(mbox: CallingThreadMailbox, q: MessageQueue): Unit = synchronized {
if (queues contains mbox) {
val newSet = queues(mbox) + new WeakReference(q)
queues += mbox -> newSet
} else {
queues += mbox -> Set(new WeakReference(q))
}
val now = System.nanoTime
if (now - lastGC > 1000000000l) {
lastGC = now
gc()
}
}
protected[akka] def unregisterQueues(mbox: CallingThreadMailbox): Unit = synchronized {
queues -= mbox
}
/*
* This method must be called with "own" being this thread's queue for the
* given mailbox. When this method returns, the queue will be entered
* (active).
*/
protected[akka] def gatherFromAllOtherQueues(mbox: CallingThreadMailbox, own: MessageQueue): Unit = synchronized {
if (queues contains mbox) {
for {
ref ← queues(mbox)
q = ref.get
if (q ne null) && (q ne own)
} {
val owner = mbox.actor.self
var msg = q.dequeue()
while (msg ne null) {
// this is safe because this method is only ever called while holding the suspendSwitch monitor
own.enqueue(owner, msg)
msg = q.dequeue()
}
}
}
}
}
object CallingThreadDispatcher {
val Id = "akka.test.calling-thread-dispatcher"
}
/**
* Dispatcher which runs invocations on the current thread only. This
* dispatcher does not create any new threads, but it can be used from
* different threads concurrently for the same actor. The dispatch strategy is
 * to run on the current thread unless the target actor is either suspended or
* already running on the current thread (if it is running on a different
* thread, then this thread will block until that other invocation is
* finished); if the invocation is not run, it is queued in a thread-local
* queue to be executed once the active invocation further up the call stack
* finishes. This leads to completely deterministic execution order if only one
* thread is used.
*
* Suspending and resuming are global actions for one actor, meaning they can
* affect different threads, which leads to complications. If messages are
 * queued (thread-locally) during the suspended period, the only thread to run
* them upon resume is the thread actually calling the resume method. Hence,
* all thread-local queues which are not currently being drained (possible,
* since suspend-queue-resume might happen entirely during an invocation on a
* different thread) are scooped up into the current thread-local queue which
* is then executed. It is possible to suspend an actor from within its call
* stack.
*
* @since 1.1
*/
class CallingThreadDispatcher(_configurator: MessageDispatcherConfigurator) extends MessageDispatcher(_configurator) {
import CallingThreadDispatcher._
import configurator.prerequisites._
val log = akka.event.Logging(eventStream, "CallingThreadDispatcher")
override def id: String = Id
protected[akka] override def createMailbox(actor: akka.actor.Cell, mailboxType: MailboxType) =
new CallingThreadMailbox(actor, mailboxType)
protected[akka] override def shutdown() {}
protected[akka] override def throughput = 0
protected[akka] override def throughputDeadlineTime = Duration.Zero
protected[akka] override def registerForExecution(mbox: Mailbox, hasMessageHint: Boolean, hasSystemMessageHint: Boolean): Boolean = false
protected[akka] override def shutdownTimeout = 1 second
protected[akka] override def register(actor: ActorCell): Unit = {
super.register(actor)
actor.mailbox match {
case mbox: CallingThreadMailbox ⇒
val queue = mbox.queue
runQueue(mbox, queue)
case x ⇒ throw ActorInitializationException("expected CallingThreadMailbox, got " + x.getClass)
}
}
protected[akka] override def unregister(actor: ActorCell): Unit = {
val mbox = actor.mailbox match {
case m: CallingThreadMailbox ⇒ Some(m)
case _ ⇒ None
}
super.unregister(actor)
mbox foreach CallingThreadDispatcherQueues(actor.system).unregisterQueues
}
protected[akka] override def suspend(actor: ActorCell) {
actor.mailbox match {
case m: CallingThreadMailbox ⇒ { m.suspendSwitch.switchOn; m.suspend() }
case m ⇒ m.systemEnqueue(actor.self, Suspend())
}
}
protected[akka] override def resume(actor: ActorCell) {
actor.mailbox match {
case mbox: CallingThreadMailbox ⇒
val queue = mbox.queue
val switched = mbox.suspendSwitch.switchOff {
CallingThreadDispatcherQueues(actor.system).gatherFromAllOtherQueues(mbox, queue)
mbox.resume()
}
if (switched)
runQueue(mbox, queue)
case m ⇒ m.systemEnqueue(actor.self, Resume(causedByFailure = null))
}
}
protected[akka] override def systemDispatch(receiver: ActorCell, message: SystemMessage) {
receiver.mailbox match {
case mbox: CallingThreadMailbox ⇒
mbox.systemEnqueue(receiver.self, message)
runQueue(mbox, mbox.queue)
case m ⇒ m.systemEnqueue(receiver.self, message)
}
}
protected[akka] override def dispatch(receiver: ActorCell, handle: Envelope) {
receiver.mailbox match {
case mbox: CallingThreadMailbox ⇒
val queue = mbox.queue
val execute = mbox.suspendSwitch.fold {
queue.enqueue(receiver.self, handle)
false
} {
queue.enqueue(receiver.self, handle)
true
}
if (execute) runQueue(mbox, queue)
case m ⇒ m.enqueue(receiver.self, handle)
}
}
protected[akka] override def executeTask(invocation: TaskInvocation) { invocation.run }
/*
* This method must be called with this thread's queue.
*
* If the catch block is executed, then a non-empty mailbox may be stalled as
* there is no-one who cares to execute it before the next message is sent or
 * it is suspended and resumed.
*/
@tailrec
private def runQueue(mbox: CallingThreadMailbox, queue: MessageQueue, interruptedEx: InterruptedException = null) {
def checkThreadInterruption(intEx: InterruptedException): InterruptedException = {
if (Thread.interrupted()) { // clear interrupted flag before we continue, exception will be thrown later
val ie = new InterruptedException("Interrupted during message processing")
log.error(ie, "Interrupted during message processing")
ie
} else intEx
}
def throwInterruptionIfExistsOrSet(intEx: InterruptedException): Unit = {
val ie = checkThreadInterruption(intEx)
if (ie ne null) {
Thread.interrupted() // clear interrupted flag before throwing according to java convention
throw ie
}
}
@tailrec
def process(intEx: InterruptedException): InterruptedException = {
var intex = intEx
val recurse = {
mbox.processAllSystemMessages()
val handle = mbox.suspendSwitch.fold[Envelope](null) {
if (mbox.isClosed) null else queue.dequeue()
}
if (handle ne null) {
try {
if (Mailbox.debug) println(mbox.actor.self + " processing message " + handle)
mbox.actor.invoke(handle)
intex = checkThreadInterruption(intex)
true
} catch {
case ie: InterruptedException ⇒
log.error(ie, "Interrupted during message processing")
Thread.interrupted() // clear interrupted flag before we continue, exception will be thrown later
intex = ie
true
case NonFatal(e) ⇒
log.error(e, "Error during message processing")
false
}
} else false
}
if (recurse) process(intex)
else intex
}
// if we own the lock then we shouldn't do anything since we are processing
// this actors mailbox at some other level on our call stack
if (!mbox.ctdLock.isHeldByCurrentThread) {
var intex = interruptedEx
val gotLock = try {
mbox.ctdLock.tryLock(50, TimeUnit.MILLISECONDS)
} catch {
case ie: InterruptedException ⇒
Thread.interrupted() // clear interrupted flag before we continue, exception will be thrown later
intex = ie
false
}
if (gotLock) {
val ie = try {
process(intex)
} finally {
mbox.ctdLock.unlock
}
throwInterruptionIfExistsOrSet(ie)
} else {
// if we didn't get the lock and our mailbox still has messages, then we need to try again
if (mbox.hasSystemMessages || mbox.hasMessages) {
runQueue(mbox, queue, intex)
} else {
throwInterruptionIfExistsOrSet(intex)
}
}
}
}
}
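/*
 * Hedged usage sketch added by the editor (MyActor is a hypothetical actor class):
 * in tests this dispatcher is typically selected by its id when creating an actor,
 * e.g.
 *
 *   val ref = system.actorOf(Props[MyActor].withDispatcher(CallingThreadDispatcher.Id))
 *
 * after which every message sent to ref is processed synchronously on the sending
 * thread, as described in the class documentation above.
 */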
class CallingThreadDispatcherConfigurator(config: Config, prerequisites: DispatcherPrerequisites)
extends MessageDispatcherConfigurator(config, prerequisites) {
private val instance = new CallingThreadDispatcher(this)
override def dispatcher(): MessageDispatcher = instance
}
class CallingThreadMailbox(_receiver: akka.actor.Cell, val mailboxType: MailboxType)
extends Mailbox(null) with DefaultSystemMessageQueue {
val system = _receiver.system
val self = _receiver.self
private val q = new ThreadLocal[MessageQueue]() {
override def initialValue = {
val queue = mailboxType.create(Some(self), Some(system))
CallingThreadDispatcherQueues(system).registerQueue(CallingThreadMailbox.this, queue)
queue
}
}
/**
* This is only a marker to be put in the messageQueue’s stead to make error
* messages pertaining to violated mailbox type requirements less cryptic.
*/
override val messageQueue: MessageQueue = q.get
override def enqueue(receiver: ActorRef, msg: Envelope): Unit = q.get.enqueue(receiver, msg)
override def dequeue(): Envelope = throw new UnsupportedOperationException("CallingThreadMailbox cannot dequeue normally")
override def hasMessages: Boolean = q.get.hasMessages
override def numberOfMessages: Int = 0
def queue = q.get
val ctdLock = new ReentrantLock
val suspendSwitch = new Switch
override def cleanUp(): Unit = {
/*
* This is called from dispatcher.unregister, i.e. under this.lock. If
* another thread obtained a reference to this mailbox and enqueues after
* the gather operation, tough luck: no guaranteed delivery to deadLetters.
*/
suspendSwitch.locked {
val qq = queue
CallingThreadDispatcherQueues(actor.system).gatherFromAllOtherQueues(this, qq)
super.cleanUp()
qq.cleanUp(actor.self, actor.dispatcher.mailboxes.deadLetterMailbox.messageQueue)
q.remove()
}
}
}
| Fincore/org.spark-project.akka | testkit/src/main/scala/akka/testkit/CallingThreadDispatcher.scala | Scala | mit | 13,164 |
package one.lockstep.multilock.protocol
import scodec.Codec
sealed trait Party
case object Client extends Party //there is always a single client in the context of a session
case class Server(id: String) extends Party
object Party {
implicit lazy val codec: Codec[Party] = ???
}
| lockstep-one/vault | vault-common/src/main/scala/one/lockstep/multilock/protocol/Party.scala | Scala | agpl-3.0 | 285 |
/* *\\
** Squants **
** **
** Scala Quantities and Units of Measure Library and DSL **
** (c) 2013-2014, Gary Keorkunian **
** **
\\* */
package squants.photo
import org.scalatest.{ Matchers, FlatSpec }
import scala.language.postfixOps
import squants.time.Seconds
import squants.space.{ SquaredRadians, SquareMeters }
import squants.QuantityStringParseException
/**
* @author garyKeorkunian
* @since 0.1
*
*/
class LuminousFluxSpec extends FlatSpec with Matchers {
behavior of "LuminousFlux and its Units of Measure"
it should "create values using UOM factories" in {
Lumens(1).toLumens should be(1)
}
it should "create values from properly formatted Strings" in {
LuminousFlux("10.22 lm").get should be(Lumens(10.22))
LuminousFlux("10.45 zz").failed.get should be(QuantityStringParseException("Unable to parse LuminousFlux", "10.45 zz"))
LuminousFlux("zz lm").failed.get should be(QuantityStringParseException("Unable to parse LuminousFlux", "zz lm"))
}
it should "properly convert to all supported Units of Measure" in {
val x = Lumens(1)
x.toLumens should be(1)
}
it should "return properly formatted strings for all supported Units of Measure" in {
Lumens(1).toString(Lumens) should be("1.0 lm")
}
it should "return LuminousEnergy when multiplied by Time" in {
Lumens(1) * Seconds(1) should be(LumenSeconds(1))
}
it should "return Illuminance when divided by Area" in {
Lumens(1) / SquareMeters(1) should be(Lux(1))
}
it should "return LuminousIntensity when divided by SolidAngle" in {
Lumens(1) / SquaredRadians(1) should be(Candelas(1))
}
it should "return Area when divided by Illuminance" in {
Lumens(1) / Lux(1) should be(SquareMeters(1))
}
it should "return SolidAngle when divided by LuminousIntensity" in {
Lumens(1) / Candelas(1) should be(SquaredRadians(1))
}
behavior of "LuminousFluxConversions"
it should "provide aliases for single unit values" in {
import LuminousFluxConversions._
lumen should be(Lumens(1))
}
it should "provide implicit conversion from Double" in {
import LuminousFluxConversions._
val d = 10d
d.lumens should be(Lumens(d))
}
it should "provide Numeric support" in {
import LuminousFluxConversions.LuminousFluxNumeric
val lfs = List(Lumens(100), Lumens(1))
lfs.sum should be(Lumens(101))
}
}
| non/squants | src/test/scala/squants/photo/LuminousFluxSpec.scala | Scala | apache-2.0 | 2,777 |
package com.github.gdefacci.raz
import scalaz.{ -\/, \/, \/- }
import java.net.URLDecoder
private[raz] object DecodeUtils {
private val percentTripletSize = 3
private val hexadecimal = 16
def replacePercentTriplets(s: String): String = {
var needToChange = false;
val encoding: String = "UTF-8"
val numChars = s.length();
val sb = new StringBuilder(numChars);
var i = 0
while (i < numChars) {
var c = s.charAt(i)
c match {
case '%' =>
needToChange = true
var pos = 0
val bytes = Array.ofDim[Byte]((numChars - i) / percentTripletSize)
while ((i + percentTripletSize - 1) < numChars && c == '%') {
val v = Integer.parseInt(s.substring(i + 1, i + percentTripletSize), hexadecimal);
if (v < 0)
throw new IllegalArgumentException("Illegal hex characters in escape (%) pattern - negative value");
bytes(pos) = v.asInstanceOf[Byte]
pos += 1
i += percentTripletSize
if (i < numChars)
c = s.charAt(i);
}
if ((i < numChars) && (c == '%'))
throw new IllegalArgumentException(
"Incomplete trailing escape (%) pattern");
sb.append(new String(bytes, 0, pos, encoding));
case x =>
sb.append(c)
i += 1
}
}
if (needToChange) sb.toString else s
}
  def subtract(from: Path, what: Path): Throwable \/ Path = {
    PathDecoder.path(what).decode(from).map(mr => mr.rest)
  }
  def subtract(from: UriTemplate, prefix: Path): Throwable \/ UriTemplate = subtract(from, UriTemplate(prefix))
  def subtract(from: UriTemplate, prefix: UriTemplate): Throwable \/ UriTemplate = {
    lazy val error = -\/(new RuntimeException(s"${prefix.render} is not a prefix of ${from.render}"))
if (from.scheme != prefix.scheme || from.authority != prefix.authority) error
else prefix match {
case UriTemplate(_, _, segments, pars, fragment) if pars.isEmpty && fragment.isEmpty =>
        if (from.segments.startsWith(segments)) \/-(from.copy(scheme = None, authority = None, segments = from.segments.drop(segments.length)))
else error
case UriTemplate(_, _, segments, pars, fragment) if from.segments == segments =>
val remainingPars = collection.mutable.Buffer.empty[UriTemplate.Param]
var it = from.params.iterator
while (it.hasNext) {
val nxt = it.next
if (pars.indexOf(nxt) < 0) {
remainingPars += nxt
}
}
fragment match {
          case None => \/-(from.copy(scheme = None, authority = None, segments = Nil, params = remainingPars))
          case Some(frag) if remainingPars.isEmpty && fragment == from.fragment => \/-(UriTemplate)
case Some(frag) => error
}
case _ => error
}
}
private def paramDecode = URLDecoder.decode(_: String, "UTF-8")
  private def parseParams(queryString: String): Throwable \/ Seq[(String, Option[String])] = {
    val zpars: Throwable \/ Seq[(String, Option[String])] = \/-(Nil)
Option(queryString) match {
case None => zpars
case Some(q) =>
q.split("&").filter(_.nonEmpty).toSeq.foldLeft(zpars) { (acc, itm) =>
acc.flatMap { rpars =>
itm.split("=").filter(_.nonEmpty).toSeq match {
            case Seq(k) => \/-(rpars :+ (paramDecode(k) -> None))
            case Seq(k, v) => \/-(rpars :+ (paramDecode(k) -> Some(paramDecode(v))))
            case _ => -\/(new RuntimeException(s"invalid argument pair $itm"))
}
}
}
}
}
  def fromJavaUrl(u: java.net.URL): Throwable \/ Path = {
val params = parseParams(u.getQuery)
val frg = Option(u.getRef).map(DecodeUtils.replacePercentTriplets)
for (scheme <- Scheme.fromString(u.getProtocol); pars <- params) yield Path(Some(scheme),
Some(Authority(u.getHost, if (u.getPort > 0) u.getPort else 80)),
u.getPath.split("/").filter(_.nonEmpty).map(DecodeUtils.replacePercentTriplets).toList,
pars,
frg)
}
  def fromJavaUri(uri: java.net.URI): Throwable \/ Path = {
for {
scheme <- Option(uri.getScheme) match {
        case None => \/-(None)
case Some(v) => Scheme.fromString(uri.getScheme).map(Some(_))
}
port = uri.getPort
authority = Option(uri.getHost).map(hst => Authority(hst, if (port < 0) 80 else port))
path = uri.getPath
pth = if (path.startsWith("/")) path.substring(1) else path
pathParts = pth.split("/").filter(_.length > 0).map(replacePercentTriplets(_))
params <- parseParams(uri.getRawQuery)
fragment = Option(uri.getFragment)
} yield (Path(scheme, authority, pathParts.toList, params, fragment))
}
} | gdefacci/raz | raz/src/main/scala/com/github/gdefacci/raz/DecodeUtils.scala | Scala | mit | 4,803 |
/*
* Copyright (c) 2011-2017 Interfaculty Department of Geoinformatics, University of
* Salzburg (Z_GIS) & Institute of Geological and Nuclear Sciences Limited (GNS Science)
* in the SMART Aquifer Characterisation (SAC) programme funded by the New Zealand
* Ministry of Business, Innovation and Employment (MBIE)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import models.metadata.ValidValues
/**
* Specification test for {@link ValidValues}
*/
class ValidValuesSpec extends WithDefaultTest {
"Instantiation " should {
"succeed on values list + standard value" in {
new ValidValues(0, List("test", "test2"), None)
}
"succeed on values list + descriptions + standard value" in {
new ValidValues(0, List("test", "test2"), Some(List("Description", "Description 2")))
}
"fail on empty values list" in {
val thrown = intercept[IllegalArgumentException] {
new ValidValues(0, List(), None)
}
thrown.getMessage mustEqual "requirement failed: values list must not be empty"
}
"fail on values and descriptions list of different size" in {
val thrown = intercept[IllegalArgumentException] {
new ValidValues(0, List("test"), Some(List("Description", "Description 2")))
}
thrown.getMessage mustEqual "requirement failed: decriptions list must either be None or same length as values list"
}
"fail on standardValue too big" in {
val thrown = intercept[IllegalArgumentException] {
new ValidValues(4, List("test", "test2"), Some(List("Description", "Description 2")))
}
thrown.getMessage mustEqual "requirement failed: standardValue must be within values list length"
}
"fail on standardValue too small" in {
val thrown = intercept[IllegalArgumentException] {
new ValidValues(-4, List("test", "test2"), Some(List("Description", "Description 2")))
}
thrown.getMessage mustEqual "requirement failed: standardValue must be within values list length"
}
"fail on non-unique values" in{
val thrown = intercept[IllegalArgumentException] {
new ValidValues(1, List("test", "test2", "test"), None)
}
thrown.getMessage mustEqual "requirement failed: all values must be unique"
}
"fail on non-unique descriptions" in{
val thrown = intercept[IllegalArgumentException] {
new ValidValues(1, List("test", "test2", "test3"), Some(List("Description", "Description", "Description 3")))
}
thrown.getMessage mustEqual "requirement failed: all descriptions must be unique"
}
}
}
| ZGIS/smart-portal-backend | test/ValidValuesSpec.scala | Scala | apache-2.0 | 3,112 |
package shopScala.queries
import java.util.concurrent.CountDownLatch
import org.mongodb.scala._
import org.mongodb.scala.model.Filters
import org.reactivestreams.Publisher
import rx.lang.scala.{JavaConversions, Observable}
import shopScala.util.Constants._
import shopScala.util.Util._
import shopScala.util._
import shopScala.util.conversion.RxStreamsConversions
/*
For conversions from MongoDBObservable to RxStreams Publisher see:
https://mongodb.github.io/mongo-scala-driver/1.1/integrations/
https://github.com/mongodb/mongo-scala-driver/tree/master/examples/src/test/scala/rxStreams
*/
object QueryS08SDriverRxStreamsWithObservables extends App {
type MongoObservable[T] = org.mongodb.scala.Observable[T]
object dao {
val client: MongoClient = MongoClient(MONGODB_URI)
val db: MongoDatabase = client.getDatabase(SHOP_DB_NAME)
val usersCollection: MongoCollection[Document] = db.getCollection(USERS_COLLECTION_NAME)
val ordersCollection: MongoCollection[Document] = db.getCollection(ORDERS_COLLECTION_NAME)
private def _findUserByName(name: String): MongoObservable[User] = {
usersCollection
.find(Filters.eq("_id", name))
.first()
.map(doc => User(doc))
}
private def _findOrdersByUsername(username: String): MongoObservable[Order] = {
ordersCollection
.find(Filters.eq("username", username))
.map(doc => Order(doc))
}
def findUserByName(name: String): Publisher[User] = {
RxStreamsConversions.observableToPublisher(_findUserByName(name))
}
def findOrdersByUsername(username: String): Publisher[Order] = {
RxStreamsConversions.observableToPublisher(_findOrdersByUsername(username))
}
} // end dao
def logIn(credentials: Credentials): Observable[String] = {
publisherToObservable(dao.findUserByName(credentials.username))
.single
.map(user => checkCredentials(user, credentials))
.map(user => user.name)
}
def processOrdersOf(username: String): Observable[Result] = {
val obsOrders: Observable[Order] = publisherToObservable(dao.findOrdersByUsername(username))
val obsPairs: Observable[(Int, Int)] = obsOrders.map(order => (order.amount, 1))
val obsPair: Observable[(Int, Int)] = obsPairs.foldLeft(0, 0)((t1, t2) => (t1._1 + t2._1, t1._2 + t2._2))
obsPair.map(pair => Result(username, pair._2, pair._1))
}
def publisherToObservable[T](pub: Publisher[T]): Observable[T] = {
val javaObs: rx.Observable[T] = rx.RxReactiveStreams.toObservable(pub)
JavaConversions.toScalaObservable(javaObs)
}
def eCommerceStatistics(credentials: Credentials): Unit = {
println(s"--- Calculating eCommerce statistics for user ${credentials.username} ...")
val latch: CountDownLatch = new CountDownLatch(1)
logIn(credentials)
.flatMap(username => processOrdersOf(username))
.subscribe(result => result.display(),
t => { Console.err.println(t.toString); latch.countDown()},
() => latch.countDown()
)
latch.await()
}
eCommerceStatistics(Credentials(LISA, "password"))
Thread sleep 2000L
eCommerceStatistics(Credentials(LISA, "bad_password"))
Thread sleep 2000L
eCommerceStatistics(Credentials(LISA.toUpperCase, "password"))
Thread sleep 2000L
}
| hermannhueck/reactive-mongo-access | src/main/scala/shopScala/queries/QueryS08SDriverRxStreamsWithObservables.scala | Scala | apache-2.0 | 3,304 |
package net.liftmodules
import _root_.net.liftweb._
import util.{Props}
import http._
/**
* ==FoBo Bootstrap 3 Resource Module==
*
* This resource module provides Bootstrap resource components to the FoBo Bootstrap 3 Toolkit module,
* but can also be used as-is, see below for setup information.
*
* If you are using this module via the FoBo/FoBo module see also [[net.liftmodules.fobo]] for setup information.
*
*/
package object fobobsres {
override def toString() = fobobsres.Resource.toString()
/**
   * Initiate FoBo's Bootstrap 3 Resource(s) in your bootstrap liftweb Boot.
*
* @example
* {{{
* import net.liftmodules.{fobobsres => fobo}
* :
* fobo.Resource.init=fobo.Resource.[Resource Object]
* }}}
* '''Note:''' To see available objects click on the round trait icon in the header of this page.
*/
sealed trait Resource
object Resource extends Resource {
//we don't actually need to store the objects (for now) so lets just save
//the object name, we can easily change this if we need to
private type Store = List[String] //List[Resource]
private var store: Store = List()
def init: Store = store
def init_=(t: Resource): Store = {
store = if (store contains t.toString) store else t.toString :: store
store
}
override def toString() = "fobobsres.Resource = " + store.toString()
/**
     * Enable usage of FoBo's Bootstrap version 3․3․7 resource files in your bootstrap liftweb Boot.
* @version 3.3.7
*
* @example
*
* {{{
* import net.liftmodules.{fobobsres => fobo}
* :
* fobo.Resource.init=fobo.Resource.Bootstrap337
* }}}
* @since v1.7
*/
case object Bootstrap337 extends Resource {
FoBoResources.init
FoBoResources.bootstrap337
}
/**
     * Enable usage of FoBo's Bootstrap version 3․2․0 resource files in your bootstrap liftweb Boot.
* @version 3.2.0
*
* @example
*
* {{{
* import net.liftmodules.{fobobsres => fobo}
* :
* fobo.Resource.init=fobo.Resource.Bootstrap320
* }}}
*
*/
case object Bootstrap320 extends Resource {
FoBoResources.init
FoBoResources.bootstrap320
}
/**
* Enable usage of FoBo's Bootstrap version 3․1․1 resources files in your bootstrap liftweb Boot.
* @version 3.1.1
*
* @example
*
* {{{
* import net.liftmodules.{fobobsres => fobo}
* :
* fobo.Resource.init=fobo.Resource.Bootstrap311
* }}}
*
*/
case object Bootstrap311 extends Resource {
FoBoResources.init
FoBoResources.bootstrap311
}
/**
     * Enable usage of FoBo's Bootstrap version 3․0․1 resource files in your bootstrap liftweb Boot.
* @version 3.0.1
*
* @example
*
* {{{
* import net.liftmodules.{fobobsres => fobo}
* :
* fobo.Resource.init=fobo.Resource.Bootstrap301
* }}}
*
*/
case object Bootstrap301 extends Resource {
FoBoResources.init
FoBoResources.bootstrap301
}
}
/**
* Object holding internally used FoBo resources.
*/
private object FoBoResources {
lazy val init: Unit = {
ResourceServer.allow {
case "fobo" :: tail => true
}
}
lazy val bootstrap337: Unit = {
ResourceServer.rewrite {
case "fobo" :: "bootstrap.css" :: Nil if Props.devMode =>
List("fobo", "bootstrap", "3.3.7", "css", "bootstrap.css")
case "fobo" :: "bootstrap.css" :: Nil =>
List("fobo", "bootstrap", "3.3.7", "css", "bootstrap.min.css")
case "fobo" :: "bootstrap.css.map" :: Nil if Props.devMode =>
List("fobo", "bootstrap", "3.3.7", "css", "bootstrap.css.map")
case "fobo" :: "bootstrap.css.map" :: Nil =>
List("fobo", "bootstrap", "3.3.7", "css", "bootstrap.min.css.map")
case "fobo" :: "bootstrap-theme.css" :: Nil if Props.devMode =>
List("fobo", "bootstrap", "3.3.7", "css", "bootstrap-theme.css")
case "fobo" :: "bootstrap-theme.css" :: Nil =>
List("fobo", "bootstrap", "3.3.7", "css", "bootstrap-theme.min.css")
case "fobo" :: "bootstrap-theme.css.map" :: Nil if Props.devMode =>
List("fobo", "bootstrap", "3.3.7", "css", "bootstrap-theme.css.map")
case "fobo" :: "bootstrap-theme.css.map" :: Nil =>
List("fobo",
"bootstrap",
"3.3.7",
"css",
"bootstrap-theme.min.css.map")
case "fobo" :: "bootstrap.js" :: Nil if Props.devMode =>
List("fobo", "bootstrap", "3.3.7", "js", "bootstrap.js")
case "fobo" :: "bootstrap.js" :: Nil =>
List("fobo", "bootstrap", "3.3.7", "js", "bootstrap.min.js")
}
}
lazy val bootstrap320: Unit = {
ResourceServer.rewrite {
case "fobo" :: "bootstrap.css" :: Nil if Props.devMode =>
List("fobo", "bootstrap", "3.2.0", "css", "bootstrap.css")
case "fobo" :: "bootstrap.css" :: Nil =>
List("fobo", "bootstrap", "3.2.0", "css", "bootstrap.min.css")
case "fobo" :: "bootstrap.css.map" :: Nil =>
List("fobo", "bootstrap", "3.2.0", "css", "bootstrap.css.map")
case "fobo" :: "bootstrap-theme.css" :: Nil if Props.devMode =>
List("fobo", "bootstrap", "3.2.0", "css", "bootstrap-theme.css")
case "fobo" :: "bootstrap-theme.css" :: Nil =>
List("fobo", "bootstrap", "3.2.0", "css", "bootstrap-theme.min.css")
case "fobo" :: "bootstrap-theme.css.map" :: Nil =>
List("fobo", "bootstrap", "3.2.0", "css", "bootstrap-theme.css.map")
case "fobo" :: "bootstrap.js" :: Nil if Props.devMode =>
List("fobo", "bootstrap", "3.2.0", "js", "bootstrap.js")
case "fobo" :: "bootstrap.js" :: Nil =>
List("fobo", "bootstrap", "3.2.0", "js", "bootstrap.min.js")
}
}
lazy val bootstrap311: Unit = {
ResourceServer.rewrite {
case "fobo" :: "bootstrap.css" :: Nil if Props.devMode =>
List("fobo", "bootstrap", "3.1.1", "css", "bootstrap.css")
case "fobo" :: "bootstrap.css" :: Nil =>
List("fobo", "bootstrap", "3.1.1", "css", "bootstrap.min.css")
case "fobo" :: "bootstrap.css.map" :: Nil =>
List("fobo", "bootstrap", "3.1.1", "css", "bootstrap.css.map")
case "fobo" :: "bootstrap-theme.css" :: Nil if Props.devMode =>
List("fobo", "bootstrap", "3.1.1", "css", "bootstrap-theme.css")
case "fobo" :: "bootstrap-theme.css" :: Nil =>
List("fobo", "bootstrap", "3.1.1", "css", "bootstrap-theme.min.css")
case "fobo" :: "bootstrap-theme.css.map" :: Nil =>
List("fobo", "bootstrap", "3.1.1", "css", "bootstrap-theme.css.map")
case "fobo" :: "bootstrap.js" :: Nil if Props.devMode =>
List("fobo", "bootstrap", "3.1.1", "js", "bootstrap.js")
case "fobo" :: "bootstrap.js" :: Nil =>
List("fobo", "bootstrap", "3.1.1", "js", "bootstrap.min.js")
}
}
lazy val bootstrap301: Unit = {
ResourceServer.rewrite {
case "fobo" :: "bootstrap.css" :: Nil if Props.devMode =>
List("fobo", "bootstrap", "3.0.1", "css", "bootstrap.css")
case "fobo" :: "bootstrap.css" :: Nil =>
List("fobo", "bootstrap", "3.0.1", "css", "bootstrap.min.css")
case "fobo" :: "bootstrap-theme.css" :: Nil if Props.devMode =>
List("fobo", "bootstrap", "3.0.1", "css", "bootstrap-theme.css")
case "fobo" :: "bootstrap-theme.css" :: Nil =>
List("fobo", "bootstrap", "3.0.1", "css", "bootstrap-theme.min.css")
case "fobo" :: "bootstrap.js" :: Nil if Props.devMode =>
List("fobo", "bootstrap", "3.0.1", "js", "bootstrap.js")
case "fobo" :: "bootstrap.js" :: Nil =>
List("fobo", "bootstrap", "3.0.1", "js", "bootstrap.min.js")
}
}
}
}
| karma4u101/FoBo | Bootstrap/Bootstrap3/TwBs-Bootstrap3-Res/src/main/scala/net/liftmodules/fobobs/fobobsres.scala | Scala | apache-2.0 | 8,114 |
package ch.fhnw.imvs.trails
import org.scalatest.FunSuite
class TrailsTest extends FunSuite {
object T extends Trails
import T._
test("seq") {
val t0 = seq(success[Null,Null,String]("fst"), success[Null,Null,String]("snd"))
val res0 = t0(null)(null)
assert(res0.size === 1)
val (_, fst ~ snd) = res0.head
assert(fst === "fst")
assert(snd === "snd")
}
test("seq with fail -> fail") {
val t0 = seq(success[Null,Null,String]("fst"), T.fail[Null,Null])
val res0 = t0(null)(null)
assert(res0.size === 0)
val t1 = seq(T.fail[Null,Null], success[Null,Null,String]("snd"))
val res1 = t1(null)(null)
assert(res1.size === 0)
}
test("choice") {
val t0: Tr[Null,Null,Null,String] = choice(success("left"), success("right"))
val res0 = t0(null)(null)
assert(res0.size === 2)
val values = res0.map(_._2)
assert(values contains "left")
assert(values contains "right")
}
test("choice has fail as neutral element") {
val t0: Tr[Null,Null,Null,String] = choice(success("left"), T.fail)
val res0 = t0(null)(null)
assert(res0.size === 1)
val (_, v0) = res0.head
assert(v0 === "left")
val t1: Tr[Null,Null,Null,String] = choice(T.fail, success("right"))
val res1 = t1(null)(null)
assert(res1.size === 1)
val (_, v1) = res1.head
assert(v1 === "right")
}
test("choice is lazy") {
def manyS: Tr[Null,Null,Null,String] = choice(success("S"), manyS)
val res0 = manyS(null)(null)
val ten = res0.take(10)
assert(ten.size === 10)
ten.foreach{
case (s0,v0) => assert(v0 === "S")
}
}
test("mmany (choice is lazy II)") {
def mmany[E,S,A](tr: Tr[E, S, S, A]): Tr[E, S, S, Stream[A]] = T.success[E,S,Stream[A]](Stream()) | tr ~ mmany(tr) ^^ { case a ~ as => a #:: as}
val t0 = mmany(success[Null,Null,String]("S"))
val ten = t0(null)(null).take(10)
assert(ten.size === 10)
ten.zipWithIndex.foreach {
case ((s0,v0),i) =>
assert(v0.size === i)
assert(v0.forall(_ == "S"))
}
}
test("seq should not allow meaningless recursion") {
intercept[StackOverflowError] {
lazy val manyS: Tr[Null,Null,Null,Nothing] = seq(manyS, T.fail[Null,Null]).map{case a ~ b => b}
manyS
}
}
}
| danielkroeni/trails | core/src/test/scala/ch/fhnw/imvs/trails/TrailsTest.scala | Scala | mit | 2,290 |
/*
* Copyright 2011-2022 GatlingCorp (https://gatling.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.gatling.recorder.config
import io.gatling.commons.util.ClassSimpleNameToString
import io.gatling.recorder.util.Labelled
sealed abstract class RecorderMode(val label: String) extends Labelled with ClassSimpleNameToString with Product with Serializable
object RecorderMode {
case object Proxy extends RecorderMode("HTTP Proxy")
case object Har extends RecorderMode("HAR Converter")
val AllModes: List[RecorderMode] = List(Proxy, Har)
def apply(s: String): RecorderMode =
AllModes.find(_.toString == s).getOrElse {
throw new IllegalArgumentException(s"$s is not a valid Recorder mode")
}
}
| gatling/gatling | gatling-recorder/src/main/scala/io/gatling/recorder/config/RecorderMode.scala | Scala | apache-2.0 | 1,245 |